Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Projects:
  • mosaic/software/parallel-computing/openfpm/openfpm_pdata
  • argupta/openfpm_pdata
Commits on Source (3372)
Showing 59 additions and 3156 deletions
@@ -25,6 +25,7 @@
*.log
*.sql
*.sqlite
*.xml
# OS generated files #
######################
@@ -36,40 +37,42 @@
ehthumbs.db
Thumbs.db
# CLion IDE-related
.idea/
cmake-build-debug/
cmake-build-release/
# Script-generated build folder
build/
###### Other
AUTHORS
COPYING
INSTALL
NEWS
README
**/vtk/Makefile
**/src/Makefile
./Makefile
Makefile.in
config.status
configure
numerics
Makefile
**/.deps
**/src/config
aclocal.m4
**/autom4te.cache
example.mk
src/pdata
vtk/cart_dec
vtk/dom_box
vtk/metis_dec
.autotools
.cproject
.project
.settings
ar-lib
compile
config.guess
config.sub
depcomp
install-sh
missing
install_dir
**/*.vtk
!**/test_data/*.vtk
*.csv
*.h5
*.json
*.html
Makefile
/build
*CMake*
*cmake*
**/*cmake*
**/*CMake*
**/.gitignore
.gitignore
/doxygen/
[submodule "openfpm_vcluster"]
path = openfpm_vcluster
url = ssh://git@ppmcore.mpi-cbg.de/incardon/openfpm_vcluster.git
[submodule "openfpm_devices"]
path = openfpm_devices
url = ssh://git@ppmcore.mpi-cbg.de/incardon/openfpm_devices.git
[submodule "openfpm_io"]
path = openfpm_io
url = ssh://git@ppmcore.mpi-cbg.de/incardon/openfpm_io.git
[submodule "openfpm_data"]
path = openfpm_data
url = ssh://git@ppmcore.mpi-cbg.de/incardon/openfpm_data.git
[submodule "openfpm_numerics"]
path = openfpm_numerics
url = ssh://git@ppmcore.mpi-cbg.de/incardon/openfpm_numerics.git
# Change Log
All notable changes to this project will be documented in this file.
## [0.4.0] -
### Added
- Grid with periodic boundary conditions
- VTK writer for distributed grid; it is now the default writer
### Fixed
- GPU compilation
### Changed
## [0.3.0] - 2016-04-16
### Added
- Molecular Dynamics example
- addUpdateCell list for a more efficient update of the cell list instead of recreating the CellList
### Fixed
- Nothing to report
### Changed
- Eliminated global_v_cluster, init_global_v_cluster, delete_global_v_cluster; replaced by create_vcluster, openfpm_init, openfpm_finalize
- CartDecomposition parameter for the distributed structures is now optional
- Templated getPos<0>() replaced by getPos()
## [0.2.1] - 01-04-2016
### Changed
- GoogleChart function names changed: AddPointGraph to AddLinesGraph and AddColumsGraph to AddHistGraph
## [0.2.0] - 2016-03-25
### Added
- Load Balancing and Dynamic Load Balancing (beta)
- PSE 1D example with multiple precision
- Plot example for GoogleChart plotting
- Distributed data structures now support 128-bit floating point precision (beta)
### Fixed
- Detection of 32-bit systems, reported as an error
- Bug in rounding off for periodic boundary conditions
### Changed
- Nothing to report
## [0.1.0] - 2016-02-05
### Added
- PSE 1D example
- Cell list example
- Verlet list example
- Kickstart for OpenFPM_numeric
- Automated dependency installation for SUITESPARSE, EIGEN, OPENBLAS (LAPACK)
### Fixed
- CRITICAL BUG in periodic boundary condition
- BOOST auto updated to 1.60
- Compilation with multiple .cpp files
### Changed
- Nothing to report
cmake_minimum_required(VERSION 3.8 FATAL_ERROR)
project(openfpm_pdata LANGUAGES C CXX)
if (POLICY CMP0074)
cmake_policy(SET CMP0074 NEW)
endif()
set(openfpm_VERSION 5.0.0)
if (METIS_FOUND)
set(DEFINE_HAVE_METIS "#define HAVE_METIS 1")
else()
message( FATAL_ERROR "Metis is required in order to install OpenFPM" )
endif()
if (PARMETIS_FOUND)
set(DEFINE_HAVE_PARMETIS "#define HAVE_PARMETIS 1")
else()
message( FATAL_ERROR "ParMetis is required in order to install OpenFPM")
endif()
add_subdirectory (src)
file(READ ${CMAKE_CURRENT_SOURCE_DIR}/src/cmake/openfpmConfig-configure.cmake CMAKE_OPENFPM_CONFIG_VARS)
set(CMAKE_OPENFPM_CONFIG_VARS "${CMAKE_OPENFPM_CONFIG_VARS}\nmessage(STATUS \"Found OpenFPM version ${openfpm_VERSION} (\$\{CMAKE_CURRENT_LIST_FILE\})\")")
set(CMAKE_OPENFPM_CONFIG_VARS "${CMAKE_OPENFPM_CONFIG_VARS}\nset(OPENFPM_CUDA_ON_BACKEND \"${CUDA_ON_BACKEND}\")")
file(WRITE ${CMAKE_CURRENT_SOURCE_DIR}/src/cmake/openfpmConfig-Vars.cmake "${CMAKE_OPENFPM_CONFIG_VARS}")
get_directory_property(hasParent PARENT_DIRECTORY)
if(hasParent)
set(DEFINE_HAVE_METIS ${DEFINE_HAVE_METIS} CACHE INTERNAL "")
set(DEFINE_HAVE_PARMETIS ${DEFINE_HAVE_PARMETIS} CACHE INTERNAL "")
endif()
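# A minimal downstream-usage sketch (hypothetical; not part of this repository).
# The openfpmConfig-*.cmake files written above follow CMake's package-config
# naming convention, so an application project could plausibly consume the
# installed package along these lines:
#
#   cmake_minimum_required(VERSION 3.8)
#   project(my_openfpm_app LANGUAGES CXX)           # my_openfpm_app is a placeholder name
#   find_package(openfpm ${openfpm_VERSION} REQUIRED)  # assumes openfpm_DIR points at the installed config
#   add_executable(my_app main.cpp)
#   # target_link_libraries(my_app ...) would then use whatever targets/variables
#   # the generated openfpmConfig files actually expose.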
SUBDIRS = src images openfpm_data openfpm_io openfpm_devices openfpm_vcluster openfpm_numerics
bin_PROGRAMS =
#!/bin/sh
# a u t o g e n . s h
#
# Copyright (c) 2005-2009 United States Government as represented by
# the U.S. Army Research Laboratory.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# 3. The name of the author may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
###
#
# Script for automatically preparing the sources for compilation by
# performing the myriad of necessary steps. The script attempts to
# detect proper version support, and outputs warnings about particular
# systems that have autotool peculiarities.
#
# Basically, if everything is set up and installed correctly, the
# script will validate that minimum versions of the GNU Build System
# tools are installed, account for several common configuration
# issues, and then simply run autoreconf for you.
#
# If autoreconf fails, which can happen for many valid configurations,
# this script proceeds to run manual preparation steps effectively
# providing a POSIX shell script (mostly complete) reimplementation of
# autoreconf.
#
# The AUTORECONF, AUTOCONF, AUTOMAKE, LIBTOOLIZE, ACLOCAL, AUTOHEADER
# environment variables and corresponding _OPTIONS variables (e.g.
# AUTORECONF_OPTIONS) may be used to override the default automatic
# detection behaviors. Similarly the _VERSION variables will override
# the minimum required version numbers.
#
# Examples:
#
# To obtain help on usage:
# ./autogen.sh --help
#
# To obtain verbose output:
# ./autogen.sh --verbose
#
# To skip autoreconf and prepare manually:
# AUTORECONF=false ./autogen.sh
#
# To verbosely try running with an older (unsupported) autoconf:
# AUTOCONF_VERSION=2.50 ./autogen.sh --verbose
#
# Author:
# Christopher Sean Morrison <morrison@brlcad.org>
#
# Patches:
# Sebastian Pipping <sebastian@pipping.org>
#
######################################################################
# set to minimum acceptable version of autoconf
if [ "x$AUTOCONF_VERSION" = "x" ] ; then
AUTOCONF_VERSION=2.52
fi
# set to minimum acceptable version of automake
if [ "x$AUTOMAKE_VERSION" = "x" ] ; then
AUTOMAKE_VERSION=1.6.0
fi
# set to minimum acceptable version of libtool
if [ "x$LIBTOOL_VERSION" = "x" ] ; then
LIBTOOL_VERSION=1.4.2
fi
##################
# ident function #
##################
ident ( ) {
# extract copyright from header
__copyright="`grep Copyright $AUTOGEN_SH | head -${HEAD_N}1 | awk '{print $4}'`"
if [ "x$__copyright" = "x" ] ; then
__copyright="`date +%Y`"
fi
# extract version from CVS Id string
__id="$Id: autogen.sh 33925 2009-03-01 23:27:06Z brlcad $"
__version="`echo $__id | sed 's/.*\([0-9][0-9][0-9][0-9]\)[-\/]\([0-9][0-9]\)[-\/]\([0-9][0-9]\).*/\1\2\3/'`"
if [ "x$__version" = "x" ] ; then
__version=""
fi
echo "autogen.sh build preparation script by Christopher Sean Morrison"
echo " + config.guess download patch by Sebastian Pipping (2008-12-03)"
echo "revised 3-clause BSD-style license, copyright (c) $__copyright"
echo "script version $__version, ISO/IEC 9945 POSIX shell script"
}
##################
# USAGE FUNCTION #
##################
usage ( ) {
echo "Usage: $AUTOGEN_SH [-h|--help] [-v|--verbose] [-q|--quiet] [-d|--download] [--version]"
echo " --help Help on $NAME_OF_AUTOGEN usage"
echo " --verbose Verbose progress output"
echo " --quiet Quiet suppressed progress output"
echo " --download Download the latest config.guess from gnulib"
echo " --version Only perform GNU Build System version checks"
echo
echo "Description: This script will validate that minimum versions of the"
echo "GNU Build System tools are installed and then run autoreconf for you."
echo "Should autoreconf fail, manual preparation steps will be run"
echo "potentially accounting for several common preparation issues. The"
echo "AUTORECONF, AUTOCONF, AUTOMAKE, LIBTOOLIZE, ACLOCAL, AUTOHEADER,"
echo "PROJECT, & CONFIGURE environment variables and corresponding _OPTIONS"
echo "variables (e.g. AUTORECONF_OPTIONS) may be used to override the"
echo "default automatic detection behavior."
echo
ident
return 0
}
##########################
# VERSION_ERROR FUNCTION #
##########################
version_error ( ) {
if [ "x$1" = "x" ] ; then
echo "INTERNAL ERROR: version_error was not provided a version"
exit 1
fi
if [ "x$2" = "x" ] ; then
echo "INTERNAL ERROR: version_error was not provided an application name"
exit 1
fi
$ECHO
$ECHO "ERROR: To prepare the ${PROJECT} build system from scratch,"
$ECHO " at least version $1 of $2 must be installed."
$ECHO
$ECHO "$NAME_OF_AUTOGEN does not need to be run on the same machine that will"
$ECHO "run configure or make. Either the GNU Autotools will need to be installed"
$ECHO "or upgraded on this system, or $NAME_OF_AUTOGEN must be run on the source"
$ECHO "code on another system and then transferred to here. -- Cheers!"
$ECHO
}
##########################
# VERSION_CHECK FUNCTION #
##########################
version_check ( ) {
if [ "x$1" = "x" ] ; then
echo "INTERNAL ERROR: version_check was not provided a minimum version"
exit 1
fi
_min="$1"
if [ "x$2" = "x" ] ; then
echo "INTERNAL ERROR: version check was not provided a comparison version"
exit 1
fi
_cur="$2"
# needed to handle versions like 1.10 and 1.4-p6
_min="`echo ${_min}. | sed 's/[^0-9]/./g' | sed 's/\.\././g'`"
_cur="`echo ${_cur}. | sed 's/[^0-9]/./g' | sed 's/\.\././g'`"
_min_major="`echo $_min | cut -d. -f1`"
_min_minor="`echo $_min | cut -d. -f2`"
_min_patch="`echo $_min | cut -d. -f3`"
_cur_major="`echo $_cur | cut -d. -f1`"
_cur_minor="`echo $_cur | cut -d. -f2`"
_cur_patch="`echo $_cur | cut -d. -f3`"
if [ "x$_min_major" = "x" ] ; then
_min_major=0
fi
if [ "x$_min_minor" = "x" ] ; then
_min_minor=0
fi
if [ "x$_min_patch" = "x" ] ; then
_min_patch=0
fi
if [ "x$_cur_minor" = "x" ] ; then
_cur_major=0
fi
if [ "x$_cur_minor" = "x" ] ; then
_cur_minor=0
fi
if [ "x$_cur_patch" = "x" ] ; then
_cur_patch=0
fi
$VERBOSE_ECHO "Checking if ${_cur_major}.${_cur_minor}.${_cur_patch} is greater than ${_min_major}.${_min_minor}.${_min_patch}"
if [ $_min_major -lt $_cur_major ] ; then
return 0
elif [ $_min_major -eq $_cur_major ] ; then
if [ $_min_minor -lt $_cur_minor ] ; then
return 0
elif [ $_min_minor -eq $_cur_minor ] ; then
if [ $_min_patch -lt $_cur_patch ] ; then
return 0
elif [ $_min_patch -eq $_cur_patch ] ; then
return 0
fi
fi
fi
return 1
}
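# A commented usage sketch of version_check (values are illustrative only):
# the function returns 0 when the second (detected) version is at least the
# first (required minimum), and 1 otherwise.
#
#   version_check "2.52" "2.69" && echo "autoconf 2.69 satisfies the 2.52 minimum"
#   version_check "1.6.0" "1.4-p6" || echo "automake 1.4-p6 is older than the 1.6.0 minimum"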
######################################
# LOCATE_CONFIGURE_TEMPLATE FUNCTION #
######################################
locate_configure_template ( ) {
_pwd="`pwd`"
if test -f "./configure.ac" ; then
echo "./configure.ac"
elif test -f "./configure.in" ; then
echo "./configure.in"
elif test -f "$_pwd/configure.ac" ; then
echo "$_pwd/configure.ac"
elif test -f "$_pwd/configure.in" ; then
echo "$_pwd/configure.in"
elif test -f "$PATH_TO_AUTOGEN/configure.ac" ; then
echo "$PATH_TO_AUTOGEN/configure.ac"
elif test -f "$PATH_TO_AUTOGEN/configure.in" ; then
echo "$PATH_TO_AUTOGEN/configure.in"
fi
}
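# A commented usage sketch: locate_configure_template prints the path of the
# first configure.ac/configure.in it finds (or nothing at all), so callers
# capture its output, e.g.:
#
#   _tmpl="`locate_configure_template`"
#   if [ "x$_tmpl" = "x" ] ; then echo "no configure template found" ; fi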
##################
# argument check #
##################
ARGS="$*"
PATH_TO_AUTOGEN="`dirname $0`"
NAME_OF_AUTOGEN="`basename $0`"
AUTOGEN_SH="$PATH_TO_AUTOGEN/$NAME_OF_AUTOGEN"
LIBTOOL_M4="${PATH_TO_AUTOGEN}/misc/libtool.m4"
if [ "x$HELP" = "x" ] ; then
HELP=no
fi
if [ "x$QUIET" = "x" ] ; then
QUIET=no
fi
if [ "x$VERBOSE" = "x" ] ; then
VERBOSE=no
fi
if [ "x$VERSION_ONLY" = "x" ] ; then
VERSION_ONLY=no
fi
if [ "x$DOWNLOAD" = "x" ] ; then
DOWNLOAD=no
fi
if [ "x$AUTORECONF_OPTIONS" = "x" ] ; then
AUTORECONF_OPTIONS="-i -f"
fi
if [ "x$AUTOCONF_OPTIONS" = "x" ] ; then
AUTOCONF_OPTIONS="-f"
fi
if [ "x$AUTOMAKE_OPTIONS" = "x" ] ; then
AUTOMAKE_OPTIONS="-a -c -f"
fi
ALT_AUTOMAKE_OPTIONS="-a -c"
if [ "x$LIBTOOLIZE_OPTIONS" = "x" ] ; then
LIBTOOLIZE_OPTIONS="--automake -c -f"
fi
ALT_LIBTOOLIZE_OPTIONS="--automake --copy --force"
if [ "x$ACLOCAL_OPTIONS" = "x" ] ; then
ACLOCAL_OPTIONS=""
fi
if [ "x$AUTOHEADER_OPTIONS" = "x" ] ; then
AUTOHEADER_OPTIONS=""
fi
if [ "x$CONFIG_GUESS_URL" = "x" ] ; then
CONFIG_GUESS_URL="http://git.savannah.gnu.org/gitweb/?p=gnulib.git;a=blob_plain;f=build-aux/config.guess;hb=HEAD"
fi
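# A commented example of overriding the defaults above from the environment
# (tool name and option values are illustrative only):
#
#   LIBTOOLIZE=glibtoolize AUTORECONF_OPTIONS="-i -f -v" ./autogen.sh --verbose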
for arg in $ARGS ; do
case "x$arg" in
x--help) HELP=yes ;;
x-[hH]) HELP=yes ;;
x--quiet) QUIET=yes ;;
x-[qQ]) QUIET=yes ;;
x--verbose) VERBOSE=yes ;;
x-[dD]) DOWNLOAD=yes ;;
x--download) DOWNLOAD=yes ;;
x-[vV]) VERBOSE=yes ;;
x--version) VERSION_ONLY=yes ;;
*)
echo "Unknown option: $arg"
echo
usage
exit 1
;;
esac
done
#####################
# environment check #
#####################
# sanity check before recursions potentially begin
if [ ! -f "$AUTOGEN_SH" ] ; then
echo "INTERNAL ERROR: $AUTOGEN_SH does not exist"
if [ ! "x$0" = "x$AUTOGEN_SH" ] ; then
echo "INTERNAL ERROR: dirname/basename inconsistency: $0 != $AUTOGEN_SH"
fi
exit 1
fi
# force locale setting to C so things like date output as expected
LC_ALL=C
# commands that this script expects
for __cmd in echo head tail pwd ; do
echo "test" | $__cmd > /dev/null 2>&1
if [ $? != 0 ] ; then
echo "INTERNAL ERROR: '${__cmd}' command is required"
exit 2
fi
done
echo "test" | grep "test" > /dev/null 2>&1
if test ! x$? = x0 ; then
echo "INTERNAL ERROR: grep command is required"
exit 1
fi
echo "test" | sed "s/test/test/" > /dev/null 2>&1
if test ! x$? = x0 ; then
echo "INTERNAL ERROR: sed command is required"
exit 1
fi
# determine the behavior of echo
case `echo "testing\c"; echo 1,2,3`,`echo -n testing; echo 1,2,3` in
*c*,-n*) ECHO_N= ECHO_C='
' ECHO_T=' ' ;;
*c*,* ) ECHO_N=-n ECHO_C= ECHO_T= ;;
*) ECHO_N= ECHO_C='\c' ECHO_T= ;;
esac
# determine the behavior of head
case "x`echo 'head' | head -n 1 2>&1`" in
*xhead*) HEAD_N="n " ;;
*) HEAD_N="" ;;
esac
# determine the behavior of tail
case "x`echo 'tail' | tail -n 1 2>&1`" in
*xtail*) TAIL_N="n " ;;
*) TAIL_N="" ;;
esac
VERBOSE_ECHO=:
ECHO=:
if [ "x$QUIET" = "xyes" ] ; then
if [ "x$VERBOSE" = "xyes" ] ; then
echo "Verbose output quelled by quiet option. Further output disabled."
fi
else
ECHO=echo
if [ "x$VERBOSE" = "xyes" ] ; then
echo "Verbose output enabled"
VERBOSE_ECHO=echo
fi
fi
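# A commented illustration of the portability variables set above: printing a
# message without a trailing newline works with either echo flavor, e.g.:
#
#   $ECHO $ECHO_N "Preparing ... $ECHO_C"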
# allow a recursive run to disable further recursions
if [ "x$RUN_RECURSIVE" = "x" ] ; then
RUN_RECURSIVE=yes
fi
################################################
# check for help arg and bypass version checks #
################################################
if [ "x`echo $ARGS | sed 's/.*[hH][eE][lL][pP].*/help/'`" = "xhelp" ] ; then
HELP=yes
fi
if [ "x$HELP" = "xyes" ] ; then
usage
$ECHO "---"
$ECHO "Help was requested. No preparation or configuration will be performed."
exit 0
fi
#######################
# set up signal traps #
#######################
untrap_abnormal ( ) {
for sig in 1 2 13 15; do
trap - $sig
done
}
# do this cleanup whenever we exit.
trap '
# start from the root
if test -d "$START_PATH" ; then
cd "$START_PATH"
fi
# restore/delete backup files
if test "x$PFC_INIT" = "x1" ; then
recursive_restore
fi
' 0
# trap SIGHUP (1), SIGINT (2), SIGPIPE (13), SIGTERM (15)
for sig in 1 2 13 15; do
trap '
$ECHO ""
$ECHO "Aborting $NAME_OF_AUTOGEN: caught signal '$sig'"
# start from the root
if test -d "$START_PATH" ; then
cd "$START_PATH"
fi
# clean up on abnormal exit
$VERBOSE_ECHO "rm -rf autom4te.cache"
rm -rf autom4te.cache
if test -f "acinclude.m4.$$.backup" ; then
$VERBOSE_ECHO "cat acinclude.m4.$$.backup > acinclude.m4"
chmod u+w acinclude.m4
cat acinclude.m4.$$.backup > acinclude.m4
$VERBOSE_ECHO "rm -f acinclude.m4.$$.backup"
rm -f acinclude.m4.$$.backup
fi
{ (exit 1); exit 1; }
' $sig
done
#############################
# look for a configure file #
#############################
if [ "x$CONFIGURE" = "x" ] ; then
CONFIGURE="`locate_configure_template`"
if [ ! "x$CONFIGURE" = "x" ] ; then
$VERBOSE_ECHO "Found a configure template: $CONFIGURE"
fi
else
$ECHO "Using CONFIGURE environment variable override: $CONFIGURE"
fi
if [ "x$CONFIGURE" = "x" ] ; then
if [ "x$VERSION_ONLY" = "xyes" ] ; then
CONFIGURE=/dev/null
else
$ECHO
$ECHO "A configure.ac or configure.in file could not be located implying"
$ECHO "that the GNU Build System is at least not used in this directory. In"
$ECHO "any case, there is nothing to do here without one of those files."
$ECHO
$ECHO "ERROR: No configure.in or configure.ac file found in `pwd`"
exit 1
fi
fi
####################
# get project name #
####################
if [ "x$PROJECT" = "x" ] ; then
PROJECT="`grep AC_INIT $CONFIGURE | grep -v '.*#.*AC_INIT' | tail -${TAIL_N}1 | sed 's/^[ ]*AC_INIT(\([^,)]*\).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`"
if [ "x$PROJECT" = "xAC_INIT" ] ; then
# projects might be using the older/deprecated arg-less AC_INIT .. look for AM_INIT_AUTOMAKE instead
PROJECT="`grep AM_INIT_AUTOMAKE $CONFIGURE | grep -v '.*#.*AM_INIT_AUTOMAKE' | tail -${TAIL_N}1 | sed 's/^[ ]*AM_INIT_AUTOMAKE(\([^,)]*\).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`"
fi
if [ "x$PROJECT" = "xAM_INIT_AUTOMAKE" ] ; then
PROJECT="project"
fi
if [ "x$PROJECT" = "x" ] ; then
PROJECT="project"
fi
else
$ECHO "Using PROJECT environment variable override: $PROJECT"
fi
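# A commented example of the line this detection parses (hypothetical values):
# given 'AC_INIT([openfpm_pdata], [1.0.0], [bugs@example.org])' in configure.ac,
# the grep/sed pipeline above sets PROJECT to "openfpm_pdata".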
$ECHO "Preparing the $PROJECT build system...please wait"
$ECHO
########################
# check for autoreconf #
########################
HAVE_AUTORECONF=no
if [ "x$AUTORECONF" = "x" ] ; then
for AUTORECONF in autoreconf ; do
$VERBOSE_ECHO "Checking autoreconf version: $AUTORECONF --version"
$AUTORECONF --version > /dev/null 2>&1
if [ $? = 0 ] ; then
HAVE_AUTORECONF=yes
break
fi
done
else
HAVE_AUTORECONF=yes
$ECHO "Using AUTORECONF environment variable override: $AUTORECONF"
fi
##########################
# autoconf version check #
##########################
_acfound=no
if [ "x$AUTOCONF" = "x" ] ; then
for AUTOCONF in autoconf ; do
$VERBOSE_ECHO "Checking autoconf version: $AUTOCONF --version"
$AUTOCONF --version > /dev/null 2>&1
if [ $? = 0 ] ; then
_acfound=yes
break
fi
done
else
_acfound=yes
$ECHO "Using AUTOCONF environment variable override: $AUTOCONF"
fi
_report_error=no
if [ ! "x$_acfound" = "xyes" ] ; then
$ECHO "ERROR: Unable to locate GNU Autoconf."
_report_error=yes
else
_version="`$AUTOCONF --version | head -${HEAD_N}1 | sed 's/[^0-9]*\([0-9\.][0-9\.]*\)/\1/'`"
if [ "x$_version" = "x" ] ; then
_version="0.0.0"
fi
$ECHO "Found GNU Autoconf version $_version"
version_check "$AUTOCONF_VERSION" "$_version"
if [ $? -ne 0 ] ; then
_report_error=yes
fi
fi
if [ "x$_report_error" = "xyes" ] ; then
version_error "$AUTOCONF_VERSION" "GNU Autoconf"
exit 1
fi
##########################
# automake version check #
##########################
_amfound=no
if [ "x$AUTOMAKE" = "x" ] ; then
for AUTOMAKE in automake ; do
$VERBOSE_ECHO "Checking automake version: $AUTOMAKE --version"
$AUTOMAKE --version > /dev/null 2>&1
if [ $? = 0 ] ; then
_amfound=yes
break
fi
done
else
_amfound=yes
$ECHO "Using AUTOMAKE environment variable override: $AUTOMAKE"
fi
_report_error=no
if [ ! "x$_amfound" = "xyes" ] ; then
$ECHO
$ECHO "ERROR: Unable to locate GNU Automake."
_report_error=yes
else
_version="`$AUTOMAKE --version | head -${HEAD_N}1 | sed 's/[^0-9]*\([0-9\.][0-9\.]*\)/\1/'`"
if [ "x$_version" = "x" ] ; then
_version="0.0.0"
fi
$ECHO "Found GNU Automake version $_version"
version_check "$AUTOMAKE_VERSION" "$_version"
if [ $? -ne 0 ] ; then
_report_error=yes
fi
fi
if [ "x$_report_error" = "xyes" ] ; then
version_error "$AUTOMAKE_VERSION" "GNU Automake"
exit 1
fi
########################
# check for libtoolize #
########################
HAVE_LIBTOOLIZE=yes
HAVE_ALT_LIBTOOLIZE=no
_ltfound=no
if [ "x$LIBTOOLIZE" = "x" ] ; then
LIBTOOLIZE=libtoolize
$VERBOSE_ECHO "Checking libtoolize version: $LIBTOOLIZE --version"
$LIBTOOLIZE --version > /dev/null 2>&1
if [ ! $? = 0 ] ; then
HAVE_LIBTOOLIZE=no
$ECHO
if [ "x$HAVE_AUTORECONF" = "xno" ] ; then
$ECHO "Warning: libtoolize does not appear to be available."
else
$ECHO "Warning: libtoolize does not appear to be available. This means that"
$ECHO "the automatic build preparation via autoreconf will probably not work."
$ECHO "Preparing the build by running each step individually, however, should"
$ECHO "work and will be done automatically for you if autoreconf fails."
fi
# look for some alternates
for tool in glibtoolize libtoolize15 libtoolize14 libtoolize13 ; do
$VERBOSE_ECHO "Checking libtoolize alternate: $tool --version"
_glibtoolize="`$tool --version > /dev/null 2>&1`"
if [ $? = 0 ] ; then
$VERBOSE_ECHO "Found $tool --version"
_glti="`which $tool`"
if [ "x$_glti" = "x" ] ; then
$VERBOSE_ECHO "Cannot find $tool with which"
continue;
fi
if test ! -f "$_glti" ; then
$VERBOSE_ECHO "Cannot use $tool, $_glti is not a file"
continue;
fi
_gltidir="`dirname $_glti`"
if [ "x$_gltidir" = "x" ] ; then
$VERBOSE_ECHO "Cannot find $tool path with dirname of $_glti"
continue;
fi
if test ! -d "$_gltidir" ; then
$VERBOSE_ECHO "Cannot use $tool, $_gltidir is not a directory"
continue;
fi
HAVE_ALT_LIBTOOLIZE=yes
LIBTOOLIZE="$tool"
$ECHO
$ECHO "Fortunately, $tool was found which means that your system may simply"
$ECHO "have a non-standard or incomplete GNU Autotools install. If you have"
$ECHO "sufficient system access, it may be possible to quell this warning by"
$ECHO "running:"
$ECHO
sudo -V > /dev/null 2>&1
if [ $? = 0 ] ; then
$ECHO " sudo ln -s $_glti $_gltidir/libtoolize"
$ECHO
else
$ECHO " ln -s $_glti $_gltidir/libtoolize"
$ECHO
$ECHO "Run that as root or with proper permissions to the $_gltidir directory"
$ECHO
fi
_ltfound=yes
break
fi
done
else
_ltfound=yes
fi
else
_ltfound=yes
$ECHO "Using LIBTOOLIZE environment variable override: $LIBTOOLIZE"
fi
############################
# libtoolize version check #
############################
_report_error=no
if [ ! "x$_ltfound" = "xyes" ] ; then
$ECHO
$ECHO "ERROR: Unable to locate GNU Libtool."
_report_error=yes
else
_version="`$LIBTOOLIZE --version | head -${HEAD_N}1 | sed 's/[^0-9]*\([0-9\.][0-9\.]*\)/\1/'`"
if [ "x$_version" = "x" ] ; then
_version="0.0.0"
fi
$ECHO "Found GNU Libtool version $_version"
version_check "$LIBTOOL_VERSION" "$_version"
if [ $? -ne 0 ] ; then
_report_error=yes
fi
fi
if [ "x$_report_error" = "xyes" ] ; then
version_error "$LIBTOOL_VERSION" "GNU Libtool"
exit 1
fi
#####################
# check for aclocal #
#####################
if [ "x$ACLOCAL" = "x" ] ; then
for ACLOCAL in aclocal ; do
$VERBOSE_ECHO "Checking aclocal version: $ACLOCAL --version"
$ACLOCAL --version > /dev/null 2>&1
if [ $? = 0 ] ; then
break
fi
done
else
$ECHO "Using ACLOCAL environment variable override: $ACLOCAL"
fi
########################
# check for autoheader #
########################
if [ "x$AUTOHEADER" = "x" ] ; then
for AUTOHEADER in autoheader ; do
$VERBOSE_ECHO "Checking autoheader version: $AUTOHEADER --version"
$AUTOHEADER --version > /dev/null 2>&1
if [ $? = 0 ] ; then
break
fi
done
else
$ECHO "Using AUTOHEADER environment variable override: $AUTOHEADER"
fi
#########################
# check if version only #
#########################
$VERBOSE_ECHO "Checking whether to only output version information"
if [ "x$VERSION_ONLY" = "xyes" ] ; then
$ECHO
ident
$ECHO "---"
$ECHO "Version requested. No preparation or configuration will be performed."
exit 0
fi
#################################
# PROTECT_FROM_CLOBBER FUNCTION #
#################################
protect_from_clobber ( ) {
PFC_INIT=1
# protect COPYING & INSTALL from overwrite by automake. the
# automake force option will (inappropriately) ignore the existing
# contents of COPYING and/or INSTALL files (depending on the
# version) instead of just forcing *missing* files like it does
# for AUTHORS, NEWS, and README. this is broken but extremely
# prevalent behavior, so we protect against it by keeping a backup
# of the file that can later be restored.
for file in COPYING INSTALL ; do
if test -f ${file} ; then
if test -f ${file}.$$.protect_from_automake.backup ; then
$VERBOSE_ECHO "Already backed up ${file} in `pwd`"
else
$VERBOSE_ECHO "Backing up ${file} in `pwd`"
$VERBOSE_ECHO "cp -p ${file} ${file}.$$.protect_from_automake.backup"
cp -p ${file} ${file}.$$.protect_from_automake.backup
fi
fi
done
}
##############################
# RECURSIVE_PROTECT FUNCTION #
##############################
recursive_protect ( ) {
# for projects using recursive configure, run the build
# preparation steps for the subdirectories. this function assumes
# START_PATH was set to pwd before recursion begins so that
# relative paths work.
# git 'r done, protect COPYING and INSTALL from being clobbered
protect_from_clobber
if test -d autom4te.cache ; then
$VERBOSE_ECHO "Found an autom4te.cache directory, deleting it"
$VERBOSE_ECHO "rm -rf autom4te.cache"
rm -rf autom4te.cache
fi
# find configure template
_configure="`locate_configure_template`"
if [ "x$_configure" = "x" ] ; then
return
fi
# $VERBOSE_ECHO "Looking for configure template found `pwd`/$_configure"
# look for subdirs
# $VERBOSE_ECHO "Looking for subdirs in `pwd`"
_det_config_subdirs="`grep AC_CONFIG_SUBDIRS $_configure | grep -v '.*#.*AC_CONFIG_SUBDIRS' | sed 's/^[ ]*AC_CONFIG_SUBDIRS(\(.*\)).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`"
CHECK_DIRS=""
for dir in $_det_config_subdirs ; do
if test -d "`pwd`/$dir" ; then
CHECK_DIRS="$CHECK_DIRS \"`pwd`/$dir\""
fi
done
# process subdirs
if [ ! "x$CHECK_DIRS" = "x" ] ; then
$VERBOSE_ECHO "Recursively scanning the following directories:"
$VERBOSE_ECHO " $CHECK_DIRS"
for dir in $CHECK_DIRS ; do
$VERBOSE_ECHO "Protecting files from automake in $dir"
cd "$START_PATH"
eval "cd $dir"
# recursively git 'r done
recursive_protect
done
fi
} # end of recursive_protect
##############################
# RESTORE_CLOBBERED FUNCTION #
##############################
restore_clobbered ( ) {
# The automake (and autoreconf by extension) -f/--force-missing
# option may overwrite COPYING and INSTALL even if they do exist.
# Here we restore the files if necessary.
spacer=no
for file in COPYING INSTALL ; do
if test -f ${file}.$$.protect_from_automake.backup ; then
if test -f ${file} ; then
# compare entire content, restore if needed
if test "x`cat ${file}`" != "x`cat ${file}.$$.protect_from_automake.backup`" ; then
if test "x$spacer" = "xno" ; then
$VERBOSE_ECHO
spacer=yes
fi
# restore the backup
$VERBOSE_ECHO "Restoring ${file} from backup (automake -f likely clobbered it)"
$VERBOSE_ECHO "rm -f ${file}"
rm -f ${file}
$VERBOSE_ECHO "mv ${file}.$$.protect_from_automake.backup ${file}"
mv ${file}.$$.protect_from_automake.backup ${file}
fi # check contents
elif test -f ${file}.$$.protect_from_automake.backup ; then
$VERBOSE_ECHO "mv ${file}.$$.protect_from_automake.backup ${file}"
mv ${file}.$$.protect_from_automake.backup ${file}
fi # -f ${file}
# just in case
$VERBOSE_ECHO "rm -f ${file}.$$.protect_from_automake.backup"
rm -f ${file}.$$.protect_from_automake.backup
fi # -f ${file}.$$.protect_from_automake.backup
done
CONFIGURE="`locate_configure_template`"
if [ "x$CONFIGURE" = "x" ] ; then
return
fi
_aux_dir="`grep AC_CONFIG_AUX_DIR $CONFIGURE | grep -v '.*#.*AC_CONFIG_AUX_DIR' | tail -${TAIL_N}1 | sed 's/^[ ]*AC_CONFIG_AUX_DIR(\(.*\)).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`"
if test ! -d "$_aux_dir" ; then
_aux_dir=.
fi
for file in config.guess config.sub ltmain.sh ; do
if test -f "${_aux_dir}/${file}" ; then
$VERBOSE_ECHO "rm -f \"${_aux_dir}/${file}.backup\""
rm -f "${_aux_dir}/${file}.backup"
fi
done
} # end of restore_clobbered
##############################
# RECURSIVE_RESTORE FUNCTION #
##############################
recursive_restore ( ) {
# restore COPYING and INSTALL from backup if they were clobbered
# for each directory recursively.
# git 'r undone
restore_clobbered
# find configure template
_configure="`locate_configure_template`"
if [ "x$_configure" = "x" ] ; then
return
fi
# look for subdirs
_det_config_subdirs="`grep AC_CONFIG_SUBDIRS $_configure | grep -v '.*#.*AC_CONFIG_SUBDIRS' | sed 's/^[ ]*AC_CONFIG_SUBDIRS(\(.*\)).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`"
CHECK_DIRS=""
for dir in $_det_config_subdirs ; do
if test -d "`pwd`/$dir" ; then
CHECK_DIRS="$CHECK_DIRS \"`pwd`/$dir\""
fi
done
# process subdirs
if [ ! "x$CHECK_DIRS" = "x" ] ; then
$VERBOSE_ECHO "Recursively scanning the following directories:"
$VERBOSE_ECHO " $CHECK_DIRS"
for dir in $CHECK_DIRS ; do
$VERBOSE_ECHO "Checking files for automake damage in $dir"
cd "$START_PATH"
eval "cd $dir"
# recursively git 'r undone
recursive_restore
done
fi
} # end of recursive_restore
#######################
# INITIALIZE FUNCTION #
#######################
initialize ( ) {
# this routine performs a variety of directory-specific
# initializations. some are sanity checks, some are preventive,
# and some are necessary setup detection.
#
# this function sets:
# CONFIGURE
# SEARCH_DIRS
# CONFIG_SUBDIRS
##################################
# check for a configure template #
##################################
CONFIGURE="`locate_configure_template`"
if [ "x$CONFIGURE" = "x" ] ; then
$ECHO
$ECHO "A configure.ac or configure.in file could not be located implying"
$ECHO "that the GNU Build System is at least not used in this directory. In"
$ECHO "any case, there is nothing to do here without one of those files."
$ECHO
$ECHO "ERROR: No configure.in or configure.ac file found in `pwd`"
exit 1
fi
#####################
# detect an aux dir #
#####################
_aux_dir="`grep AC_CONFIG_AUX_DIR $CONFIGURE | grep -v '.*#.*AC_CONFIG_AUX_DIR' | tail -${TAIL_N}1 | sed 's/^[ ]*AC_CONFIG_AUX_DIR(\(.*\)).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`"
if test ! -d "$_aux_dir" ; then
_aux_dir=.
else
$VERBOSE_ECHO "Detected auxillary directory: $_aux_dir"
fi
################################
# detect a recursive configure #
################################
CONFIG_SUBDIRS=""
_det_config_subdirs="`grep AC_CONFIG_SUBDIRS $CONFIGURE | grep -v '.*#.*AC_CONFIG_SUBDIRS' | sed 's/^[ ]*AC_CONFIG_SUBDIRS(\(.*\)).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`"
for dir in $_det_config_subdirs ; do
if test -d "`pwd`/$dir" ; then
$VERBOSE_ECHO "Detected recursive configure directory: `pwd`/$dir"
CONFIG_SUBDIRS="$CONFIG_SUBDIRS `pwd`/$dir"
fi
done
###########################################################
# make sure certain required files exist for GNU projects #
###########################################################
_marker_found=""
_marker_found_message_intro='Detected non-GNU marker "'
_marker_found_message_mid='" in '
for marker in foreign cygnus ; do
_marker_found_message=${_marker_found_message_intro}${marker}${_marker_found_message_mid}
_marker_found="`grep 'AM_INIT_AUTOMAKE.*'${marker} $CONFIGURE`"
if [ ! "x$_marker_found" = "x" ] ; then
$VERBOSE_ECHO "${_marker_found_message}`basename \"$CONFIGURE\"`"
break
fi
if test -f "`dirname \"$CONFIGURE\"/Makefile.am`" ; then
_marker_found="`grep 'AUTOMAKE_OPTIONS.*'${marker} Makefile.am`"
if [ ! "x$_marker_found" = "x" ] ; then
$VERBOSE_ECHO "${_marker_found_message}Makefile.am"
break
fi
fi
done
if [ "x${_marker_found}" = "x" ] ; then
_suggest_foreign=no
for file in AUTHORS COPYING ChangeLog INSTALL NEWS README ; do
if [ ! -f $file ] ; then
$VERBOSE_ECHO "Touching ${file} since it does not exist"
_suggest_foreign=yes
touch $file
fi
done
if [ "x${_suggest_foreign}" = "xyes" ] ; then
$ECHO
$ECHO "Warning: Several files expected of projects that conform to the GNU"
$ECHO "coding standards were not found. The files were automatically added"
$ECHO "for you since you do not have a 'foreign' declaration specified."
$ECHO
$ECHO "Considered adding 'foreign' to AM_INIT_AUTOMAKE in `basename \"$CONFIGURE\"`"
if test -f "`dirname \"$CONFIGURE\"/Makefile.am`" ; then
$ECHO "or to AUTOMAKE_OPTIONS in your top-level Makefile.am file."
fi
$ECHO
fi
fi
##################################################
# make sure certain generated files do not exist #
##################################################
for file in config.guess config.sub ltmain.sh ; do
if test -f "${_aux_dir}/${file}" ; then
$VERBOSE_ECHO "mv -f \"${_aux_dir}/${file}\" \"${_aux_dir}/${file}.backup\""
mv -f "${_aux_dir}/${file}" "${_aux_dir}/${file}.backup"
fi
done
############################
# search alternate m4 dirs #
############################
SEARCH_DIRS=""
for dir in m4 ; do
if [ -d $dir ] ; then
$VERBOSE_ECHO "Found extra aclocal search directory: $dir"
SEARCH_DIRS="$SEARCH_DIRS -I $dir"
fi
done
######################################
# remove any previous build products #
######################################
if test -d autom4te.cache ; then
$VERBOSE_ECHO "Found an autom4te.cache directory, deleting it"
$VERBOSE_ECHO "rm -rf autom4te.cache"
rm -rf autom4te.cache
fi
# tcl/tk (and probably others) have a customized aclocal.m4, so can't delete it
# if test -f aclocal.m4 ; then
# $VERBOSE_ECHO "Found an aclocal.m4 file, deleting it"
# $VERBOSE_ECHO "rm -f aclocal.m4"
# rm -f aclocal.m4
# fi
} # end of initialize()
##############
# initialize #
##############
# stash path
START_PATH="`pwd`"
# Before running autoreconf or manual steps, some prep detection work
# is necessary or useful. Only needs to occur once per directory, but
# does need to traverse the entire subconfigure hierarchy to protect
# files from being clobbered even by autoreconf.
recursive_protect
# start from where we started
cd "$START_PATH"
# get ready to process
initialize
#########################################
# DOWNLOAD_GNULIB_CONFIG_GUESS FUNCTION #
#########################################
# TODO - should make sure wget/curl exist and/or work before trying to
# use them.
download_gnulib_config_guess () {
# abuse gitweb to download gnulib's latest config.guess via HTTP
config_guess_temp="config.guess.$$.download"
ret=1
for __cmd in wget curl fetch ; do
$VERBOSE_ECHO "Checking for command ${__cmd}"
${__cmd} --version > /dev/null 2>&1
ret=$?
if [ ! $ret = 0 ] ; then
continue
fi
__cmd_version=`${__cmd} --version | head -n 1 | sed -e 's/^[^0-9]\+//' -e 's/ .*//'`
$VERBOSE_ECHO "Found ${__cmd} ${__cmd_version}"
opts=""
case ${__cmd} in
wget)
opts="-O"
;;
curl)
opts="-o"
;;
fetch)
opts="-t 5 -f"
;;
esac
$VERBOSE_ECHO "Running $__cmd \"${CONFIG_GUESS_URL}\" $opts \"${config_guess_temp}\""
eval "$__cmd \"${CONFIG_GUESS_URL}\" $opts \"${config_guess_temp}\"" > /dev/null 2>&1
if [ $? = 0 ] ; then
mv -f "${config_guess_temp}" ${_aux_dir}/config.guess
ret=0
break
fi
done
if [ ! $ret = 0 ] ; then
$ECHO "Warning: config.guess download failed from: $CONFIG_GUESS_URL"
rm -f "${config_guess_temp}"
fi
}
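# A commented usage sketch: this download only runs when -d/--download was
# given (see DOWNLOAD above), and the source URL can be overridden, e.g.
# (the URL below is illustrative only):
#
#   CONFIG_GUESS_URL="https://example.org/config.guess" ./autogen.sh --download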
##############################
# LIBTOOLIZE_NEEDED FUNCTION #
##############################
libtoolize_needed () {
ret=1 # means no, don't need libtoolize
for feature in AC_PROG_LIBTOOL AM_PROG_LIBTOOL LT_INIT ; do
$VERBOSE_ECHO "Searching for $feature in $CONFIGURE"
found="`grep \"^$feature.*\" $CONFIGURE`"
if [ ! "x$found" = "x" ] ; then
ret=0 # means yes, need to run libtoolize
break
fi
done
return ${ret}
}
############################################
# prepare build via autoreconf or manually #
############################################
reconfigure_manually=no
if [ "x$HAVE_AUTORECONF" = "xyes" ] ; then
$ECHO
$ECHO $ECHO_N "Automatically preparing build ... $ECHO_C"
$VERBOSE_ECHO "$AUTORECONF $SEARCH_DIRS $AUTORECONF_OPTIONS"
autoreconf_output="`$AUTORECONF $SEARCH_DIRS $AUTORECONF_OPTIONS 2>&1`"
ret=$?
$VERBOSE_ECHO "$autoreconf_output"
if [ ! $ret = 0 ] ; then
if [ "x$HAVE_ALT_LIBTOOLIZE" = "xyes" ] ; then
if [ ! "x`echo \"$autoreconf_output\" | grep libtoolize | grep \"No such file or directory\"`" = "x" ] ; then
$ECHO
$ECHO "Warning: autoreconf failed but due to what is usually a common libtool"
$ECHO "misconfiguration issue. This problem is encountered on systems that"
$ECHO "have installed libtoolize under a different name without providing a"
$ECHO "symbolic link or without setting the LIBTOOLIZE environment variable."
$ECHO
$ECHO "Restarting the preparation steps with LIBTOOLIZE set to $LIBTOOLIZE"
export LIBTOOLIZE
RUN_RECURSIVE=no
export RUN_RECURSIVE
untrap_abnormal
$VERBOSE_ECHO sh $AUTOGEN_SH "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9"
sh "$AUTOGEN_SH" "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9"
exit $?
fi
fi
$ECHO "Warning: $AUTORECONF failed"
if test -f ltmain.sh ; then
$ECHO "libtoolize being run by autoreconf is not creating ltmain.sh in the auxillary directory like it should"
fi
$ECHO "Attempting to run the preparation steps individually"
reconfigure_manually=yes
else
if [ "x$DOWNLOAD" = "xyes" ] ; then
if libtoolize_needed ; then
download_gnulib_config_guess
fi
fi
fi
else
reconfigure_manually=yes
fi
############################
# LIBTOOL_FAILURE FUNCTION #
############################
libtool_failure ( ) {
# libtool is rather error-prone in comparison to the other
# autotools and this routine attempts to compensate for some
# common failures. the output after a libtoolize failure is
# parsed for an error related to AC_PROG_LIBTOOL and if found, we
# attempt to inject a project-provided libtool.m4 file.
_autoconf_output="$1"
if [ "x$RUN_RECURSIVE" = "xno" ] ; then
# we already tried the libtool.m4, don't try again
return 1
fi
if test -f "$LIBTOOL_M4" ; then
found_libtool="`$ECHO $_autoconf_output | grep AC_PROG_LIBTOOL`"
if test ! "x$found_libtool" = "x" ; then
if test -f acinclude.m4 ; then
rm -f acinclude.m4.$$.backup
$VERBOSE_ECHO "cat acinclude.m4 > acinclude.m4.$$.backup"
cat acinclude.m4 > acinclude.m4.$$.backup
fi
$VERBOSE_ECHO "cat \"$LIBTOOL_M4\" >> acinclude.m4"
chmod u+w acinclude.m4
cat "$LIBTOOL_M4" >> acinclude.m4
# don't keep doing this
RUN_RECURSIVE=no
export RUN_RECURSIVE
untrap_abnormal
$ECHO
$ECHO "Restarting the preparation steps with libtool macros in acinclude.m4"
$VERBOSE_ECHO sh $AUTOGEN_SH "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9"
sh "$AUTOGEN_SH" "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9"
exit $?
fi
fi
}
###########################
# MANUAL_AUTOGEN FUNCTION #
###########################
manual_autogen ( ) {
##################################################
# Manual preparation steps taken are as follows: #
# aclocal [-I m4] #
# libtoolize --automake -c -f #
# aclocal [-I m4] #
# autoconf -f #
# autoheader #
# automake -a -c -f #
##################################################
###########
# aclocal #
###########
$VERBOSE_ECHO "$ACLOCAL $SEARCH_DIRS $ACLOCAL_OPTIONS"
aclocal_output="`$ACLOCAL $SEARCH_DIRS $ACLOCAL_OPTIONS 2>&1`"
ret=$?
$VERBOSE_ECHO "$aclocal_output"
if [ ! $ret = 0 ] ; then $ECHO "ERROR: $ACLOCAL failed" && exit 2 ; fi
##############
# libtoolize #
##############
if libtoolize_needed ; then
if [ "x$HAVE_LIBTOOLIZE" = "xyes" ] ; then
$VERBOSE_ECHO "$LIBTOOLIZE $LIBTOOLIZE_OPTIONS"
libtoolize_output="`$LIBTOOLIZE $LIBTOOLIZE_OPTIONS 2>&1`"
ret=$?
$VERBOSE_ECHO "$libtoolize_output"
if [ ! $ret = 0 ] ; then $ECHO "ERROR: $LIBTOOLIZE failed" && exit 2 ; fi
else
if [ "x$HAVE_ALT_LIBTOOLIZE" = "xyes" ] ; then
$VERBOSE_ECHO "$LIBTOOLIZE $ALT_LIBTOOLIZE_OPTIONS"
libtoolize_output="`$LIBTOOLIZE $ALT_LIBTOOLIZE_OPTIONS 2>&1`"
ret=$?
$VERBOSE_ECHO "$libtoolize_output"
if [ ! $ret = 0 ] ; then $ECHO "ERROR: $LIBTOOLIZE failed" && exit 2 ; fi
fi
fi
###########
# aclocal #
###########
# re-run again as instructed by libtoolize
$VERBOSE_ECHO "$ACLOCAL $SEARCH_DIRS $ACLOCAL_OPTIONS"
aclocal_output="`$ACLOCAL $SEARCH_DIRS $ACLOCAL_OPTIONS 2>&1`"
ret=$?
$VERBOSE_ECHO "$aclocal_output"
# libtoolize might put ltmain.sh in the wrong place
if test -f ltmain.sh ; then
if test ! -f "${_aux_dir}/ltmain.sh" ; then
$ECHO
$ECHO "Warning: $LIBTOOLIZE is creating ltmain.sh in the wrong directory"
$ECHO
$ECHO "Fortunately, the problem can be worked around by simply copying the"
$ECHO "file to the appropriate location (${_aux_dir}/). This has been done for you."
$ECHO
$VERBOSE_ECHO "cp -p ltmain.sh \"${_aux_dir}/ltmain.sh\""
cp -p ltmain.sh "${_aux_dir}/ltmain.sh"
$ECHO $ECHO_N "Continuing build preparation ... $ECHO_C"
fi
fi # ltmain.sh
if [ "x$DOWNLOAD" = "xyes" ] ; then
download_gnulib_config_guess
fi
fi # libtoolize_needed
############
# autoconf #
############
$VERBOSE_ECHO
$VERBOSE_ECHO "$AUTOCONF $AUTOCONF_OPTIONS"
autoconf_output="`$AUTOCONF $AUTOCONF_OPTIONS 2>&1`"
ret=$?
$VERBOSE_ECHO "$autoconf_output"
if [ ! $ret = 0 ] ; then
# retry without the -f and check for usage of macros that are too new
ac2_59_macros="AC_C_RESTRICT AC_INCLUDES_DEFAULT AC_LANG_ASSERT AC_LANG_WERROR AS_SET_CATFILE"
ac2_55_macros="AC_COMPILER_IFELSE AC_FUNC_MBRTOWC AC_HEADER_STDBOOL AC_LANG_CONFTEST AC_LANG_SOURCE AC_LANG_PROGRAM AC_LANG_CALL AC_LANG_FUNC_TRY_LINK AC_MSG_FAILURE AC_PREPROC_IFELSE"
ac2_54_macros="AC_C_BACKSLASH_A AC_CONFIG_LIBOBJ_DIR AC_GNU_SOURCE AC_PROG_EGREP AC_PROG_FGREP AC_REPLACE_FNMATCH AC_FUNC_FNMATCH_GNU AC_FUNC_REALLOC AC_TYPE_MBSTATE_T"
macros_to_search=""
ac_major="`echo ${AUTOCONF_VERSION}. | cut -d. -f1 | sed 's/[^0-9]//g'`"
ac_minor="`echo ${AUTOCONF_VERSION}. | cut -d. -f2 | sed 's/[^0-9]//g'`"
if [ $ac_major -lt 2 ] ; then
macros_to_search="$ac2_59_macros $ac2_55_macros $ac2_54_macros"
else
if [ $ac_minor -lt 54 ] ; then
macros_to_search="$ac2_59_macros $ac2_55_macros $ac2_54_macros"
elif [ $ac_minor -lt 55 ] ; then
macros_to_search="$ac2_59_macros $ac2_55_macros"
elif [ $ac_minor -lt 59 ] ; then
macros_to_search="$ac2_59_macros"
fi
fi
configure_ac_macros=__none__
for feature in $macros_to_search ; do
$VERBOSE_ECHO "Searching for $feature in $CONFIGURE"
found="`grep \"^$feature.*\" $CONFIGURE`"
if [ ! "x$found" = "x" ] ; then
if [ "x$configure_ac_macros" = "x__none__" ] ; then
configure_ac_macros="$feature"
else
configure_ac_macros="$feature $configure_ac_macros"
fi
fi
done
if [ ! "x$configure_ac_macros" = "x__none__" ] ; then
$ECHO
$ECHO "Warning: Unsupported macros were found in $CONFIGURE"
$ECHO
$ECHO "The `basename \"$CONFIGURE\"` file was scanned in order to determine if any"
$ECHO "unsupported macros are used that exceed the minimum version"
$ECHO "settings specified within this file. As such, the following macros"
$ECHO "should be removed from configure.ac or the version numbers in this"
$ECHO "file should be increased:"
$ECHO
$ECHO "$configure_ac_macros"
$ECHO
$ECHO $ECHO_N "Ignorantly continuing build preparation ... $ECHO_C"
fi
###################
# autoconf, retry #
###################
$VERBOSE_ECHO
$VERBOSE_ECHO "$AUTOCONF"
autoconf_output="`$AUTOCONF 2>&1`"
ret=$?
$VERBOSE_ECHO "$autoconf_output"
if [ ! $ret = 0 ] ; then
# test if libtool is busted
libtool_failure "$autoconf_output"
# let the user know what went wrong
cat <<EOF
$autoconf_output
EOF
$ECHO "ERROR: $AUTOCONF failed"
exit 2
else
# autoconf sans -f and possibly sans unsupported options succeeded, so warn verbosely
$ECHO
$ECHO "Warning: autoconf seems to have succeeded by removing the following options:"
$ECHO " AUTOCONF_OPTIONS=\"$AUTOCONF_OPTIONS\""
$ECHO
$ECHO "Removing those options should not be necessary and indicate some other"
$ECHO "problem with the build system. The build preparation is highly suspect"
$ECHO "and may result in configuration or compilation errors. Consider"
if [ "x$VERBOSE_ECHO" = "x:" ] ; then
$ECHO "rerunning the build preparation with verbose output enabled."
$ECHO " $AUTOGEN_SH --verbose"
else
$ECHO "reviewing the minimum GNU Autotools version settings contained in"
$ECHO "this script along with the macros being used in your `basename \"$CONFIGURE\"` file."
fi
$ECHO
$ECHO $ECHO_N "Continuing build preparation ... $ECHO_C"
fi # autoconf ret = 0
fi # autoconf ret = 0
##############
# autoheader #
##############
need_autoheader=no
for feature in AM_CONFIG_HEADER AC_CONFIG_HEADER ; do
$VERBOSE_ECHO "Searching for $feature in $CONFIGURE"
found="`grep \"^$feature.*\" $CONFIGURE`"
if [ ! "x$found" = "x" ] ; then
need_autoheader=yes
break
fi
done
if [ "x$need_autoheader" = "xyes" ] ; then
$VERBOSE_ECHO "$AUTOHEADER $AUTOHEADER_OPTIONS"
autoheader_output="`$AUTOHEADER $AUTOHEADER_OPTIONS 2>&1`"
ret=$?
$VERBOSE_ECHO "$autoheader_output"
if [ ! $ret = 0 ] ; then $ECHO "ERROR: $AUTOHEADER failed" && exit 2 ; fi
fi # need_autoheader
############
# automake #
############
need_automake=no
for feature in AM_INIT_AUTOMAKE ; do
$VERBOSE_ECHO "Searching for $feature in $CONFIGURE"
found="`grep \"^$feature.*\" $CONFIGURE`"
if [ ! "x$found" = "x" ] ; then
need_automake=yes
break
fi
done
if [ "x$need_automake" = "xyes" ] ; then
$VERBOSE_ECHO "$AUTOMAKE $AUTOMAKE_OPTIONS"
automake_output="`$AUTOMAKE $AUTOMAKE_OPTIONS 2>&1`"
ret=$?
$VERBOSE_ECHO "$automake_output"
if [ ! $ret = 0 ] ; then
###################
# automake, retry #
###################
$VERBOSE_ECHO
$VERBOSE_ECHO "$AUTOMAKE $ALT_AUTOMAKE_OPTIONS"
# retry without the -f
automake_output="`$AUTOMAKE $ALT_AUTOMAKE_OPTIONS 2>&1`"
ret=$?
$VERBOSE_ECHO "$automake_output"
if [ ! $ret = 0 ] ; then
# test if libtool is busted
libtool_failure "$automake_output"
# let the user know what went wrong
cat <<EOF
$automake_output
EOF
$ECHO "ERROR: $AUTOMAKE failed"
exit 2
fi # automake retry
fi # automake ret = 0
fi # need_automake
} # end of manual_autogen
#####################################
# RECURSIVE_MANUAL_AUTOGEN FUNCTION #
#####################################
recursive_manual_autogen ( ) {
# run the build preparation steps manually for this directory
manual_autogen
# for projects using recursive configure, run the build
# preparation steps for the subdirectories.
if [ ! "x$CONFIG_SUBDIRS" = "x" ] ; then
$VERBOSE_ECHO "Recursively configuring the following directories:"
$VERBOSE_ECHO " $CONFIG_SUBDIRS"
for dir in $CONFIG_SUBDIRS ; do
$VERBOSE_ECHO "Processing recursive configure in $dir"
cd "$START_PATH"
cd "$dir"
# new directory, prepare
initialize
# run manual steps for the subdir and any others below
recursive_manual_autogen
done
fi
}
################################
# run manual preparation steps #
################################
if [ "x$reconfigure_manually" = "xyes" ] ; then
$ECHO
$ECHO $ECHO_N "Preparing build ... $ECHO_C"
recursive_manual_autogen
fi
#########################
# restore and summarize #
#########################
cd "$START_PATH"
# restore COPYING and INSTALL from backup if necessary
recursive_restore
# make sure we end up with a configure script
config_ac="`locate_configure_template`"
config="`echo $config_ac | sed 's/\.ac$//' | sed 's/\.in$//'`"
if [ "x$config" = "x" ] ; then
$VERBOSE_ECHO "Could not locate the configure template (from `pwd`)"
fi
# summarize
$ECHO "done"
$ECHO
if test "x$config" = "x" -o ! -f "$config" ; then
$ECHO "WARNING: The $PROJECT build system should now be prepared but there"
$ECHO "does not seem to be a resulting configure file. This is unexpected"
$ECHO "and likely the result of an error. You should run $NAME_OF_AUTOGEN"
$ECHO "with the --verbose option to get more details on a potential"
$ECHO "misconfiguration."
else
$ECHO "The $PROJECT build system is now prepared. To build here, run:"
$ECHO " $config"
$ECHO " make"
fi
# Local Variables:
# mode: sh
# tab-width: 8
# sh-basic-offset: 4
# sh-indentation: 4
# indent-tabs-mode: t
# End:
# ex: shiftwidth=4 tabstop=8
#! /bin/bash
# Make a directory in /tmp/OpenFPM_pdata
echo "Directory: $1"
echo "Machine: $2"
mkdir src/config
git submodule init
if [ $? -ne 0 ]; then
echo -e "Configure\033[91;5;1m FAILED \033[0m"
exit 1
fi
git submodule update
if [ $? -ne 0 ]; then
echo -e "Configure\033[91;5;1m FAILED \033[0m"
exit 1
fi
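# Note (commented sketch only, not executed): the submodule init/update steps
# above are equivalent to the single command
#   git submodule update --init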
mkdir openfpm_numerics/src/config
# pull from all the projects
cd openfpm_data
git checkout develop
mkdir src/config
git pull origin develop
if [ $? -ne 0 ]; then
echo -e "Configure\033[91;5;1m FAILED \033[0m"
exit 1
fi
cd ..
cd openfpm_devices
mkdir src/config
git pull origin master
if [ $? -ne 0 ]; then
echo -e "Configure\033[91;5;1m FAILED \033[0m"
exit 1
fi
cd ..
cd openfpm_vcluster
mkdir src/config
git pull origin master
if [ $? -ne 0 ]; then
echo -e "Configure\033[91;5;1m FAILED \033[0m"
exit 1
fi
cd ..
cd openfpm_io
mkdir src/config
git pull origin master
if [ $? -ne 0 ]; then
echo -e "Configure\033[91;5;1m FAILED \033[0m"
exit 1
fi
cd ..
if [ "$2" == "gin" ]
then
echo "Compiling on gin\n"
source ~/.bashrc
module load gcc/4.9.2
./install -s -c "--prefix=/home/jenkins/openfpm_install"
make
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
make install
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
mpirun -np 2 ./src/pdata
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
mpirun -np 3 ./src/pdata
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
mpirun -np 4 ./src/pdata
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
mpirun -np 5 ./src/pdata
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
mpirun -np 6 ./src/pdata
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
mpirun -np 7 ./src/pdata
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
mpirun -np 8 ./src/pdata
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
mpirun -np 9 ./src/pdata
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
mpirun -np 10 ./src/pdata
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
mpirun -np 11 ./src/pdata
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
mpirun -np 12 ./src/pdata
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
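# A commented sketch (not the script's current form): the per-process-count
# test runs above could be collapsed into a loop with the same Slack
# notification on failure; notify_failure is a hypothetical helper.
#
#   for np in 2 3 4 5 6 7 8 9 10 11 12 ; do
#     mpirun -np $np ./src/pdata || notify_failure "$2"
#   done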
elif [ "$2" == "wetcluster" ]
then
echo "Compiling on wetcluster"
## produce the module path
source ~/.bashrc
module load gcc/4.9.2
module load openmpi/1.8.1
module load boost/1.54.0
sh ./autogen.sh
./install -s -c "--with-boost=/sw/apps/boost/1.54.0/ CXX=mpic++"
make
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
## Run on the cluster
bsub -o output_run2.%J -K -n 2 -R "span[hosts=1]" "module load openmpi/1.8.1 ; module load gcc/4.9.2; module load boost/1.54.0; mpirun -np 2 ./src/pdata"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
bsub -o output_run3.%J -K -n 3 -R "span[hosts=1]" "module load openmpi/1.8.1 ; module load gcc/4.9.2; module load boost/1.54.0; mpirun -np 3 ./src/pdata"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
bsub -o output_run4.%J -K -n 4 -R "span[hosts=1]" "module load openmpi/1.8.1 ; module load gcc/4.9.2; module load boost/1.54.0; mpirun -np 4 ./src/pdata"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
bsub -o output_run5.%J -K -n 5 -R "span[hosts=1]" "module load openmpi/1.8.1 ; module load gcc/4.9.2; module load boost/1.54.0; mpirun -np 5 ./src/pdata"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
bsub -o output_run6.%J -K -n 6 -R "span[hosts=1]" "module load openmpi/1.8.1 ; module load gcc/4.9.2; module load boost/1.54.0; mpirun -np 6 ./src/pdata"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
bsub -o output_run7.%J -K -n 7 -R "span[hosts=1]" "module load openmpi/1.8.1 ; module load gcc/4.9.2; module load boost/1.54.0; mpirun -np 7 ./src/pdata"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
bsub -o output_run8.%J -K -n 8 -R "span[hosts=1]" "module load openmpi/1.8.1 ; module load gcc/4.9.2; module load boost/1.54.0; mpirun -np 8 ./src/pdata"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
bsub -o output_run9.%J -K -n 9 -R "span[hosts=1]" "module load openmpi/1.8.1 ; module load gcc/4.9.2; module load boost/1.54.0; mpirun -np 9 ./src/pdata"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
bsub -o output_run10.%J -K -n 10 -R "span[hosts=1]" "module load openmpi/1.8.1 ; module load gcc/4.9.2; module load boost/1.54.0; mpirun -np 10 ./src/pdata"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
bsub -o output_run11.%J -K -n 11 -R "span[hosts=1]" "module load openmpi/1.8.1 ; module load gcc/4.9.2; module load boost/1.54.0; mpirun -np 11 ./src/pdata"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
bsub -o output_run12.%J -K -n 12 -R "span[hosts=1]" "module load openmpi/1.8.1 ; module load gcc/4.9.2; module load boost/1.54.0; mpirun -np 12 ./src/pdata"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
# bsub -o output_run32.%J -K -n 32 "module load openmpi/1.8.1 ; module load gcc/4.9.2; mpirun -np 32 ./src/vcluster"
# if [ $? -ne 0 ]; then exit 1 ; fi
# bsub -o output_run32.%J -K -n 64 "module load openmpi/1.8.1 ; module load gcc/4.9.2; mpirun -np 64 ./src/vcluster"
# if [ $? -ne 0 ]; then exit 1 ; fi
# bsub -o output_run32.%J -K -n 128 "module load openmpi/1.8.1 ; module load gcc/4.9.2; mpirun -np 128 ./src/vcluster"
# if [ $? -ne 0 ]; then exit 1 ; fi
elif [ "$2" == "taurus" ]
then
echo "Compiling on taurus"
source /etc/profile
echo "$PATH"
module load eigen/3.2.0
module load suitesparse/4.2.1-gnu-multimkl
module load boost/1.60.0
module load gcc/5.3.0
module load openmpi/1.10.2-gnu
module unload bullxmpi
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/home/incard/PARMETIS/lib:/home/incard/METIS/lib:/home/incard/HDF5/lib"
./install -s -c"CXX=mpic++ --with-boost=/sw/taurus/libraries/boost/1.60.0"
make
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
salloc --nodes=1 --ntasks-per-node=24 --time=00:5:00 --mem-per-cpu=1900 --partition=haswell bash -c "ulimit -s unlimited && mpirun -np 24 src/pdata --report_level=no"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
salloc --nodes=2 --ntasks-per-node=24 --time=00:5:00 --mem-per-cpu=1900 --partition=haswell bash -c "ulimit -s unlimited && mpirun -np 48 src/pdata --report_level=no"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
salloc --nodes=4 --ntasks-per-node=24 --time=00:5:00 --mem-per-cpu=1900 --partition=haswell bash -c "ulimit -s unlimited && mpirun -np 96 src/pdata --report_level=no"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
salloc --nodes=8 --ntasks-per-node=24 --time=00:5:00 --mem-per-cpu=1900 --partition=haswell bash -c "ulimit -s unlimited && mpirun -np 192 src/pdata --report_level=no"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
salloc --nodes=10 --ntasks-per-node=24 --time=00:5:00 --mem-per-cpu=1900 --partition=haswell bash -c "ulimit -s unlimited && mpirun -np 240 src/pdata --report_level=no"
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
else
echo "Compiling general"
source ~/.bashrc
./install -s
make
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
mpirun -np 1 ./src/pdata
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
mpirun -np 2 ./src/pdata
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
mpirun -np 3 ./src/pdata
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
mpirun -np 4 ./src/pdata
if [ $? -ne 0 ]; then
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Error:\", \"color\": \"#FF0000\", \"text\":\"$2 failed to complete the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
exit 1 ;
fi
fi
curl -X POST --data "payload={\"icon_emoji\": \":jenkins:\", \"username\": \"jenkins\" , \"attachments\":[{ \"title\":\"Info:\", \"color\": \"#00FF00\", \"text\":\"$2 successfully completed the openfpm_pdata test \" }] }" https://hooks.slack.com/services/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
# -*- Autoconf -*-
# Process this file with autoconf to produce a configure script.
## Take all the options with the exception of --enable-install-req
AC_PREREQ(2.59)
AC_INIT(FULL-PACKAGE-NAME, VERSION, BUG-REPORT-ADDRESS)
AC_CANONICAL_SYSTEM
AC_CONFIG_SRCDIR([src/main.cpp])
AC_CONFIG_SUBDIRS([openfpm_data openfpm_devices openfpm_vcluster openfpm_io openfpm_numerics])
#### Adding --with-pdata option and openfpm_pdata to prefix folder
if test "$prefix" = "NONE"; then
prefix=/usr/local
fi
base=$prefix
prefix="$prefix/openfpm_pdata"
echo "Installation dir is: $prefix"
ac_configure_args="$ac_configure_args --with-pdata=../../src"
########################
AM_INIT_AUTOMAKE([subdir-objects])
AC_CONFIG_HEADER([src/config/config.h])
m4_ifdef([ACX_PTHREAD],,[m4_include([m4/acx_pthread.m4])])
m4_ifdef([ACX_MPI],,[m4_include([m4/acx_mpi.m4])])
m4_ifdef([AX_OPENMP],,[m4_include([m4/ax_openmp.m4])])
m4_ifdef([AX_CUDA],,[m4_include([m4/ax_cuda.m4])])
m4_ifdef([IMMDX_LIB_METIS],,[m4_include([m4/immdx_lib_metis.m4])])
m4_ifdef([IMMDX_LIB_PARMETIS],,[m4_include([m4/immdx_lib_parmetis.m4])])
m4_ifdef([AX_BOOST_BASE],,[m4_include([m4/ax_boost_base.m4])])
m4_ifdef([AX_BOOST_IOSTREAMS],,[m4_include([m4/ax_boost_iostreams.m4])])
m4_ifdef([AX_BOOST_PROGRAM_OPTIONS],,[m4_include([m4/ax_boost_program_options.m4])])
m4_ifdef([AX_BOOST_UNIT_TEST_FRAMEWORK],,[m4_include([m4/ax_boost_unit_test_framework.m4])])
m4_ifdef([AX_BLAS],,[m4_include([m4/ax_blas.m4])])
m4_ifdef([AX_LAPACK],,[m4_include([m4/ax_lapack.m4])])
m4_ifdef([AX_SUITESPARSE],,[m4_include([m4/ax_suitesparse.m4])])
m4_ifdef([AX_EIGEN],,[m4_include([m4/ax_eigen.m4])])
m4_ifdef([AX_LIB_HDF5],,[m4_include([m4/ax_lib_hdf5.m4])])
case $host_os in
*darwin*|*macosx*)
CXXFLAGS+=" --std=c++11 "
AC_DEFINE([HAVE_OSX],[],[We have OSX])
;;
*cygwin*)
# Do something specific for cygwin
CXXFLAGS+=" --std=gnu++11 "
;;
*)
#Default Case
CXXFLAGS+=" --std=c++11 "
;;
esac
NVCCFLAGS=" "
INCLUDES_PATH=" "
# Create a file with the install base folder
echo "$base" > install_dir
# Needed for build library
AC_PROG_RANLIB
AM_PROG_AR
# Checks for programs.
AC_PROG_CXX
# Checks g++ flags
AC_CANONICAL_HOST
# Check that the compiler support mpi
AC_LANG_PUSH([C++])
AC_CHECK_HEADER(mpi.h,[],[echo "mpi.h not found"
exit 200])
AC_LANG_POP([C++])
# Check target architecture
#AX_GCC_ARCHFLAG([], [CXXFLAGS="$CXXFLAGS $ax_cv_gcc_archflag"], [])
###### Check for debug compilation
AC_MSG_CHECKING(whether to build with debug information)
debuger=no
AC_ARG_ENABLE(debug,
AC_HELP_STRING(
[--enable-debug],
[enable debug data generation (def=no)]
),
debuger="$enableval"
)
AC_MSG_RESULT($debuger)
if test x"$debuger" = x"yes"; then
AC_DEFINE([DEBUG_MODE],[],[Debug])
AC_DEFINE([DEBUG],[],[Debug])
CXXFLAGS="$CXXFLAGS -g3 -Wall -O0 "
NVCCFLAGS+="$NVCCFLAGS -g -O0 "
else
CXXFLAGS="$CXXFLAGS -Wall -O3 -g3 -funroll-loops "
NVCCFLAGS+="$NVCCFLAGS -O3 "
fi
#########
## Check for Metis
IMMDX_LIB_METIS([],[echo "Cannot detect metis, use the --with-metis option if it is not installed in the default location"
exit 201])
#########
## Check for HDF5
AX_LIB_HDF5([parallel])
if test x"$with_hdf5" = x"no"; then
echo "Cannot detect hdf5, use the --with-hdf5 option if it is not installed in the default location"
exit 207
fi
########
## Enable scan coverty
AC_MSG_CHECKING(whether to build for Coverity scan compilation)
AC_ARG_ENABLE(scan-coverty,
AC_HELP_STRING(
[--enable-scan-coverty],
[enable Coverity scan compilation (def=no)]
),
scancoverty="$enableval"
)
AC_MSG_RESULT($scancoverty)
if test x"$scancoverty" = x"yes"; then
AC_DEFINE([COVERTY_SCAN],[],[Compile for Coverity scan])
fi
## Check for parMetis
IMMDX_LIB_PARMETIS([],[echo "Cannot detect parmetis, use the --with-parmetis option if it is not installed in the default location"
exit 208])
####### include OpenFPM_devices include path
INCLUDES_PATH+="-I. -Iconfig/ -I../openfpm_io/src -I../openfpm_data/src -I../openfpm_devices/src -I../openfpm_vcluster/src/"
###### Check for se-class1
AC_MSG_CHECKING(whether to build with security enhancement class1)
se_class1=no
AC_ARG_ENABLE(se-class1,
AC_HELP_STRING(
[--enable-se-class1],
[enable security enhancement class1]
),
se_class1="$enableval"
)
AC_MSG_RESULT($se_class1)
if test x"$se_class1" = x"yes"; then
AC_DEFINE([SE_CLASS1],[],[Security enhancement class 1])
fi
###### Check for se-class 2
AC_MSG_CHECKING(whether to build with security enhancement class 2)
se_class2=no
AC_ARG_ENABLE(se-class2,
AC_HELP_STRING(
[--enable-se-class2],
[enable security enhancement class 2]
),
se_class2="$enableval"
)
AC_MSG_RESULT($se_class2)
if test x"$se_class2" = x"yes"; then
AC_DEFINE([SE_CLASS2],[],[Security enhancement class 2])
fi
###### Check for se-class 3
AC_MSG_CHECKING(whether to build with security enhancement class 3)
se_class3=no
AC_ARG_ENABLE(se-class3,
AC_HELP_STRING(
[--enable-se-class3],
[enable security enhancement class 3]
),
se_class3="$enableval"
)
AC_MSG_RESULT($se_class3)
if test x"$se_class3" = x"yes"; then
AC_DEFINE([SE_CLASS3],[],[Security enhancement class 3])
fi
###### Check for action on error
action_on_e=continue
AC_ARG_WITH([action-on-error],
AS_HELP_STRING([--with-action-on-error=stop,throw,continue],
[specify the action to do in case of error]),
[action_on_e="$withval"],
[action_on_e=continue])
if test x"$action_on_e" = x"stop"; then
AC_DEFINE([STOP_ON_ERROR],[],[If an error occurs stop the program])
fi
if test x"$action_on_e" = x"throw"; then
AC_DEFINE([THROW_ON_ERROR],[],[when an error occurs continue but avoid unsafe operations])
fi
##### CHECK FOR BOOST ##############
AX_BOOST_BASE([1.52],[],[echo "boost not found"
exit 202])
AX_BOOST_UNIT_TEST_FRAMEWORK
AX_BOOST_PROGRAM_OPTIONS
AX_BOOST_IOSTREAMS
if test x"$ax_cv_boost_unit_test_framework" = x"no"; then
echo "Notify boost unit test framework not usable"
exit 202
fi
if test x"$ax_cv_boost_iostreams" = x"no"; then
echo "Notify boost iostream not usable"
exit 202
fi
if test x"$ax_cv_boost_programs_options" = x"no"; then
echo "Notify boost program options not usable"
exit 202
fi
### Unfortunately a lot of Linux distros install a pretty old MPI in the system-wide folders.
### Overriding that MPI with the installed one is extremely difficult and tricky, because we want
### to include "some" system libraries but exclude MPI. One possibility would be to give the
### wanted libmpi.so directly to the linker, but this is not possible because the library is
### given by mpic++ in the form -L/path/to/mpi -lmpi. The alternative is to completely eliminate
### every -L pointing to a system default library path
###
# eliminate any /usr/lib and /usr/include from $BOOST_LDFLAGS and $BOOST_CPPFLAGS
BOOST_LDFLAGS=$(echo "$BOOST_LDFLAGS" | sed -e 's/ -L\/usr\/lib64[ \b]//g' | sed -e 's/ -L\/usr\/lib[ \b]//g')
BOOST_CPPFLAGS=$(echo "$BOOST_CPPFLAGS" | sed -e 's/-I\/usr\/include[ \b]//g')
AC_SUBST(BOOST_LDFLAGS)
AC_SUBST(BOOST_CPPFLAGS)
###### Checking for OpenBLAS
AX_BLAS([],[echo "blas not found"
exit 204])
AX_LAPACK([],[echo "lapack not found"
exit 204])
###### Checking for SUITESPARSE
AX_SUITESPARSE([],[echo "suitesparse not found"
exit 205])
###### Checking for EIGEN
AX_EIGEN([],[echo "eigen not found"
exit 206])
###### RT runtime lib
AC_CHECK_LIB(rt, clock_gettime, [AC_DEFINE([HAVE_CLOCK_GETTIME],[],[Have clock time])
OPT_LIBS="$OPT_LIBS -lrt"
])
####### Checking for GPU support
AX_CUDA
## detect for NVCC
if test x"$NVCC_EXIST" = x"yes"; then
AC_MSG_CHECKING(whether to build with GPU support)
gpu_support=yes
AC_ARG_ENABLE(gpu,
AC_HELP_STRING(
[--enable-gpu],
[enable gpu support]
),
gpu_support="$enableval"
)
AC_MSG_RESULT($gpu_support)
if test x"$gpu_support" = x"yes"; then
AC_DEFINE([GPU],[],[GPU support])
else
CUDA_LIBS=""
CUDA_CFLAGS=""
fi
else
gpu_support=no
fi
if test x$gpu_support = x"no"; then
CUDA_LIBS=""
CUDA_CFLAGS=""
fi
# Set this conditional if cuda is wanted
AM_CONDITIONAL(BUILDCUDA, test ! x$NVCC = x"no")
###########################
# Define that there is MPI
AC_DEFINE([HAVE_MPI],[],[MPI Enabled])
AC_SUBST(NVCCFLAGS)
AC_SUBST(INCLUDES_PATH)
AC_SUBST(OPT_LIBS)
# Checks for typedefs, structures, and compiler characteristics.
# Checks for library functions.
AC_CONFIG_FILES([Makefile src/Makefile images/Makefile ])
AC_OUTPUT
echo ""
echo "***********************************"
echo "* *"
if [ test x"$profiler" = x"yes" ]; then
echo "* profiler: yes *"
else
echo "* profiler: no *"
fi
if [ test x"$memcheck" = x"yes" ]; then
echo "* memcheck: yes *"
else
echo "* memcheck: no *"
fi
if [ test x"$debuger" = x"yes" ]; then
echo "* debug: yes *"
else
echo "* debug: no *"
fi
if [ test x"$se_class1" = x"yes" ]; then
echo "* se-class1: yes *"
else
echo "* se-class1: no *"
fi
if [ test x"$se_class2" = x"yes" ]; then
echo "* se-class2: yes *"
else
echo "* se-class2: no *"
fi
if [ test x"$se_class3" = x"yes" ]; then
echo "* se-class3: yes *"
else
echo "* se-class3: no *"
fi
if [ test x"$gpu_support" = x"no" ]; then
echo "* gpu: no *"
else
echo "* gpu: yes *"
fi
echo "* *"
echo "***********************************"
include ../../example.mk
CC=mpic++
LDIR =
OBJ = main.o
%.o: %.cpp
$(CC) -O3 -c --std=c++11 -o $@ $< $(INCLUDE_PATH)
grid: $(OBJ)
$(CC) -o $@ $^ $(CFLAGS) $(LIBS_PATH) $(LIBS)
all: grid
.PHONY: clean all
clean:
rm -f *.o *~ core grid
[pack]
files = main.cpp Makefile
#include "Grid/grid_dist_id.hpp"
#include "data_type/aggregate.hpp"
#include "Decomposition/CartDecomposition.hpp"
/*
* ### WIKI 1 ###
*
* ## Simple example
*
* This example shows several basic functionalities of the distributed grid
*
* ### WIKI END ###
*
*/
int main(int argc, char* argv[])
{
//
// ### WIKI 2 ###
//
// Initialize the library and several objects
//
openfpm_init(&argc,&argv);
//
// ### WIKI 3 ###
//
// Create
// * a 3D box that defines the domain
// * an array of 3 unsigned integers that defines the size of the grid on each dimension
// * a Ghost object that defines the extension of the ghost part of each sub-domain in physical units
Box<3,float> domain({0.0,0.0,0.0},{1.0,1.0,1.0});
size_t sz[3];
sz[0] = 100;
sz[1] = 100;
sz[2] = 100;
// Ghost
Ghost<3,float> g(0.01);
//
// ### WIKI 4 ###
//
// Create a distributed grid in 3D (1st template parameter) with float precision (2nd template parameter);
// each grid point contains a vector of dimension 3 (float[3], 3rd template parameter).
// No decomposition strategy is specified, so the default CartDecomposition is used
// (its dimension and precision match the 1st and 2nd parameters of grid_dist_id)
//
// Constructor parameters:
//
// * sz: size of the grid on each dimension
// * domain: where the grid is defined
// * g: ghost extension
//
grid_dist_id<3, float, aggregate<float[3]>> g_dist(sz,domain,g);
// ### WIKI 5 ###
//
// Get an iterator that goes through the points of the grid (no ghost)
//
auto dom = g_dist.getDomainIterator();
// ### WIKI END ###
size_t count = 0;
// Iterate over all the points
while (dom.isNext())
{
//
// ### WIKI 6 ###
//
// Get the local grid key. The local grid key internally stores the sub-domain id (each sub-domain contains a grid)
// and the local grid point id, identified by 2 integers in 2D, 3 integers in 3D, and so on. These two distinct elements
// are available with key.getSub() and key.getKey()
//
auto key = dom.get();
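//
// For example, the two pieces named above can be read out separately
// (shown only as a comment here, since the rest of this example does not need them):
//
// size_t sub_id = key.getSub(); // id of the sub-domain (local grid) owning the point
// auto local_k = key.getKey(); // local grid indices inside that sub-domain
//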
//
// ### WIKI 7 ###
//
// Here we convert the local grid position into a global position; key_g contains 3 integers that identify the position
// of the grid point in global coordinates
//
//
auto key_g = g_dist.getGKey(key);
//
// ### WIKI 8 ###
//
// we write, on the grid point at position (i,j,k), the value i*i + j*j + k*k into component [0] of the vector
g_dist.template get<0>(key)[0] = key_g.get(0)*key_g.get(0) + key_g.get(1)*key_g.get(1) + key_g.get(2)*key_g.get(2);
// ### WIKI END ###
// Count the points
count++;
//
// ### WIKI 9 ###
//
// next point
++dom;
// ### WIKI END ###
}
//
// ### WIKI 10 ###
//
// Each sub-domain has an extended (ghost) part that physically lives on other processors and is in general not synchronized.
// ghost_get<0> synchronizes property 0 (the vector) in the ghost part
//
//
g_dist.template ghost_get<0>();
//
// ### WIKI 11 ###
//
// count contains the number of points owned by the local processor; if we are interested in the total number across all
// processors we can use sum() to reduce across processors. First we get an instance of Vcluster, queue a sum operation on
// the variable count and finally execute. All the operations are asynchronous; execute() works like a barrier and ensures
// that all the queued operations are executed
//
Vcluster & vcl = g_dist.getVC();
vcl.sum(count);
vcl.execute();
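//
// Other reductions follow the same queue-then-execute pattern; for instance a global
// maximum could be obtained as sketched below (assuming the corresponding Vcluster
// reduction method max is available, as in current OpenFPM):
//
// vcl.max(count);
// vcl.execute();
//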
// only master output
if (vcl.getProcessUnitID() == 0)
std::cout << "Number of points: " << count << "\n";
//
// ### WIKI 12 ###
//
// Finally we want a nice output to visualize the information stored by the distributed grid
//
g_dist.write("output");
//
// ### WIKI 13 ###
//
// For debugging purposes and demonstration we output the decomposition
//
g_dist.getDecomposition().write("out_dec");
//
// ### WIKI 14 ###
//
// Deinitialize the library
//
openfpm_finalize();
}
include ../../example.mk
CC=mpic++
LDIR =
OBJ = main.o
%.o: %.cpp
$(CC) -O3 -c --std=c++11 -o $@ $< $(INCLUDE_PATH)
stencil: $(OBJ)
$(CC) -o $@ $^ $(CFLAGS) $(LIBS_PATH) $(LIBS)
all: stencil
.PHONY: clean all
clean:
rm -f *.o *~ core stencil
[pack]
files = main.cpp Makefile
#include "Grid/grid_dist_id.hpp"
#include "data_type/aggregate.hpp"
#include "Decomposition/CartDecomposition.hpp"
/*
* ### WIKI 1 ###
*
* ## Simple example
*
* This example shows how to move a grid_key in order to apply a Laplacian stencil.
* Be careful: the move() function is convenient, but it is not the fastest implementation
* (a possible faster alternative is sketched, as a comment, right after the stencil loop below)
*
* ### WIKI END ###
*
*/
/*
*
* ### WIKI 2 ###
*
* Define some convenient constants and types
*
*/
constexpr size_t x = 0;
constexpr size_t y = 1;
constexpr size_t z = 2;
constexpr size_t A = 0;
constexpr size_t B = 0;
typedef aggregate<float[3],float[3]> grid_point;
int main(int argc, char* argv[])
{
//
// ### WIKI 3 ###
//
// Initialize the library and several objects
//
openfpm_init(&argc,&argv);
//
// ### WIKI 4 ###
//
// Create several objects needed later, in particular
// * a 3D box that defines the domain
// * an array of 3 unsigned integers that defines the size of the grid on each dimension
// * a Ghost object that defines the extension of the ghost part of each sub-domain in physical units
Box<3,float> domain({0.0,0.0,0.0},{1.0,1.0,1.0});
size_t sz[3];
sz[0] = 100;
sz[1] = 100;
sz[2] = 100;
// Ghost
Ghost<3,float> g(0.03);
//
// ### WIKI 4 ###
//
// Create a distributed grid in 3D (1st template parameter) with float precision (2nd template parameter);
// each grid point contains two vectors of dimension 3 (grid_point, i.e. aggregate<float[3],float[3]>, 3rd template parameter).
// No decomposition strategy is specified, so the default CartDecomposition is used
// (its dimension and precision match the 1st and 2nd parameters of grid_dist_id)
//
// Constructor parameters:
//
// * sz: size of the grid on each dimension
// * domain: where the grid is defined
// * g: ghost extension
//
grid_dist_id<3, float, grid_point> g_dist(sz,domain,g);
// ### WIKI 5 ###
//
// Get an iterator that goes through the points of the domain (no ghost)
//
auto dom = g_dist.getDomainIterator();
// ### WIKI END ###
while (dom.isNext())
{
//
// ### WIKI 6 ###
//
// Get the local grid key. The local grid key internally stores the sub-domain id (each sub-domain contains a grid)
// and the local grid point id, identified by 2 integers in 2D, 3 integers in 3D, and so on. These two distinct elements
// are available with key.getSub() and key.getKey()
//
auto key = dom.get();
//
// ### WIKI 7 ###
//
// Here we convert the local grid position into a global position; key_g contains 3 integers that identify the position
// of the grid point in global coordinates
//
//
auto key_g = g_dist.getGKey(key);
//
// ### WIKI 8 ###
//
// we write, on the grid point at position (i,j,k), the value i*i + j*j + k*k into component [0] of the vector
g_dist.template get<0>(key)[0] = key_g.get(0)*key_g.get(0) + key_g.get(1)*key_g.get(1) + key_g.get(2)*key_g.get(2);
//
// ### WIKI 9 ###
//
// next point
++dom;
// ### WIKI END ###
}
//
// ### WIKI 10 ###
//
// Each sub-domain has an extended (ghost) part that physically lives on other processors and is in general not synchronized.
// ghost_get<0> synchronizes property 0 (the vector) in the ghost part
//
//
g_dist.template ghost_get<0>();
//
// ### WIKI 11 ###
//
// Get another iterator and iterate over all the domain points, computing a Laplacian stencil
//
//
auto dom2 = g_dist.getDomainIterator();
while (dom2.isNext())
{
auto key = dom2.get();
// Laplace stencil
g_dist.template get<B>(key)[1] = g_dist.template get<A>(key.move(x,1))[0] + g_dist.template get<A>(key.move(x,-1))[0] +
g_dist.template get<A>(key.move(y,1))[0] + g_dist.template get<A>(key.move(y,-1))[0] +
g_dist.template get<A>(key.move(z,1))[0] + g_dist.template get<A>(key.move(z,-1))[0] -
6*g_dist.template get<A>(key)[0];
++dom2;
}
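//
// A faster alternative (sketch only, not used here): later OpenFPM versions offer a
// stencil-aware iterator, getDomainIteratorStencil(), that resolves the neighbour
// offsets once per stencil instead of calling move() on every access. The names
// getDomainIteratorStencil and getStencil<i> below are assumptions based on those
// versions and may not exist in the version this example targets:
//
// grid_key_dx<3> star_stencil_3D[7] = {{0,0,0},
// {-1,0,0},{1,0,0},
// {0,-1,0},{0,1,0},
// {0,0,-1},{0,0,1}};
// auto dom3 = g_dist.getDomainIteratorStencil(star_stencil_3D);
// while (dom3.isNext())
// {
// auto Cp = dom3.getStencil<0>(); // centre point
// auto mx = dom3.getStencil<1>(); // x-1 neighbour, and so on for the others
// ...
// ++dom3;
// }
//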
//
// ### WIKI 12 ###
//
// Finally we want a nice output to visualize the information stored by the distributed grid
//
g_dist.write("output");
//
// ### WIKI 14 ###
//
// Deinitialize the library
//
openfpm_finalize();
}
SUBDIRS := $(wildcard */.)
all clean:
for dir in $(SUBDIRS); do \
$(MAKE) -C $$dir $@; \
done
clean: $(SUBDIRS)
.PHONY: all clean $(SUBDIRS)
SUBDIRS := $(wildcard */.)
all clean:
for dir in $(SUBDIRS); do \
$(MAKE) -C $$dir $@; \
done
clean: $(SUBDIRS)
.PHONY: all clean $(SUBDIRS)
/*
* ### WIKI 1 ###
*
* ## Simple example
*
* This example shows an agent-based simulation
*
* ### WIKI END ###
*
*/
#include "Vector/vector_dist.hpp"
#include "Decomposition/CartDecomposition.hpp"
#include "PSE/Kernels.hpp"
#include "Plot/util.hpp"
#include "Plot/GoogleChart.hpp"
#include "data_type/aggregate.hpp"
#include <cmath>
struct animal
{
typedef boost::fusion::vector<float[2], size_t, size_t, long int> type;
//! Attributes name
struct attributes
{
static const std::string name[];
};
//! type of the positional field
typedef float s_type;
//! The data
type data;
//! position property id in boost::fusion::vector
static const unsigned int pos = 0;
//! genre of animal property id in boost::fusion::vector
static const unsigned int genre = 1;
//! state property id in boost::fusion::vector
static const unsigned int status = 2;
//! alive time property id in boost::fusion::vector
static const unsigned int time_a = 3;
//! total number of properties boost::fusion::vector
static const unsigned int max_prop = 4;
animal()
{
}
inline animal(const animal & p)
{
boost::fusion::at_c<0>(data)[0] = boost::fusion::at_c<0>(p.data)[0];
boost::fusion::at_c<0>(data)[1] = boost::fusion::at_c<0>(p.data)[1];
//boost::fusion::at_c<0>(data)[2] = boost::fusion::at_c<0>(p.data)[2];
boost::fusion::at_c<1>(data) = boost::fusion::at_c<1>(p.data);
boost::fusion::at_c<2>(data) = boost::fusion::at_c<2>(p.data);
boost::fusion::at_c<3>(data) = boost::fusion::at_c<3>(p.data);
}
template<unsigned int id> inline auto get() -> decltype(boost::fusion::at_c < id > (data))
{
return boost::fusion::at_c<id>(data);
}
template<unsigned int id> inline auto get() const -> const decltype(boost::fusion::at_c < id > (data))
{
return boost::fusion::at_c<id>(data);
}
template<unsigned int dim, typename Mem> inline animal(const encapc<dim, animal, Mem> & p)
{
this->operator=(p);
}
template<unsigned int dim, typename Mem> inline animal & operator=(const encapc<dim, animal, Mem> & p)
{
boost::fusion::at_c<0>(data)[0] = p.template get<0>()[0];
boost::fusion::at_c<0>(data)[1] = p.template get<0>()[1];
//boost::fusion::at_c<0>(data)[2] = p.template get<0>()[2];
boost::fusion::at_c<1>(data) = p.template get<1>();
boost::fusion::at_c<2>(data) = p.template get<2>();
boost::fusion::at_c<3>(data) = p.template get<3>();
return *this;
}
static bool noPointers()
{
return true;
}
};
const std::string animal::attributes::name[] = { "pos", "genre", "status", "time_a", "j_repr" };
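// Note: the same per-particle layout could likely also be expressed with the
// aggregate<> helper used in the grid examples above, e.g. (hypothetical shorthand,
// not used in this example):
// typedef aggregate<float[2], size_t, size_t, long int> animal_props;
// The hand-written struct is kept here because it also defines the attribute names explicitly.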
int main(int argc, char* argv[])
{
init_global_v_cluster(&argc,&argv);
Vcluster & v_cl = *global_v_cluster;
//time the animal stays alive without eating
size_t PRED_TIME_A = 14;
size_t PREY_TIME_A = 7;
size_t PREDATOR = 1, PREY = 0;
size_t ALIVE = 1, DEAD = 0;
// Predators reproducing probability
float PRED_REPR = 0.2;
// Predators eating probability
float PRED_EAT = 0.6;
// Prey reproducing probability
float PREY_REPR = 0.5;
// set the seed
// create the random generator engine
std::srand(v_cl.getProcessUnitID());
unsigned seed = std::chrono::system_clock::now().time_since_epoch().count();
std::default_random_engine eg(seed);
std::uniform_real_distribution<float> ud(0.0f, 1.0f);
std::uniform_real_distribution<float> md(-1.0f, 1.0f);
std::uniform_real_distribution<float> uc(0.0f, 0.7f);
std::uniform_real_distribution<float> lc(0.3f, 1.0f);
size_t k = 100000;
Box<2, float> box( { 0.0, 0.0 }, { 1.0, 1.0 });
// Grid info
grid_sm<2, void> info( { 8, 8 });
// Boundary conditions
size_t bc[2] = { PERIODIC, PERIODIC };
// factor
float factor = pow(global_v_cluster->getProcessingUnits() / 2.0f, 1.0f / 3.0f);
// interaction radius
float r_cut = 0.01 / factor;
// ghost
Ghost<2, float> ghost(r_cut);
// Distributed vector
vector_dist<2, float, animal, CartDecomposition<2, float, HeapMemory, ParMetisDistribution<2, float>>> vd(k,box,bc,ghost);
// Init DLB tool
DLB dlb(v_cl);
// Set unbalance threshold
dlb.setHeurisitc(DLB::Heuristic::UNBALANCE_THRLD);
dlb.setThresholdLevel(DLB::ThresholdLevel::THRLD_MEDIUM);
auto it = vd.getIterator();
while (it.isNext())
{
auto key = it.get();
if(ud(eg) < 0.7 )
{
vd.template getPos<animal::pos>(key)[0] = lc(eg);
vd.template getPos<animal::pos>(key)[1] = lc(eg);
vd.template getProp<animal::genre>(key) = PREY;
vd.template getProp<animal::status>(key) = ALIVE;
vd.template getProp<animal::time_a>(key) = PREY_TIME_A;
}
else
{
vd.template getPos<animal::pos>(key)[0] = uc(eg);
vd.template getPos<animal::pos>(key)[1] = uc(eg);
vd.template getProp<animal::genre>(key) = PREDATOR;
vd.template getProp<animal::status>(key) = ALIVE;
vd.template getProp<animal::time_a>(key) = PRED_TIME_A;
}
++it;
}
vd.map();
vd.addComputationCosts();
vd.getDecomposition().rebalance(dlb);
vd.map();
//vd.getDecomposition().getDistribution().write("parmetis_prey_predators_" + std::to_string(0) + ".vtk");
//vd.write("particles_", 0, NO_GHOST);
// 100 step random walk
for (size_t j = 0; j < 100; j++)
{
size_t prey = 0, predators = 0;
auto it = vd.getDomainIterator();
while (it.isNext())
{
auto key = it.get();
vd.template getPos<animal::pos>(key)[0] += 0.01 * md(eg);
vd.template getPos<animal::pos>(key)[1] += 0.01 * md(eg);
if(vd.template getProp<animal::genre>(key) == PREY)
prey++;
else
predators++;
++it;
}
vd.map();
/////// Interactions ///
// get ghosts
vd.ghost_get<0>();
// vector of dead animals
openfpm::vector<size_t> deads;
openfpm::vector<vect_dist_key_dx> reps_prey;
openfpm::vector<vect_dist_key_dx> reps_pred;
// get the cell list with a cutoff radius
bool error = false;
auto NN = vd.getCellList(0.01/factor);
// iterate across the domain particle
auto it2 = vd.getDomainIterator();
while (it2.isNext())
{
auto p = it2.get();
Point<2,float> xp = vd.getPos<0>(p);
size_t gp = vd.getProp<animal::genre>(p);
size_t sp = vd.getProp<animal::status>(p);
if(sp == ALIVE)
{
if(gp == PREY)
{
if( prey < k/1.5 && ud(eg) < PREY_REPR )
reps_prey.add(p);
vd.getProp<animal::time_a>(p)--;
if(vd.getProp<animal::time_a>(p) <= 0)
{
vd.getProp<animal::status>(p) = DEAD;
prey--;
}
}
else if(gp == PREDATOR)
{
vd.getProp<animal::time_a>(p)--;
if(vd.getProp<animal::time_a>(p) <= 0)
{
vd.getProp<animal::status>(p) = DEAD;
}
else
{
auto Np = NN.getIterator(NN.getCell(xp));
while (Np.isNext())
{
auto q = Np.get();
size_t gq = vd.getProp<animal::genre>(q);
size_t sq = vd.getProp<animal::status>(q);
Point<2,float> xq = vd.getPos<0>(q);
Point<2,float> f = (xp - xq);
float distance = f.norm();
if (distance < 2*r_cut*sqrt(2) && gq == PREY && sq == ALIVE)
{
if( ud(eg) < PRED_EAT )
{
vd.getProp<animal::status>(q) = DEAD;
vd.getProp<animal::time_a>(p) = PRED_TIME_A;
if( ud(eg) < PRED_REPR )
reps_pred.add(p);
}
}
++Np;
}
}
}
}
++it2;
}
vd.deleteGhost();
// Replicate
for (size_t i = 0 ; i < reps_prey.size() ; i++)
{
vd.add();
vd.getLastPos<animal::pos>()[0] = vd.getPos<0>(reps_prey.get(i))[0];
vd.getLastPos<animal::pos>()[1] = vd.getPos<0>(reps_prey.get(i))[1];
vd.getLastProp<animal::genre>() = PREY;
vd.getLastProp<animal::status>() = ALIVE;
vd.getLastProp<animal::time_a>() = PREY_TIME_A;
}
for (size_t i = 0 ; i < reps_pred.size() ; i++)
{
vd.add();
vd.getLastPos<animal::pos>()[0] = vd.getPos<0>(reps_pred.get(i))[0];
vd.getLastPos<animal::pos>()[1] = vd.getPos<0>(reps_pred.get(i))[1];
vd.getLastProp<animal::genre>() = PREDATOR;
vd.getLastProp<animal::status>() = ALIVE;
vd.getLastProp<animal::time_a>() = PRED_TIME_A;
}
auto it3 = vd.getDomainIterator();
while (it3.isNext())
{
auto key = it3.get();
if(vd.getProp<animal::status>(key.getKey()) == DEAD)
{
deads.add(key.getKey());
}
++it3;
}
deads.sort();
vd.remove(deads, 0);
deads.resize(0);
vd.deleteGhost();
////////////////////////
vd.addComputationCosts();
vd.getDecomposition().rebalance(dlb);
vd.map();
}
//
// ### WIKI 10 ###
//
// Deinitialize the library
//
delete_global_v_cluster();
}
SUBDIRS := $(wildcard */.)
all clean:
for dir in $(SUBDIRS); do \
$(MAKE) -C $$dir $@; \
done
clean: $(SUBDIRS)
.PHONY: all clean $(SUBDIRS)
include ../../../example.mk
CC=mpic++
LDIR =
OBJ = main.o
%.o: %.cpp
$(CC) -O3 -c --std=c++11 -o $@ $< $(INCLUDE_PATH)
pse_1d: $(OBJ)
$(CC) -o $@ $^ $(CFLAGS) $(LIBS_PATH) $(LIBS)
all: pse_1d
.PHONY: clean all
clean:
rm -f *.o *~ core pse_1d
[pack]
files = main.cpp Makefile