From b94e0b766f38f491f30741e89ad5b10349f397c4 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 13 Feb 2025 10:30:51 +0100 Subject: [PATCH 01/40] Initial import Metisse interface --- src/amuse/community/metisse/Makefile | 95 + src/amuse/community/metisse/__init__.py | 1 + src/amuse/community/metisse/interface.f90 | 29 + src/amuse/community/metisse/interface.py | 106 + .../community/metisse/packages/.DS_Store | Bin 0 -> 6148 bytes .../metisse/packages/amuse-metisse.amuse_deps | 1 + .../metisse/packages/amuse-metisse/metisse | 1 + .../packages/amuse-metisse/pyproject.toml | 44 + .../community/metisse/support/aclocal.m4 | 21 + .../community/metisse/support/config.mk.in | 45 + src/amuse/community/metisse/support/configure | 8307 +++++++++++++++++ .../community/metisse/support/configure.ac | 62 + src/amuse/community/metisse/support/shared | 1 + .../community/metisse/tests/test_metisse.py | 19 + 14 files changed, 8732 insertions(+) create mode 100644 src/amuse/community/metisse/Makefile create mode 100644 src/amuse/community/metisse/__init__.py create mode 100644 src/amuse/community/metisse/interface.f90 create mode 100644 src/amuse/community/metisse/interface.py create mode 100644 src/amuse/community/metisse/packages/.DS_Store create mode 100644 src/amuse/community/metisse/packages/amuse-metisse.amuse_deps create mode 120000 src/amuse/community/metisse/packages/amuse-metisse/metisse create mode 100644 src/amuse/community/metisse/packages/amuse-metisse/pyproject.toml create mode 100644 src/amuse/community/metisse/support/aclocal.m4 create mode 100644 src/amuse/community/metisse/support/config.mk.in create mode 100755 src/amuse/community/metisse/support/configure create mode 100644 src/amuse/community/metisse/support/configure.ac create mode 120000 src/amuse/community/metisse/support/shared create mode 100644 src/amuse/community/metisse/tests/test_metisse.py diff --git a/src/amuse/community/metisse/Makefile b/src/amuse/community/metisse/Makefile new file mode 100644 
index 0000000000..3592cb3bac --- /dev/null +++ b/src/amuse/community/metisse/Makefile @@ -0,0 +1,95 @@ +ifneq (,$(filter-out clean distclean, $(MAKECMDGOALS))) +include support/config.mk +endif + + +# Detecting dependencies +support/config.mk: + cd support && ./configure + + +# Downloading the code +##### Remove if not needed ##### +VERSION = 121257ede5486660d9eb5fbb23b2c377f02ae465 + +metisse.tar.gz: + ##### Modify URL as needed ##### + $(DOWNLOAD) https://github.com/TeamMETISSE/METISSE/archive/$(VERSION).tar.gz >$@ + +PATCHES := $(file < patches/series) +PATCHES := $(patsubst %,%,$(PATCHES)) # replace newlines with spaces + +src/metisse-$(VERSION): metisse.tar.gz + ##### Modify as needed ##### + tar xf $< + mv metisse-$(VERSION) src + ##### Apply patches here, if needed ##### + #patch + # ) + pass + +# the definition of the code data stores, either particle sets: + def define_particle_sets(self, handler): + # handler.define_set("particles", "index_of_the_particle") + # handler.set_new("particles", "new_particle") + # handler.set_delete("particles", "delete_particle") + # handler.add_setter("particles", "set_state") + # handler.add_getter("particles", "get_state") + # handler.add_setter("particles", "set_mass") + # handler.add_getter("particles", "get_mass", names=("mass",)) + pass + +# and/or grids: + def define_grids(self, handler): + # handler.define_grid("grid",axes_names = ["x", "y"], grid_class=StructuredGrid) + # handler.set_grid_range("grid", "_grid_range") + # handler.add_getter("grid", "get_grid_position", names=["x", "y"]) + # handler.add_getter("grid", "get_rho", names=["density"]) + # handler.add_setter("grid", "set_rho", names=["density"]) + pass + + +class MetisseParticles(Particles): + def __init__(self, code_interface, storage=None): + Particles.__init__(self, storage=storage) + self._private.code_interface = code_interface diff --git a/src/amuse/community/metisse/packages/.DS_Store b/src/amuse/community/metisse/packages/.DS_Store new file mode 
100644 index 0000000000000000000000000000000000000000..c6a944795a2c2cdb863daf6997a2455f2864afbe GIT binary patch literal 6148 zcmeH~Jx&BM427Rzixo60D#~1df*XWoRumKV9_ez#tNT5?@e`%v!CEK;eFYT05~q|;wAuBP^(kq&E>c}`Zg z>`<&#r@yptSPj)I0wORba2m^%-~ZmP|I+`Dle7{65%^aGY|%Y+YyMK+t&^YgyS8vV qb9@_PTR5lDBY+FPiZ2Io#lIO>Q~S`CPJc_M>LK7/dev/null 2>&1 +then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else case e in #( + e) case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac ;; +esac +fi + + + +# Reset variables that may have inherited troublesome values from +# the environment. + +# IFS needs to be set, to space, tab, and newline, in precisely that order. +# (If _AS_PATH_WALK were called with IFS unset, it would have the +# side effect of setting IFS to empty, thus disabling word splitting.) +# Quoting is to prevent editors from complaining about space-tab. +as_nl=' +' +export as_nl +IFS=" "" $as_nl" + +PS1='$ ' +PS2='> ' +PS4='+ ' + +# Ensure predictable behavior from utilities with locale-dependent output. +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# We cannot yet rely on "unset" to work, but we need these variables +# to be unset--not just set to an empty or harmless value--now, to +# avoid bugs in old shells (e.g. pre-3.0 UWIN ksh). This construct +# also avoids known problems related to "unset" and subshell syntax +# in other old shells (e.g. bash 2.01 and pdksh 5.2.14). +for as_var in BASH_ENV ENV MAIL MAILPATH CDPATH +do eval test \${$as_var+y} \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done + +# Ensure that fds 0, 1, and 2 are open. +if (exec 3>&0) 2>/dev/null; then :; else exec 0&1) 2>/dev/null; then :; else exec 1>/dev/null; fi +if (exec 3>&2) ; then :; else exec 2>/dev/null; fi + +# The user is always right. 
+if ${PATH_SEPARATOR+false} :; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } +fi + + +# Find who we are. Look in the path if we contain no directory separator. +as_myself= +case $0 in #(( + *[\\/]* ) as_myself=$0 ;; + *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + test -r "$as_dir$0" && as_myself=$as_dir$0 && break + done +IFS=$as_save_IFS + + ;; +esac +# We did not find ourselves, most probably we were run as 'sh COMMAND' +# in which case we are not to be found in the path. +if test "x$as_myself" = x; then + as_myself=$0 +fi +if test ! -f "$as_myself"; then + printf "%s\n" "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + + +# Use a proper internal environment variable to ensure we don't fall + # into an infinite loop, continuously re-executing ourselves. + if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then + _as_can_reexec=no; export _as_can_reexec; + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. +BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed 'exec'. +printf "%s\n" "$0: could not re-execute with $CONFIG_SHELL" >&2 +exit 255 + fi + # We don't want this to propagate to other subprocesses. 
+ { _as_can_reexec=; unset _as_can_reexec;} +if test "x$CONFIG_SHELL" = x; then + as_bourne_compatible="if test \${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 +then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which + # is contrary to our usage. Disable this feature. + alias -g '\${1+\"\$@\"}'='\"\$@\"' + setopt NO_GLOB_SUBST +else case e in #( + e) case \`(set -o) 2>/dev/null\` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac ;; +esac +fi +" + as_required="as_fn_return () { (exit \$1); } +as_fn_success () { as_fn_return 0; } +as_fn_failure () { as_fn_return 1; } +as_fn_ret_success () { return 0; } +as_fn_ret_failure () { return 1; } + +exitcode=0 +as_fn_success || { exitcode=1; echo as_fn_success failed.; } +as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } +as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } +as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } +if ( set x; as_fn_ret_success y && test x = \"\$1\" ) +then : + +else case e in #( + e) exitcode=1; echo positional parameters were not saved. 
;; +esac +fi +test x\$exitcode = x0 || exit 1 +blah=\$(echo \$(echo blah)) +test x\"\$blah\" = xblah || exit 1 +test -x / || exit 1" + as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO + as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO + eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && + test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 +test \$(( 1 + 1 )) = 2 || exit 1" + if (eval "$as_required") 2>/dev/null +then : + as_have_required=yes +else case e in #( + e) as_have_required=no ;; +esac +fi + if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null +then : + +else case e in #( + e) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +as_found=false +for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + as_found=: + case $as_dir in #( + /*) + for as_base in sh bash ksh sh5; do + # Try only shells that exist, to save several forks. 
+ as_shell=$as_dir$as_base + if { test -f "$as_shell" || test -f "$as_shell.exe"; } && + as_run=a "$as_shell" -c "$as_bourne_compatible""$as_required" 2>/dev/null +then : + CONFIG_SHELL=$as_shell as_have_required=yes + if as_run=a "$as_shell" -c "$as_bourne_compatible""$as_suggested" 2>/dev/null +then : + break 2 +fi +fi + done;; + esac + as_found=false +done +IFS=$as_save_IFS +if $as_found +then : + +else case e in #( + e) if { test -f "$SHELL" || test -f "$SHELL.exe"; } && + as_run=a "$SHELL" -c "$as_bourne_compatible""$as_required" 2>/dev/null +then : + CONFIG_SHELL=$SHELL as_have_required=yes +fi ;; +esac +fi + + + if test "x$CONFIG_SHELL" != x +then : + export CONFIG_SHELL + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. +BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed 'exec'. +printf "%s\n" "$0: could not re-execute with $CONFIG_SHELL" >&2 +exit 255 +fi + + if test x$as_have_required = xno +then : + printf "%s\n" "$0: This script requires a shell more modern than all" + printf "%s\n" "$0: the shells that I found on your system." + if test ${ZSH_VERSION+y} ; then + printf "%s\n" "$0: In particular, zsh $ZSH_VERSION has bugs and should" + printf "%s\n" "$0: be upgraded to zsh 4.3.4 or later." + else + printf "%s\n" "$0: Please tell bug-autoconf@gnu.org about your system, +$0: including any error possibly output before this +$0: message. Then install a modern shell, or manually run +$0: the script under such a shell if you do have one." 
+ fi + exit 1 +fi ;; +esac +fi +fi +SHELL=${CONFIG_SHELL-/bin/sh} +export SHELL +# Unset more variables known to interfere with behavior of common tools. +CLICOLOR_FORCE= GREP_OPTIONS= +unset CLICOLOR_FORCE GREP_OPTIONS + +## --------------------- ## +## M4sh Shell Functions. ## +## --------------------- ## +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset + + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. +as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`printf "%s\n" "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. 
+as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null +then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else case e in #( + e) as_fn_append () + { + eval $1=\$$1\$2 + } ;; +esac +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. +if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null +then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else case e in #( + e) as_fn_arith () + { + as_val=`expr "$@" || test $? -eq 1` + } ;; +esac +fi # as_fn_arith + + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. 
+as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + printf "%s\n" "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. +as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + + + as_lineno_1=$LINENO as_lineno_1a=$LINENO + as_lineno_2=$LINENO as_lineno_2a=$LINENO + eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && + test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { + # Blame Lee E. McMahon (1931-1989) for sed's syntax. 
:-) + sed -n ' + p + /[$]LINENO/= + ' <$as_myself | + sed ' + t clear + :clear + s/[$]LINENO.*/&-/ + t lineno + b + :lineno + N + :loop + s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ + t loop + s/-\n.*// + ' >$as_me.lineno && + chmod +x "$as_me.lineno" || + { printf "%s\n" "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } + + # If we had to re-execute with $CONFIG_SHELL, we're ensured to have + # already done that, so ensure we don't try to do so again and fall + # in an infinite loop. This has already happened in practice. + _as_can_reexec=no; export _as_can_reexec + # Don't try to exec as it changes $[0], causing all sort of problems + # (the dirname of $[0] is not the place where we might find the + # original and so on. Autoconf is especially sensitive to this). + . "./$as_me.lineno" + # Exit status is that of the last command. + exit +} + + +# Determine whether it's possible to make 'echo' print without a newline. +# These variables are no longer used directly by Autoconf, but are AC_SUBSTed +# for compatibility with existing Makefiles. +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. + xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +# For backward compatibility with old third-party macros, we provide +# the shell variables $as_echo and $as_echo_n. New code should use +# AS_ECHO(["message"]) and AS_ECHO_N(["message"]), respectively. +as_echo='printf %s\n' +as_echo_n='printf %s' + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both 'ln -s file dir' and 'ln file dir' fail. 
+ # 2) DJGPP < 2.04 has no symlinks; 'ln -s' creates a wrapper executable. + # In both cases, we have to default to 'cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + +if mkdir -p . 2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + +as_test_x='test -x' +as_executable_p=as_fn_executable_p + +# Sed expression to map a string onto a valid CPP name. +as_sed_cpp="y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g" +as_tr_cpp="eval sed '$as_sed_cpp'" # deprecated + +# Sed expression to map a string onto a valid variable name. +as_sed_sh="y%*+%pp%;s%[^_$as_cr_alnum]%_%g" +as_tr_sh="eval sed '$as_sed_sh'" # deprecated + + +test -n "$DJDIR" || exec 7<&0 &1 + +# Name of the host. +# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, +# so uname gets run too. +ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` + +# +# Initializations. +# +ac_default_prefix=/usr/local +ac_clean_files= +ac_config_libobj_dir=. +LIBOBJS= +cross_compiling=no +subdirs= +MFLAGS= +MAKEFLAGS= + +# Identity of this package. 
+PACKAGE_NAME='amuse-metisse' +PACKAGE_TARNAME='amuse-metisse' +PACKAGE_VERSION='1.0' +PACKAGE_STRING='amuse-metisse 1.0' +PACKAGE_BUGREPORT='' +PACKAGE_URL='' + +ac_subst_vars='LTLIBOBJS +LIBOBJS +MPILIBS +MPI_FCLIBS +MPI_FCFLAGS +MPIFC +FOUND_SAPPORO_LIGHT +SAPPORO_LIGHT_LIBS +SAPPORO_LIGHT_CFLAGS +FOUND_G6LIB +G6LIB_LIBS +G6LIB_CFLAGS +FOUND_SIMPLE_HASH +SIMPLE_HASH_LIBS +SIMPLE_HASH_CFLAGS +FOUND_FORSOCKETS +FORSOCKETS_LIBS +FORSOCKETS_CFLAGS +FOUND_AMUSE_MPI +AMUSE_MPI_LIBS +AMUSE_MPI_CFLAGS +FOUND_STOPCONDMPI +STOPCONDMPI_LIBS +STOPCONDMPI_CFLAGS +FOUND_STOPCOND +STOPCOND_LIBS +STOPCOND_CFLAGS +PKG_CONFIG_LIBDIR +PKG_CONFIG_PATH +PKG_CONFIG +ac_ct_CC +CPPFLAGS +CFLAGS +CC +PERL +UNXZ +BUNZIP2 +GUNZIP +UNZIP +TAR +DOWNLOAD +CURL +WGET +RANLIB +AR +target_os +target_vendor +target_cpu +target +OBJEXT +EXEEXT +ac_ct_FC +LDFLAGS +FCFLAGS +FC +CPU_COUNT +EGREP +GREP +host_os +host_vendor +host_cpu +host +build_os +build_vendor +build_cpu +build +FFLAGS +target_alias +host_alias +build_alias +LIBS +ECHO_T +ECHO_N +ECHO_C +DEFS +mandir +localedir +libdir +psdir +pdfdir +dvidir +htmldir +infodir +docdir +oldincludedir +includedir +runstatedir +localstatedir +sharedstatedir +sysconfdir +datadir +datarootdir +libexecdir +sbindir +bindir +program_transform_name +prefix +exec_prefix +PACKAGE_URL +PACKAGE_BUGREPORT +PACKAGE_STRING +PACKAGE_VERSION +PACKAGE_TARNAME +PACKAGE_NAME +PATH_SEPARATOR +SHELL' +ac_subst_files='' +ac_user_opts=' +enable_option_checking +' + ac_precious_vars='build_alias +host_alias +target_alias +FC +FCFLAGS +LDFLAGS +LIBS +CC +CFLAGS +CPPFLAGS +PKG_CONFIG +PKG_CONFIG_PATH +PKG_CONFIG_LIBDIR +STOPCOND_CFLAGS +STOPCOND_LIBS +STOPCONDMPI_CFLAGS +STOPCONDMPI_LIBS +AMUSE_MPI_CFLAGS +AMUSE_MPI_LIBS +FORSOCKETS_CFLAGS +FORSOCKETS_LIBS +SIMPLE_HASH_CFLAGS +SIMPLE_HASH_LIBS +G6LIB_CFLAGS +G6LIB_LIBS +SAPPORO_LIGHT_CFLAGS +SAPPORO_LIGHT_LIBS +MPIFC' + + +# Initialize some variables set by options. 
+ac_init_help= +ac_init_version=false +ac_unrecognized_opts= +ac_unrecognized_sep= +# The variables have the same names as the options, with +# dashes changed to underlines. +cache_file=/dev/null +exec_prefix=NONE +no_create= +no_recursion= +prefix=NONE +program_prefix=NONE +program_suffix=NONE +program_transform_name=s,x,x, +silent= +site= +srcdir= +verbose= +x_includes=NONE +x_libraries=NONE + +# Installation directory options. +# These are left unexpanded so users can "make install exec_prefix=/foo" +# and all the variables that are supposed to be based on exec_prefix +# by default will actually change. +# Use braces instead of parens because sh, perl, etc. also accept them. +# (The list follows the same order as the GNU Coding Standards.) +bindir='${exec_prefix}/bin' +sbindir='${exec_prefix}/sbin' +libexecdir='${exec_prefix}/libexec' +datarootdir='${prefix}/share' +datadir='${datarootdir}' +sysconfdir='${prefix}/etc' +sharedstatedir='${prefix}/com' +localstatedir='${prefix}/var' +runstatedir='${localstatedir}/run' +includedir='${prefix}/include' +oldincludedir='/usr/include' +docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' +infodir='${datarootdir}/info' +htmldir='${docdir}' +dvidir='${docdir}' +pdfdir='${docdir}' +psdir='${docdir}' +libdir='${exec_prefix}/lib' +localedir='${datarootdir}/locale' +mandir='${datarootdir}/man' + +ac_prev= +ac_dashdash= +for ac_option +do + # If the previous option needs an argument, assign it. 
+ if test -n "$ac_prev"; then + eval $ac_prev=\$ac_option + ac_prev= + continue + fi + + case $ac_option in + *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; + *=) ac_optarg= ;; + *) ac_optarg=yes ;; + esac + + case $ac_dashdash$ac_option in + --) + ac_dashdash=yes ;; + + -bindir | --bindir | --bindi | --bind | --bin | --bi) + ac_prev=bindir ;; + -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) + bindir=$ac_optarg ;; + + -build | --build | --buil | --bui | --bu) + ac_prev=build_alias ;; + -build=* | --build=* | --buil=* | --bui=* | --bu=*) + build_alias=$ac_optarg ;; + + -cache-file | --cache-file | --cache-fil | --cache-fi \ + | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) + ac_prev=cache_file ;; + -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ + | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) + cache_file=$ac_optarg ;; + + --config-cache | -C) + cache_file=config.cache ;; + + -datadir | --datadir | --datadi | --datad) + ac_prev=datadir ;; + -datadir=* | --datadir=* | --datadi=* | --datad=*) + datadir=$ac_optarg ;; + + -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ + | --dataroo | --dataro | --datar) + ac_prev=datarootdir ;; + -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ + | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) + datarootdir=$ac_optarg ;; + + -disable-* | --disable-*) + ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? 
"invalid feature name: '$ac_useropt'" + ac_useropt_orig=$ac_useropt + ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=no ;; + + -docdir | --docdir | --docdi | --doc | --do) + ac_prev=docdir ;; + -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) + docdir=$ac_optarg ;; + + -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) + ac_prev=dvidir ;; + -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) + dvidir=$ac_optarg ;; + + -enable-* | --enable-*) + ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid feature name: '$ac_useropt'" + ac_useropt_orig=$ac_useropt + ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=\$ac_optarg ;; + + -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ + | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ + | --exec | --exe | --ex) + ac_prev=exec_prefix ;; + -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ + | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ + | --exec=* | --exe=* | --ex=*) + exec_prefix=$ac_optarg ;; + + -gas | --gas | --ga | --g) + # Obsolete; use --with-gas. 
+ with_gas=yes ;; + + -help | --help | --hel | --he | -h) + ac_init_help=long ;; + -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) + ac_init_help=recursive ;; + -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) + ac_init_help=short ;; + + -host | --host | --hos | --ho) + ac_prev=host_alias ;; + -host=* | --host=* | --hos=* | --ho=*) + host_alias=$ac_optarg ;; + + -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) + ac_prev=htmldir ;; + -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ + | --ht=*) + htmldir=$ac_optarg ;; + + -includedir | --includedir | --includedi | --included | --include \ + | --includ | --inclu | --incl | --inc) + ac_prev=includedir ;; + -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ + | --includ=* | --inclu=* | --incl=* | --inc=*) + includedir=$ac_optarg ;; + + -infodir | --infodir | --infodi | --infod | --info | --inf) + ac_prev=infodir ;; + -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) + infodir=$ac_optarg ;; + + -libdir | --libdir | --libdi | --libd) + ac_prev=libdir ;; + -libdir=* | --libdir=* | --libdi=* | --libd=*) + libdir=$ac_optarg ;; + + -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ + | --libexe | --libex | --libe) + ac_prev=libexecdir ;; + -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ + | --libexe=* | --libex=* | --libe=*) + libexecdir=$ac_optarg ;; + + -localedir | --localedir | --localedi | --localed | --locale) + ac_prev=localedir ;; + -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) + localedir=$ac_optarg ;; + + -localstatedir | --localstatedir | --localstatedi | --localstated \ + | --localstate | --localstat | --localsta | --localst | --locals) + ac_prev=localstatedir ;; + -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ + | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) + localstatedir=$ac_optarg ;; + + 
-mandir | --mandir | --mandi | --mand | --man | --ma | --m) + ac_prev=mandir ;; + -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) + mandir=$ac_optarg ;; + + -nfp | --nfp | --nf) + # Obsolete; use --without-fp. + with_fp=no ;; + + -no-create | --no-create | --no-creat | --no-crea | --no-cre \ + | --no-cr | --no-c | -n) + no_create=yes ;; + + -no-recursion | --no-recursion | --no-recursio | --no-recursi \ + | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) + no_recursion=yes ;; + + -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ + | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ + | --oldin | --oldi | --old | --ol | --o) + ac_prev=oldincludedir ;; + -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ + | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ + | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) + oldincludedir=$ac_optarg ;; + + -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) + ac_prev=prefix ;; + -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) + prefix=$ac_optarg ;; + + -program-prefix | --program-prefix | --program-prefi | --program-pref \ + | --program-pre | --program-pr | --program-p) + ac_prev=program_prefix ;; + -program-prefix=* | --program-prefix=* | --program-prefi=* \ + | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) + program_prefix=$ac_optarg ;; + + -program-suffix | --program-suffix | --program-suffi | --program-suff \ + | --program-suf | --program-su | --program-s) + ac_prev=program_suffix ;; + -program-suffix=* | --program-suffix=* | --program-suffi=* \ + | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) + program_suffix=$ac_optarg ;; + + -program-transform-name | --program-transform-name \ + | --program-transform-nam | --program-transform-na \ + | --program-transform-n | --program-transform- \ + | --program-transform | 
--program-transfor \ + | --program-transfo | --program-transf \ + | --program-trans | --program-tran \ + | --progr-tra | --program-tr | --program-t) + ac_prev=program_transform_name ;; + -program-transform-name=* | --program-transform-name=* \ + | --program-transform-nam=* | --program-transform-na=* \ + | --program-transform-n=* | --program-transform-=* \ + | --program-transform=* | --program-transfor=* \ + | --program-transfo=* | --program-transf=* \ + | --program-trans=* | --program-tran=* \ + | --progr-tra=* | --program-tr=* | --program-t=*) + program_transform_name=$ac_optarg ;; + + -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) + ac_prev=pdfdir ;; + -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) + pdfdir=$ac_optarg ;; + + -psdir | --psdir | --psdi | --psd | --ps) + ac_prev=psdir ;; + -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) + psdir=$ac_optarg ;; + + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + silent=yes ;; + + -runstatedir | --runstatedir | --runstatedi | --runstated \ + | --runstate | --runstat | --runsta | --runst | --runs \ + | --run | --ru | --r) + ac_prev=runstatedir ;; + -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \ + | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \ + | --run=* | --ru=* | --r=*) + runstatedir=$ac_optarg ;; + + -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) + ac_prev=sbindir ;; + -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ + | --sbi=* | --sb=*) + sbindir=$ac_optarg ;; + + -sharedstatedir | --sharedstatedir | --sharedstatedi \ + | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ + | --sharedst | --shareds | --shared | --share | --shar \ + | --sha | --sh) + ac_prev=sharedstatedir ;; + -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ + | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ + | --sharedst=* | --shareds=* | 
--shared=* | --share=* | --shar=* \ + | --sha=* | --sh=*) + sharedstatedir=$ac_optarg ;; + + -site | --site | --sit) + ac_prev=site ;; + -site=* | --site=* | --sit=*) + site=$ac_optarg ;; + + -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) + ac_prev=srcdir ;; + -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) + srcdir=$ac_optarg ;; + + -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ + | --syscon | --sysco | --sysc | --sys | --sy) + ac_prev=sysconfdir ;; + -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ + | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) + sysconfdir=$ac_optarg ;; + + -target | --target | --targe | --targ | --tar | --ta | --t) + ac_prev=target_alias ;; + -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) + target_alias=$ac_optarg ;; + + -v | -verbose | --verbose | --verbos | --verbo | --verb) + verbose=yes ;; + + -version | --version | --versio | --versi | --vers | -V) + ac_init_version=: ;; + + -with-* | --with-*) + ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid package name: '$ac_useropt'" + ac_useropt_orig=$ac_useropt + ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=\$ac_optarg ;; + + -without-* | --without-*) + ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? 
"invalid package name: '$ac_useropt'" + ac_useropt_orig=$ac_useropt + ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=no ;; + + --x) + # Obsolete; use --with-x. + with_x=yes ;; + + -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ + | --x-incl | --x-inc | --x-in | --x-i) + ac_prev=x_includes ;; + -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ + | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) + x_includes=$ac_optarg ;; + + -x-libraries | --x-libraries | --x-librarie | --x-librari \ + | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) + ac_prev=x_libraries ;; + -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ + | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) + x_libraries=$ac_optarg ;; + + -*) as_fn_error $? "unrecognized option: '$ac_option' +Try '$0 --help' for more information" + ;; + + *=*) + ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` + # Reject names that are not valid shell variable names. + case $ac_envvar in #( + '' | [0-9]* | *[!_$as_cr_alnum]* ) + as_fn_error $? "invalid variable name: '$ac_envvar'" ;; + esac + eval $ac_envvar=\$ac_optarg + export $ac_envvar ;; + + *) + # FIXME: should be removed in autoconf 3.0. + printf "%s\n" "$as_me: WARNING: you should use --build, --host, --target" >&2 + expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && + printf "%s\n" "$as_me: WARNING: invalid host type: $ac_option" >&2 + : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" + ;; + + esac +done + +if test -n "$ac_prev"; then + ac_option=--`echo $ac_prev | sed 's/_/-/g'` + as_fn_error $? 
"missing argument to $ac_option" +fi + +if test -n "$ac_unrecognized_opts"; then + case $enable_option_checking in + no) ;; + fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; + *) printf "%s\n" "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; + esac +fi + +# Check all directory arguments for consistency. +for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ + datadir sysconfdir sharedstatedir localstatedir includedir \ + oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ + libdir localedir mandir runstatedir +do + eval ac_val=\$$ac_var + # Remove trailing slashes. + case $ac_val in + */ ) + ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` + eval $ac_var=\$ac_val;; + esac + # Be sure to have absolute directory names. + case $ac_val in + [\\/$]* | ?:[\\/]* ) continue;; + NONE | '' ) case $ac_var in *prefix ) continue;; esac;; + esac + as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" +done + +# There might be people who depend on the old broken behavior: '$host' +# used to hold the argument of --host etc. +# FIXME: To remove some day. +build=$build_alias +host=$host_alias +target=$target_alias + +# FIXME: To remove some day. +if test "x$host_alias" != x; then + if test "x$build_alias" = x; then + cross_compiling=maybe + elif test "x$build_alias" != "x$host_alias"; then + cross_compiling=yes + fi +fi + +ac_tool_prefix= +test -n "$host_alias" && ac_tool_prefix=$host_alias- + +test "$silent" = yes && exec 6>/dev/null + + +ac_pwd=`pwd` && test -n "$ac_pwd" && +ac_ls_di=`ls -di .` && +ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || + as_fn_error $? "working directory cannot be determined" +test "X$ac_ls_di" = "X$ac_pwd_ls_di" || + as_fn_error $? "pwd does not report name of working directory" + + +# Find the source files, if location was not specified. 
+if test -z "$srcdir"; then + ac_srcdir_defaulted=yes + # Try the directory containing this script, then the parent directory. + ac_confdir=`$as_dirname -- "$as_myself" || +$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_myself" : 'X\(//\)[^/]' \| \ + X"$as_myself" : 'X\(//\)$' \| \ + X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X"$as_myself" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + srcdir=$ac_confdir + if test ! -r "$srcdir/$ac_unique_file"; then + srcdir=.. + fi +else + ac_srcdir_defaulted=no +fi +if test ! -r "$srcdir/$ac_unique_file"; then + test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." + as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" +fi +ac_msg="sources are in $srcdir, but 'cd $srcdir' does not work" +ac_abs_confdir=`( + cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" + pwd)` +# When building in place, set srcdir=. +if test "$ac_abs_confdir" = "$ac_pwd"; then + srcdir=. +fi +# Remove unnecessary trailing slashes from srcdir. +# Double slashes in file names in object file debugging info +# mess up M-x gdb in Emacs. +case $srcdir in +*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; +esac +for ac_var in $ac_precious_vars; do + eval ac_env_${ac_var}_set=\${${ac_var}+set} + eval ac_env_${ac_var}_value=\$${ac_var} + eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} + eval ac_cv_env_${ac_var}_value=\$${ac_var} +done + +# +# Report the --help message. +# +if test "$ac_init_help" = "long"; then + # Omit some internal or obsolete options to make the list less imposing. + # This message is too long to be a string in the A/UX 3.1 sh. + cat <<_ACEOF +'configure' configures amuse-metisse 1.0 to adapt to many kinds of systems. + +Usage: $0 [OPTION]... [VAR=VALUE]... 
+ +To assign environment variables (e.g., CC, CFLAGS...), specify them as +VAR=VALUE. See below for descriptions of some of the useful variables. + +Defaults for the options are specified in brackets. + +Configuration: + -h, --help display this help and exit + --help=short display options specific to this package + --help=recursive display the short help of all the included packages + -V, --version display version information and exit + -q, --quiet, --silent do not print 'checking ...' messages + --cache-file=FILE cache test results in FILE [disabled] + -C, --config-cache alias for '--cache-file=config.cache' + -n, --no-create do not create output files + --srcdir=DIR find the sources in DIR [configure dir or '..'] + +Installation directories: + --prefix=PREFIX install architecture-independent files in PREFIX + [$ac_default_prefix] + --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX + [PREFIX] + +By default, 'make install' will install all the files in +'$ac_default_prefix/bin', '$ac_default_prefix/lib' etc. You can specify +an installation prefix other than '$ac_default_prefix' using '--prefix', +for instance '--prefix=\$HOME'. + +For better control, use the options below. 
+ +Fine tuning of the installation directories: + --bindir=DIR user executables [EPREFIX/bin] + --sbindir=DIR system admin executables [EPREFIX/sbin] + --libexecdir=DIR program executables [EPREFIX/libexec] + --sysconfdir=DIR read-only single-machine data [PREFIX/etc] + --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] + --localstatedir=DIR modifiable single-machine data [PREFIX/var] + --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run] + --libdir=DIR object code libraries [EPREFIX/lib] + --includedir=DIR C header files [PREFIX/include] + --oldincludedir=DIR C header files for non-gcc [/usr/include] + --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] + --datadir=DIR read-only architecture-independent data [DATAROOTDIR] + --infodir=DIR info documentation [DATAROOTDIR/info] + --localedir=DIR locale-dependent data [DATAROOTDIR/locale] + --mandir=DIR man documentation [DATAROOTDIR/man] + --docdir=DIR documentation root [DATAROOTDIR/doc/amuse-metisse] + --htmldir=DIR html documentation [DOCDIR] + --dvidir=DIR dvi documentation [DOCDIR] + --pdfdir=DIR pdf documentation [DOCDIR] + --psdir=DIR ps documentation [DOCDIR] +_ACEOF + + cat <<\_ACEOF + +System types: + --build=BUILD configure for building on BUILD [guessed] + --host=HOST cross-compile to build programs to run on HOST [BUILD] + --target=TARGET configure for building compilers for TARGET [HOST] +_ACEOF +fi + +if test -n "$ac_init_help"; then + case $ac_init_help in + short | recursive ) echo "Configuration of amuse-metisse 1.0:";; + esac + cat <<\_ACEOF + +Some influential environment variables: + FC Fortran compiler command + FCFLAGS Fortran compiler flags + LDFLAGS linker flags, e.g. -L if you have libraries in a + nonstandard directory + LIBS libraries to pass to the linker, e.g. -l + CC C compiler command + CFLAGS C compiler flags + CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. 
-I if + you have headers in a nonstandard directory + PKG_CONFIG path to pkg-config utility + PKG_CONFIG_PATH + directories to add to pkg-config's search path + PKG_CONFIG_LIBDIR + path overriding pkg-config's built-in search path + STOPCOND_CFLAGS + C compiler flags for STOPCOND, overriding pkg-config + STOPCOND_LIBS + linker flags for STOPCOND, overriding pkg-config + STOPCONDMPI_CFLAGS + C compiler flags for STOPCONDMPI, overriding pkg-config + STOPCONDMPI_LIBS + linker flags for STOPCONDMPI, overriding pkg-config + AMUSE_MPI_CFLAGS + C compiler flags for AMUSE_MPI, overriding pkg-config + AMUSE_MPI_LIBS + linker flags for AMUSE_MPI, overriding pkg-config + FORSOCKETS_CFLAGS + C compiler flags for FORSOCKETS, overriding pkg-config + FORSOCKETS_LIBS + linker flags for FORSOCKETS, overriding pkg-config + SIMPLE_HASH_CFLAGS + C compiler flags for SIMPLE_HASH, overriding pkg-config + SIMPLE_HASH_LIBS + linker flags for SIMPLE_HASH, overriding pkg-config + G6LIB_CFLAGS + C compiler flags for G6LIB, overriding pkg-config + G6LIB_LIBS linker flags for G6LIB, overriding pkg-config + SAPPORO_LIGHT_CFLAGS + C compiler flags for SAPPORO_LIGHT, overriding pkg-config + SAPPORO_LIGHT_LIBS + linker flags for SAPPORO_LIGHT, overriding pkg-config + MPIFC MPI Fortran compiler command + +Use these variables to override the choices made by 'configure' or to help +it to find libraries and programs with nonstandard names/locations. + +Report bugs to the package provider. +_ACEOF +ac_status=$? +fi + +if test "$ac_init_help" = "recursive"; then + # If there are subdirs, report their specific --help. + for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue + test -d "$ac_dir" || + { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || + continue + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`printf "%s\n" "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." 
for each directory in $ac_dir_suffix. + ac_top_builddir_sub=`printf "%s\n" "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. + ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. + ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + cd "$ac_dir" || { ac_status=$?; continue; } + # Check for configure.gnu first; this name is used for a wrapper for + # Metaconfig's "Configure" on case-insensitive file systems. + if test -f "$ac_srcdir/configure.gnu"; then + echo && + $SHELL "$ac_srcdir/configure.gnu" --help=recursive + elif test -f "$ac_srcdir/configure"; then + echo && + $SHELL "$ac_srcdir/configure" --help=recursive + else + printf "%s\n" "$as_me: WARNING: no configuration information is in $ac_dir" >&2 + fi || ac_status=$? + cd "$ac_pwd" || { ac_status=$?; break; } + done +fi + +test -n "$ac_init_help" && exit $ac_status +if $ac_init_version; then + cat <<\_ACEOF +amuse-metisse configure 1.0 +generated by GNU Autoconf 2.72 + +Copyright (C) 2023 Free Software Foundation, Inc. +This configure script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it. +_ACEOF + exit +fi + +## ------------------------ ## +## Autoconf initialization. 
## +## ------------------------ ## + +# ac_fn_fc_try_compile LINENO +# --------------------------- +# Try to compile conftest.$ac_ext, and return whether this succeeded. +ac_fn_fc_try_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext conftest.beam + if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +printf "%s\n" "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_fc_werror_flag" || + test ! -s conftest.err + } && test -s conftest.$ac_objext +then : + ac_retval=0 +else case e in #( + e) printf "%s\n" "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 ;; +esac +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_fc_try_compile + +# ac_fn_fc_try_link LINENO +# ------------------------ +# Try to link conftest.$ac_ext, and return whether this succeeded. +ac_fn_fc_try_link () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext conftest.beam conftest$ac_exeext + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +printf "%s\n" "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_fc_werror_flag" || + test ! -s conftest.err + } && test -s conftest$ac_exeext && { + test "$cross_compiling" = yes || + test -x conftest$ac_exeext + } +then : + ac_retval=0 +else case e in #( + e) printf "%s\n" "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 ;; +esac +fi + # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information + # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would + # interfere with the next link command; also delete a directory that is + # left behind by Apple's compiler. We do this before executing the actions. + rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_fc_try_link + +# ac_fn_c_try_compile LINENO +# -------------------------- +# Try to compile conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext conftest.beam + if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +printf "%s\n" "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_c_werror_flag" || + test ! 
-s conftest.err + } && test -s conftest.$ac_objext +then : + ac_retval=0 +else case e in #( + e) printf "%s\n" "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 ;; +esac +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_compile + +# ac_fn_c_try_link LINENO +# ----------------------- +# Try to link conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_link () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext conftest.beam conftest$ac_exeext + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +printf "%s\n" "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_c_werror_flag" || + test ! -s conftest.err + } && test -s conftest$ac_exeext && { + test "$cross_compiling" = yes || + test -x conftest$ac_exeext + } +then : + ac_retval=0 +else case e in #( + e) printf "%s\n" "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 ;; +esac +fi + # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information + # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would + # interfere with the next link command; also delete a directory that is + # left behind by Apple's compiler. We do this before executing the actions. 
+ rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_link +ac_configure_args_raw= +for ac_arg +do + case $ac_arg in + *\'*) + ac_arg=`printf "%s\n" "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + as_fn_append ac_configure_args_raw " '$ac_arg'" +done + +case $ac_configure_args_raw in + *$as_nl*) + ac_safe_unquote= ;; + *) + ac_unsafe_z='|&;<>()$`\\"*?[ '' ' # This string ends in space, tab. + ac_unsafe_a="$ac_unsafe_z#~" + ac_safe_unquote="s/ '\\([^$ac_unsafe_a][^$ac_unsafe_z]*\\)'/ \\1/g" + ac_configure_args_raw=` printf "%s\n" "$ac_configure_args_raw" | sed "$ac_safe_unquote"`;; +esac + +cat >config.log <<_ACEOF +This file contains any messages produced by compilers while +running configure, to aid debugging if configure makes a mistake. + +It was created by amuse-metisse $as_me 1.0, which was +generated by GNU Autoconf 2.72. Invocation command line was + + $ $0$ac_configure_args_raw + +_ACEOF +exec 5>>config.log +{ +cat <<_ASUNAME +## --------- ## +## Platform. 
## +## --------- ## + +hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` +uname -m = `(uname -m) 2>/dev/null || echo unknown` +uname -r = `(uname -r) 2>/dev/null || echo unknown` +uname -s = `(uname -s) 2>/dev/null || echo unknown` +uname -v = `(uname -v) 2>/dev/null || echo unknown` + +/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` +/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` + +/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` +/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` +/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` +/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` +/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` +/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` +/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` + +_ASUNAME + +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + printf "%s\n" "PATH: $as_dir" + done +IFS=$as_save_IFS + +} >&5 + +cat >&5 <<_ACEOF + + +## ----------- ## +## Core tests. ## +## ----------- ## + +_ACEOF + + +# Keep a trace of the command line. +# Strip out --no-create and --no-recursion so they do not pile up. +# Strip out --silent because we don't want to record it for future runs. +# Also quote any args containing shell meta-characters. +# Make two passes to allow for proper duplicate-argument suppression. 
+ac_configure_args= +ac_configure_args0= +ac_configure_args1= +ac_must_keep_next=false +for ac_pass in 1 2 +do + for ac_arg + do + case $ac_arg in + -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + continue ;; + *\'*) + ac_arg=`printf "%s\n" "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + case $ac_pass in + 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; + 2) + as_fn_append ac_configure_args1 " '$ac_arg'" + if test $ac_must_keep_next = true; then + ac_must_keep_next=false # Got value, back to normal. + else + case $ac_arg in + *=* | --config-cache | -C | -disable-* | --disable-* \ + | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ + | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ + | -with-* | --with-* | -without-* | --without-* | --x) + case "$ac_configure_args0 " in + "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; + esac + ;; + -* ) ac_must_keep_next=true ;; + esac + fi + as_fn_append ac_configure_args " '$ac_arg'" + ;; + esac + done +done +{ ac_configure_args0=; unset ac_configure_args0;} +{ ac_configure_args1=; unset ac_configure_args1;} + +# When interrupted or exit'd, cleanup temporary files, and complete +# config.log. We remove comments because anyway the quotes in there +# would cause problems or look ugly. +# WARNING: Use '\'' to represent an apostrophe within the trap. +# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. +trap 'exit_status=$? + # Sanitize IFS. + IFS=" "" $as_nl" + # Save into config.log some information that might help in debugging. + { + echo + + printf "%s\n" "## ---------------- ## +## Cache variables. 
## +## ---------------- ##" + echo + # The following way of writing the cache mishandles newlines in values, +( + for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + (set) 2>&1 | + case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + sed -n \ + "s/'\''/'\''\\\\'\'''\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" + ;; #( + *) + sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) + echo + + printf "%s\n" "## ----------------- ## +## Output variables. ## +## ----------------- ##" + echo + for ac_var in $ac_subst_vars + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`printf "%s\n" "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + printf "%s\n" "$ac_var='\''$ac_val'\''" + done | sort + echo + + if test -n "$ac_subst_files"; then + printf "%s\n" "## ------------------- ## +## File substitutions. ## +## ------------------- ##" + echo + for ac_var in $ac_subst_files + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`printf "%s\n" "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + printf "%s\n" "$ac_var='\''$ac_val'\''" + done | sort + echo + fi + + if test -s confdefs.h; then + printf "%s\n" "## ----------- ## +## confdefs.h. 
## +## ----------- ##" + echo + cat confdefs.h + echo + fi + test "$ac_signal" != 0 && + printf "%s\n" "$as_me: caught signal $ac_signal" + printf "%s\n" "$as_me: exit $exit_status" + } >&5 + rm -f core *.core core.conftest.* && + rm -f -r conftest* confdefs* conf$$* $ac_clean_files && + exit $exit_status +' 0 +for ac_signal in 1 2 13 15; do + trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal +done +ac_signal=0 + +# confdefs.h avoids OS command line length limits that DEFS can exceed. +rm -f -r conftest* confdefs.h + +printf "%s\n" "/* confdefs.h */" > confdefs.h + +# Predefined preprocessor variables. + +printf "%s\n" "#define PACKAGE_NAME \"$PACKAGE_NAME\"" >>confdefs.h + +printf "%s\n" "#define PACKAGE_TARNAME \"$PACKAGE_TARNAME\"" >>confdefs.h + +printf "%s\n" "#define PACKAGE_VERSION \"$PACKAGE_VERSION\"" >>confdefs.h + +printf "%s\n" "#define PACKAGE_STRING \"$PACKAGE_STRING\"" >>confdefs.h + +printf "%s\n" "#define PACKAGE_BUGREPORT \"$PACKAGE_BUGREPORT\"" >>confdefs.h + +printf "%s\n" "#define PACKAGE_URL \"$PACKAGE_URL\"" >>confdefs.h + + +# Let the site file select an alternate cache file if it wants to. +# Prefer an explicitly selected file to automatically selected ones. +if test -n "$CONFIG_SITE"; then + ac_site_files="$CONFIG_SITE" +elif test "x$prefix" != xNONE; then + ac_site_files="$prefix/share/config.site $prefix/etc/config.site" +else + ac_site_files="$ac_default_prefix/share/config.site $ac_default_prefix/etc/config.site" +fi + +for ac_site_file in $ac_site_files +do + case $ac_site_file in #( + */*) : + ;; #( + *) : + ac_site_file=./$ac_site_file ;; +esac + if test -f "$ac_site_file" && test -r "$ac_site_file"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 +printf "%s\n" "$as_me: loading site script $ac_site_file" >&6;} + sed 's/^/| /' "$ac_site_file" >&5 + . 
"$ac_site_file" \ + || { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in '$ac_pwd':" >&5 +printf "%s\n" "$as_me: error: in '$ac_pwd':" >&2;} +as_fn_error $? "failed to load site script $ac_site_file +See 'config.log' for more details" "$LINENO" 5; } + fi +done + +if test -r "$cache_file"; then + # Some versions of bash will fail to source /dev/null (special files + # actually), so we avoid doing that. DJGPP emulates it as a regular file. + if test /dev/null != "$cache_file" && test -f "$cache_file"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 +printf "%s\n" "$as_me: loading cache $cache_file" >&6;} + case $cache_file in + [\\/]* | ?:[\\/]* ) . "$cache_file";; + *) . "./$cache_file";; + esac + fi +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 +printf "%s\n" "$as_me: creating cache $cache_file" >&6;} + >$cache_file +fi + +# Test code for whether the C compiler supports C89 (global declarations) +ac_c_conftest_c89_globals=' +/* Does the compiler advertise C89 conformance? + Do not test the value of __STDC__, because some compilers set it to 0 + while being otherwise adequately conformant. */ +#if !defined __STDC__ +# error "Compiler does not advertise C89 conformance" +#endif + +#include +#include +struct stat; +/* Most of the following tests are stolen from RCS 5.7 src/conf.sh. */ +struct buf { int x; }; +struct buf * (*rcsopen) (struct buf *, struct stat *, int); +static char *e (char **p, int i) +{ + return p[i]; +} +static char *f (char * (*g) (char **, int), char **p, ...) +{ + char *s; + va_list v; + va_start (v,p); + s = g (p, va_arg (v,int)); + va_end (v); + return s; +} + +/* C89 style stringification. */ +#define noexpand_stringify(a) #a +const char *stringified = noexpand_stringify(arbitrary+token=sequence); + +/* C89 style token pasting. Exercises some of the corner cases that + e.g. old MSVC gets wrong, but not very hard. 
*/ +#define noexpand_concat(a,b) a##b +#define expand_concat(a,b) noexpand_concat(a,b) +extern int vA; +extern int vbee; +#define aye A +#define bee B +int *pvA = &expand_concat(v,aye); +int *pvbee = &noexpand_concat(v,bee); + +/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has + function prototypes and stuff, but not \xHH hex character constants. + These do not provoke an error unfortunately, instead are silently treated + as an "x". The following induces an error, until -std is added to get + proper ANSI mode. Curiously \x00 != x always comes out true, for an + array size at least. It is necessary to write \x00 == 0 to get something + that is true only with -std. */ +int osf4_cc_array ['\''\x00'\'' == 0 ? 1 : -1]; + +/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters + inside strings and character constants. */ +#define FOO(x) '\''x'\'' +int xlc6_cc_array[FOO(a) == '\''x'\'' ? 1 : -1]; + +int test (int i, double x); +struct s1 {int (*f) (int a);}; +struct s2 {int (*f) (double a);}; +int pairnames (int, char **, int *(*)(struct buf *, struct stat *, int), + int, int);' + +# Test code for whether the C compiler supports C89 (body of main). +ac_c_conftest_c89_main=' +ok |= (argc == 0 || f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]); +' + +# Test code for whether the C compiler supports C99 (global declarations) +ac_c_conftest_c99_globals=' +/* Does the compiler advertise C99 conformance? */ +#if !defined __STDC_VERSION__ || __STDC_VERSION__ < 199901L +# error "Compiler does not advertise C99 conformance" +#endif + +// See if C++-style comments work. + +#include +extern int puts (const char *); +extern int printf (const char *, ...); +extern int dprintf (int, const char *, ...); +extern void *malloc (size_t); +extern void free (void *); + +// Check varargs macros. These examples are taken from C99 6.10.3.5. +// dprintf is used instead of fprintf to avoid needing to declare +// FILE and stderr. +#define debug(...) 
dprintf (2, __VA_ARGS__) +#define showlist(...) puts (#__VA_ARGS__) +#define report(test,...) ((test) ? puts (#test) : printf (__VA_ARGS__)) +static void +test_varargs_macros (void) +{ + int x = 1234; + int y = 5678; + debug ("Flag"); + debug ("X = %d\n", x); + showlist (The first, second, and third items.); + report (x>y, "x is %d but y is %d", x, y); +} + +// Check long long types. +#define BIG64 18446744073709551615ull +#define BIG32 4294967295ul +#define BIG_OK (BIG64 / BIG32 == 4294967297ull && BIG64 % BIG32 == 0) +#if !BIG_OK + #error "your preprocessor is broken" +#endif +#if BIG_OK +#else + #error "your preprocessor is broken" +#endif +static long long int bignum = -9223372036854775807LL; +static unsigned long long int ubignum = BIG64; + +struct incomplete_array +{ + int datasize; + double data[]; +}; + +struct named_init { + int number; + const wchar_t *name; + double average; +}; + +typedef const char *ccp; + +static inline int +test_restrict (ccp restrict text) +{ + // Iterate through items via the restricted pointer. + // Also check for declarations in for loops. + for (unsigned int i = 0; *(text+i) != '\''\0'\''; ++i) + continue; + return 0; +} + +// Check varargs and va_copy. +static bool +test_varargs (const char *format, ...) +{ + va_list args; + va_start (args, format); + va_list args_copy; + va_copy (args_copy, args); + + const char *str = ""; + int number = 0; + float fnumber = 0; + + while (*format) + { + switch (*format++) + { + case '\''s'\'': // string + str = va_arg (args_copy, const char *); + break; + case '\''d'\'': // int + number = va_arg (args_copy, int); + break; + case '\''f'\'': // float + fnumber = va_arg (args_copy, double); + break; + default: + break; + } + } + va_end (args_copy); + va_end (args); + + return *str && number && fnumber; +} +' + +# Test code for whether the C compiler supports C99 (body of main). +ac_c_conftest_c99_main=' + // Check bool. + _Bool success = false; + success |= (argc != 0); + + // Check restrict. 
+ if (test_restrict ("String literal") == 0) + success = true; + char *restrict newvar = "Another string"; + + // Check varargs. + success &= test_varargs ("s, d'\'' f .", "string", 65, 34.234); + test_varargs_macros (); + + // Check flexible array members. + struct incomplete_array *ia = + malloc (sizeof (struct incomplete_array) + (sizeof (double) * 10)); + ia->datasize = 10; + for (int i = 0; i < ia->datasize; ++i) + ia->data[i] = i * 1.234; + // Work around memory leak warnings. + free (ia); + + // Check named initializers. + struct named_init ni = { + .number = 34, + .name = L"Test wide string", + .average = 543.34343, + }; + + ni.number = 58; + + int dynamic_array[ni.number]; + dynamic_array[0] = argv[0][0]; + dynamic_array[ni.number - 1] = 543; + + // work around unused variable warnings + ok |= (!success || bignum == 0LL || ubignum == 0uLL || newvar[0] == '\''x'\'' + || dynamic_array[ni.number - 1] != 543); +' + +# Test code for whether the C compiler supports C11 (global declarations) +ac_c_conftest_c11_globals=' +/* Does the compiler advertise C11 conformance? */ +#if !defined __STDC_VERSION__ || __STDC_VERSION__ < 201112L +# error "Compiler does not advertise C11 conformance" +#endif + +// Check _Alignas. +char _Alignas (double) aligned_as_double; +char _Alignas (0) no_special_alignment; +extern char aligned_as_int; +char _Alignas (0) _Alignas (int) aligned_as_int; + +// Check _Alignof. +enum +{ + int_alignment = _Alignof (int), + int_array_alignment = _Alignof (int[100]), + char_alignment = _Alignof (char) +}; +_Static_assert (0 < -_Alignof (int), "_Alignof is signed"); + +// Check _Noreturn. +int _Noreturn does_not_return (void) { for (;;) continue; } + +// Check _Static_assert. +struct test_static_assert +{ + int x; + _Static_assert (sizeof (int) <= sizeof (long int), + "_Static_assert does not work in struct"); + long int y; +}; + +// Check UTF-8 literals. +#define u8 syntax error! 
+char const utf8_literal[] = u8"happens to be ASCII" "another string"; + +// Check duplicate typedefs. +typedef long *long_ptr; +typedef long int *long_ptr; +typedef long_ptr long_ptr; + +// Anonymous structures and unions -- taken from C11 6.7.2.1 Example 1. +struct anonymous +{ + union { + struct { int i; int j; }; + struct { int k; long int l; } w; + }; + int m; +} v1; +' + +# Test code for whether the C compiler supports C11 (body of main). +ac_c_conftest_c11_main=' + _Static_assert ((offsetof (struct anonymous, i) + == offsetof (struct anonymous, w.k)), + "Anonymous union alignment botch"); + v1.i = 2; + v1.w.k = 5; + ok |= v1.i != 5; +' + +# Test code for whether the C compiler supports C11 (complete). +ac_c_conftest_c11_program="${ac_c_conftest_c89_globals} +${ac_c_conftest_c99_globals} +${ac_c_conftest_c11_globals} + +int +main (int argc, char **argv) +{ + int ok = 0; + ${ac_c_conftest_c89_main} + ${ac_c_conftest_c99_main} + ${ac_c_conftest_c11_main} + return ok; +} +" + +# Test code for whether the C compiler supports C99 (complete). +ac_c_conftest_c99_program="${ac_c_conftest_c89_globals} +${ac_c_conftest_c99_globals} + +int +main (int argc, char **argv) +{ + int ok = 0; + ${ac_c_conftest_c89_main} + ${ac_c_conftest_c99_main} + return ok; +} +" + +# Test code for whether the C compiler supports C89 (complete). +ac_c_conftest_c89_program="${ac_c_conftest_c89_globals} + +int +main (int argc, char **argv) +{ + int ok = 0; + ${ac_c_conftest_c89_main} + return ok; +} +" + + +# Auxiliary files required by this configure script. +ac_aux_files="config.guess config.sub" + +# Locations in which to look for auxiliary files. +ac_aux_dir_candidates="${srcdir}/shared" + +# Search for a directory containing all of the required auxiliary files, +# $ac_aux_files, from the $PATH-style list $ac_aux_dir_candidates. 
+# If we don't find one directory that contains all the files we need, +# we report the set of missing files from the *first* directory in +# $ac_aux_dir_candidates and give up. +ac_missing_aux_files="" +ac_first_candidate=: +printf "%s\n" "$as_me:${as_lineno-$LINENO}: looking for aux files: $ac_aux_files" >&5 +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +as_found=false +for as_dir in $ac_aux_dir_candidates +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + as_found=: + + printf "%s\n" "$as_me:${as_lineno-$LINENO}: trying $as_dir" >&5 + ac_aux_dir_found=yes + ac_install_sh= + for ac_aux in $ac_aux_files + do + # As a special case, if "install-sh" is required, that requirement + # can be satisfied by any of "install-sh", "install.sh", or "shtool", + # and $ac_install_sh is set appropriately for whichever one is found. + if test x"$ac_aux" = x"install-sh" + then + if test -f "${as_dir}install-sh"; then + printf "%s\n" "$as_me:${as_lineno-$LINENO}: ${as_dir}install-sh found" >&5 + ac_install_sh="${as_dir}install-sh -c" + elif test -f "${as_dir}install.sh"; then + printf "%s\n" "$as_me:${as_lineno-$LINENO}: ${as_dir}install.sh found" >&5 + ac_install_sh="${as_dir}install.sh -c" + elif test -f "${as_dir}shtool"; then + printf "%s\n" "$as_me:${as_lineno-$LINENO}: ${as_dir}shtool found" >&5 + ac_install_sh="${as_dir}shtool install -c" + else + ac_aux_dir_found=no + if $ac_first_candidate; then + ac_missing_aux_files="${ac_missing_aux_files} install-sh" + else + break + fi + fi + else + if test -f "${as_dir}${ac_aux}"; then + printf "%s\n" "$as_me:${as_lineno-$LINENO}: ${as_dir}${ac_aux} found" >&5 + else + ac_aux_dir_found=no + if $ac_first_candidate; then + ac_missing_aux_files="${ac_missing_aux_files} ${ac_aux}" + else + break + fi + fi + fi + done + if test "$ac_aux_dir_found" = yes; then + ac_aux_dir="$as_dir" + break + fi + ac_first_candidate=false + + as_found=false +done +IFS=$as_save_IFS +if $as_found +then : + 
+else case e in #( + e) as_fn_error $? "cannot find required auxiliary files:$ac_missing_aux_files" "$LINENO" 5 ;; +esac +fi + + +# These three variables are undocumented and unsupported, +# and are intended to be withdrawn in a future Autoconf release. +# They can cause serious problems if a builder's source tree is in a directory +# whose full name contains unusual characters. +if test -f "${ac_aux_dir}config.guess"; then + ac_config_guess="$SHELL ${ac_aux_dir}config.guess" +fi +if test -f "${ac_aux_dir}config.sub"; then + ac_config_sub="$SHELL ${ac_aux_dir}config.sub" +fi +if test -f "$ac_aux_dir/configure"; then + ac_configure="$SHELL ${ac_aux_dir}configure" +fi + +# Check that the precious variables saved in the cache have kept the same +# value. +ac_cache_corrupted=false +for ac_var in $ac_precious_vars; do + eval ac_old_set=\$ac_cv_env_${ac_var}_set + eval ac_new_set=\$ac_env_${ac_var}_set + eval ac_old_val=\$ac_cv_env_${ac_var}_value + eval ac_new_val=\$ac_env_${ac_var}_value + case $ac_old_set,$ac_new_set in + set,) + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: '$ac_var' was set to '$ac_old_val' in the previous run" >&5 +printf "%s\n" "$as_me: error: '$ac_var' was set to '$ac_old_val' in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,set) + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: '$ac_var' was not set in the previous run" >&5 +printf "%s\n" "$as_me: error: '$ac_var' was not set in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,);; + *) + if test "x$ac_old_val" != "x$ac_new_val"; then + # differences in whitespace do not lead to failure. 
+ ac_old_val_w=`echo x $ac_old_val` + ac_new_val_w=`echo x $ac_new_val` + if test "$ac_old_val_w" != "$ac_new_val_w"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: '$ac_var' has changed since the previous run:" >&5 +printf "%s\n" "$as_me: error: '$ac_var' has changed since the previous run:" >&2;} + ac_cache_corrupted=: + else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in '$ac_var' since the previous run:" >&5 +printf "%s\n" "$as_me: warning: ignoring whitespace changes in '$ac_var' since the previous run:" >&2;} + eval $ac_var=\$ac_old_val + fi + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: former value: '$ac_old_val'" >&5 +printf "%s\n" "$as_me: former value: '$ac_old_val'" >&2;} + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: current value: '$ac_new_val'" >&5 +printf "%s\n" "$as_me: current value: '$ac_new_val'" >&2;} + fi;; + esac + # Pass precious variables to config.status. + if test "$ac_new_set" = set; then + case $ac_new_val in + *\'*) ac_arg=$ac_var=`printf "%s\n" "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; + *) ac_arg=$ac_var=$ac_new_val ;; + esac + case " $ac_configure_args " in + *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. + *) as_fn_append ac_configure_args " '$ac_arg'" ;; + esac + fi +done +if $ac_cache_corrupted; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in '$ac_pwd':" >&5 +printf "%s\n" "$as_me: error: in '$ac_pwd':" >&2;} + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 +printf "%s\n" "$as_me: error: changes in the environment can compromise the build" >&2;} + as_fn_error $? "run '${MAKE-make} distclean' and/or 'rm $cache_file' + and start over" "$LINENO" 5 +fi +## -------------------- ## +## Main body of script. 
## +## -------------------- ## + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + +# Pick up Conda env or virtualenv + + if test "x$VIRTUAL_ENV" != x +then : + + CFLAGS="$CFLAGS -I${VIRTUAL_ENV}/include" + CXXFLAGS="$CXXFLAGS -I${VIRTUAL_ENV}/include" + FFLAGS="$FFLAGS -I${VIRTUAL_ENV}/include" + FCFLAGS="$FCFLAGS -I${VIRTUAL_ENV}/include" + LDFLAGS="$LDFLAGS -L${VIRTUAL_ENV}/lib -Wl,-rpath,${VIRTUAL_ENV}/lib" + PKG_CONFIG_PATH="$VIRTUAL_ENV/lib/pkgconfig:$PKG_CONFIG_PATH" + +fi + + if test "x$CONDA_PREFIX" != x +then : + + # Conda does not set FCFLAGS, so we copy from FFLAGS here + FCFLAGS="$FFLAGS" + LDFLAGS="$LDFLAGS -L${CONDA_PREFIX}/lib -Wl,-rpath,${CONDA_PREFIX}/lib" + # Conda pkg-config includes this already, but in case we have one from + # the system... + PKG_CONFIG_PATH="$PKG_CONFIG_PATH:${CONDA_PREFIX}/lib/pkgconfig" + +fi + # Needs to be exported or the PKG_CHECK_MODULES macro won't see it + export PKG_CONFIG_PATH + + + +# Set the worker language +ac_ext=${ac_fc_srcext-f} +ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' +ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_fc_compiler_gnu + + +# Set CPU_COUNT + + + + # Make sure we can run config.sub. +$SHELL "${ac_aux_dir}config.sub" sun4 >/dev/null 2>&1 || + as_fn_error $? "cannot run $SHELL ${ac_aux_dir}config.sub" "$LINENO" 5 + +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking build system type" >&5 +printf %s "checking build system type... " >&6; } +if test ${ac_cv_build+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_build_alias=$build_alias +test "x$ac_build_alias" = x && + ac_build_alias=`$SHELL "${ac_aux_dir}config.guess"` +test "x$ac_build_alias" = x && + as_fn_error $? 
"cannot guess build type; you must specify one" "$LINENO" 5 +ac_cv_build=`$SHELL "${ac_aux_dir}config.sub" $ac_build_alias` || + as_fn_error $? "$SHELL ${ac_aux_dir}config.sub $ac_build_alias failed" "$LINENO" 5 + ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5 +printf "%s\n" "$ac_cv_build" >&6; } +case $ac_cv_build in +*-*-*) ;; +*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;; +esac +build=$ac_cv_build +ac_save_IFS=$IFS; IFS='-' +set x $ac_cv_build +shift +build_cpu=$1 +build_vendor=$2 +shift; shift +# Remember, the first character of IFS is used to create $*, +# except with old shells: +build_os=$* +IFS=$ac_save_IFS +case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac + + +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking host system type" >&5 +printf %s "checking host system type... " >&6; } +if test ${ac_cv_host+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test "x$host_alias" = x; then + ac_cv_host=$ac_cv_build +else + ac_cv_host=`$SHELL "${ac_aux_dir}config.sub" $host_alias` || + as_fn_error $? "$SHELL ${ac_aux_dir}config.sub $host_alias failed" "$LINENO" 5 +fi + ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5 +printf "%s\n" "$ac_cv_host" >&6; } +case $ac_cv_host in +*-*-*) ;; +*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;; +esac +host=$ac_cv_host +ac_save_IFS=$IFS; IFS='-' +set x $ac_cv_host +shift +host_cpu=$1 +host_vendor=$2 +shift; shift +# Remember, the first character of IFS is used to create $*, +# except with old shells: +host_os=$* +IFS=$ac_save_IFS +case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac + + +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 +printf %s "checking for grep that handles long lines and -e... 
" >&6; } +if test ${ac_cv_path_GREP+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -z "$GREP"; then + ac_path_GREP_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_prog in grep ggrep + do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_GREP="$as_dir$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_GREP" || continue +# Check for GNU ac_path_GREP and select it if it is found. + # Check for GNU $ac_path_GREP +case `"$ac_path_GREP" --version 2>&1` in #( +*GNU*) + ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; +#( +*) + ac_count=0 + printf %s 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + printf "%s\n" 'GREP' >> "conftest.nl" + "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_GREP_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_GREP="$ac_path_GREP" + ac_path_GREP_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac + + $ac_path_GREP_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_GREP"; then + as_fn_error $? 
"no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 + fi +else + ac_cv_path_GREP=$GREP +fi + ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 +printf "%s\n" "$ac_cv_path_GREP" >&6; } + GREP="$ac_cv_path_GREP" + + +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 +printf %s "checking for egrep... " >&6; } +if test ${ac_cv_path_EGREP+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 + then ac_cv_path_EGREP="$GREP -E" + else + if test -z "$EGREP"; then + ac_path_EGREP_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_prog in egrep + do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_EGREP="$as_dir$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_EGREP" || continue +# Check for GNU ac_path_EGREP and select it if it is found. 
+ # Check for GNU $ac_path_EGREP +case `"$ac_path_EGREP" --version 2>&1` in #( +*GNU*) + ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; +#( +*) + ac_count=0 + printf %s 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + printf "%s\n" 'EGREP' >> "conftest.nl" + "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_EGREP_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_EGREP="$ac_path_EGREP" + ac_path_EGREP_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac + + $ac_path_EGREP_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_EGREP"; then + as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 + fi +else + ac_cv_path_EGREP=$EGREP +fi + + fi ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5 +printf "%s\n" "$ac_cv_path_EGREP" >&6; } + EGREP="$ac_cv_path_EGREP" + + EGREP_TRADITIONAL=$EGREP + ac_cv_path_EGREP_TRADITIONAL=$EGREP + + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking the number of available CPUs" >&5 +printf %s "checking the number of available CPUs... " >&6; } + CPU_COUNT="0" + + # Try generic methods + + # 'getconf' is POSIX utility, but '_NPROCESSORS_ONLN' and + # 'NPROCESSORS_ONLN' are platform-specific + command -v getconf >/dev/null 2>&1 && \ + CPU_COUNT=`getconf _NPROCESSORS_ONLN 2>/dev/null || getconf NPROCESSORS_ONLN 2>/dev/null` || CPU_COUNT="0" + if test "$CPU_COUNT" -gt "0" 2>/dev/null || ! 
command -v nproc >/dev/null 2>&1 +then : + : # empty +else case e in #( + e) # 'nproc' is part of GNU Coreutils and is widely available + CPU_COUNT=`OMP_NUM_THREADS='' nproc 2>/dev/null` || CPU_COUNT=`nproc 2>/dev/null` || CPU_COUNT="0" + ;; +esac +fi + if test "$CPU_COUNT" -gt "0" 2>/dev/null +then : + : # empty +else case e in #( + e) # Try platform-specific preferred methods + case $host_os in #( + *linux*) : + CPU_COUNT=`lscpu -p 2>/dev/null | $EGREP -e '^[0-9]+,' -c` || CPU_COUNT="0" ;; #( + *darwin*) : + CPU_COUNT=`sysctl -n hw.logicalcpu 2>/dev/null` || CPU_COUNT="0" ;; #( + freebsd*) : + command -v sysctl >/dev/null 2>&1 && CPU_COUNT=`sysctl -n kern.smp.cpus 2>/dev/null` || CPU_COUNT="0" ;; #( + netbsd*) : + command -v sysctl >/dev/null 2>&1 && CPU_COUNT=`sysctl -n hw.ncpuonline 2>/dev/null` || CPU_COUNT="0" ;; #( + solaris*) : + command -v psrinfo >/dev/null 2>&1 && CPU_COUNT=`psrinfo 2>/dev/null | $EGREP -e '^[0-9].*on-line' -c 2>/dev/null` || CPU_COUNT="0" ;; #( + mingw*) : + CPU_COUNT=`ls -qpU1 /proc/registry/HKEY_LOCAL_MACHINE/HARDWARE/DESCRIPTION/System/CentralProcessor/ 2>/dev/null | $EGREP -e '^[0-9]+/' -c` || CPU_COUNT="0" ;; #( + msys*) : + CPU_COUNT=`ls -qpU1 /proc/registry/HKEY_LOCAL_MACHINE/HARDWARE/DESCRIPTION/System/CentralProcessor/ 2>/dev/null | $EGREP -e '^[0-9]+/' -c` || CPU_COUNT="0" ;; #( + cygwin*) : + CPU_COUNT=`ls -qpU1 /proc/registry/HKEY_LOCAL_MACHINE/HARDWARE/DESCRIPTION/System/CentralProcessor/ 2>/dev/null | $EGREP -e '^[0-9]+/' -c` || CPU_COUNT="0" ;; #( + *) : + ;; +esac ;; +esac +fi + if test "$CPU_COUNT" -gt "0" 2>/dev/null || ! 
command -v sysctl >/dev/null 2>&1 +then : + : # empty +else case e in #( + e) # Try less preferred generic method + # 'hw.ncpu' exist on many platforms, but not on GNU/Linux + CPU_COUNT=`sysctl -n hw.ncpu 2>/dev/null` || CPU_COUNT="0" + ;; +esac +fi + if test "$CPU_COUNT" -gt "0" 2>/dev/null +then : + : # empty +else case e in #( + e) # Try platform-specific fallback methods + # They can be less accurate and slower then preferred methods + case $host_os in #( + *linux*) : + CPU_COUNT=`$EGREP -e '^processor' -c /proc/cpuinfo 2>/dev/null` || CPU_COUNT="0" ;; #( + *darwin*) : + CPU_COUNT=`system_profiler SPHardwareDataType 2>/dev/null | $EGREP -i -e 'number of cores:'|cut -d : -f 2 -s|tr -d ' '` || CPU_COUNT="0" ;; #( + freebsd*) : + CPU_COUNT=`dmesg 2>/dev/null| $EGREP -e '^cpu[0-9]+: '|sort -u|$EGREP -e '^' -c` || CPU_COUNT="0" ;; #( + netbsd*) : + CPU_COUNT=`command -v cpuctl >/dev/null 2>&1 && cpuctl list 2>/dev/null| $EGREP -e '^[0-9]+ .* online ' -c` || \ + CPU_COUNT=`dmesg 2>/dev/null| $EGREP -e '^cpu[0-9]+ at'|sort -u|$EGREP -e '^' -c` || CPU_COUNT="0" ;; #( + solaris*) : + command -v kstat >/dev/null 2>&1 && CPU_COUNT=`kstat -m cpu_info -s state -p 2>/dev/null | $EGREP -c -e 'on-line'` || \ + CPU_COUNT=`kstat -m cpu_info 2>/dev/null | $EGREP -c -e 'module: cpu_info'` || CPU_COUNT="0" ;; #( + mingw*) : + if CPU_COUNT=`reg query 'HKLM\\Hardware\\Description\\System\\CentralProcessor' 2>/dev/null | $EGREP -e '\\\\[0-9]+$' -c` +then : + : # empty +else case e in #( + e) test "$NUMBER_OF_PROCESSORS" -gt "0" 2>/dev/null && CPU_COUNT="$NUMBER_OF_PROCESSORS" ;; +esac +fi ;; #( + msys*) : + test "$NUMBER_OF_PROCESSORS" -gt "0" 2>/dev/null && CPU_COUNT="$NUMBER_OF_PROCESSORS" ;; #( + cygwin*) : + test "$NUMBER_OF_PROCESSORS" -gt "0" 2>/dev/null && CPU_COUNT="$NUMBER_OF_PROCESSORS" ;; #( + *) : + ;; +esac ;; +esac +fi + if test "x$CPU_COUNT" != "x0" && test "$CPU_COUNT" -gt 0 2>/dev/null +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $CPU_COUNT" >&5 
+printf "%s\n" "$CPU_COUNT" >&6; } + +else case e in #( + e) CPU_COUNT="1" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: unable to detect (assuming 1)" >&5 +printf "%s\n" "unable to detect (assuming 1)" >&6; } + ;; +esac +fi + + +# Find the compiler(s) +ac_ext=${ac_fc_srcext-f} +ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' +ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_fc_compiler_gnu +if test -n "$ac_tool_prefix"; then + for ac_prog in gfortran g95 xlf95 f95 fort ifort ifc efc pgfortran pgf95 lf95 ftn nagfor xlf90 f90 pgf90 pghpf epcf90 g77 xlf f77 frt pgf77 cf77 fort77 fl32 af77 + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_FC+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$FC"; then + ac_cv_prog_FC="$FC" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_FC="$ac_tool_prefix$ac_prog" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +FC=$ac_cv_prog_FC +if test -n "$FC"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $FC" >&5 +printf "%s\n" "$FC" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + + test -n "$FC" && break + done +fi +if test -z "$FC"; then + ac_ct_FC=$FC + for ac_prog in gfortran g95 xlf95 f95 fort ifort ifc efc pgfortran pgf95 lf95 ftn nagfor xlf90 f90 pgf90 pghpf epcf90 g77 xlf f77 frt pgf77 cf77 fort77 fl32 af77 +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_ac_ct_FC+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$ac_ct_FC"; then + ac_cv_prog_ac_ct_FC="$ac_ct_FC" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_FC="$ac_prog" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +ac_ct_FC=$ac_cv_prog_ac_ct_FC +if test -n "$ac_ct_FC"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_FC" >&5 +printf "%s\n" "$ac_ct_FC" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + + test -n "$ac_ct_FC" && break +done + + if test "x$ac_ct_FC" = x; then + FC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + FC=$ac_ct_FC + fi +fi + + +# Provide some information about the compiler. +printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for Fortran compiler version" >&5 +set X $ac_compile +ac_compiler=$2 +for ac_option in --version -v -V -qversion; do + { { ac_try="$ac_compiler $ac_option >&5" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +printf "%s\n" "$ac_try_echo"; } >&5 + (eval "$ac_compiler $ac_option >&5") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + sed '10a\ +... rest of stderr output deleted ... + 10q' conftest.err >conftest.er1 + cat conftest.er1 >&5 + fi + rm -f conftest.er1 conftest.err + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; } +done +rm -f a.out + +cat > conftest.$ac_ext <<_ACEOF + program main + + end +_ACEOF +ac_clean_files_save=$ac_clean_files +ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" +# Try to create an executable without -o first, disregard a.out. +# It will help us diagnose broken compilers, and finding out an intuition +# of exeext. +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether the Fortran compiler works" >&5 +printf %s "checking whether the Fortran compiler works... " >&6; } +ac_link_default=`printf "%s\n" "$ac_link" | sed 's/ -o *conftest[^ ]*//'` + +# The possible output files: +ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" + +ac_rmfiles= +for ac_file in $ac_files +do + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; + * ) ac_rmfiles="$ac_rmfiles $ac_file";; + esac +done +rm -f $ac_rmfiles + +if { { ac_try="$ac_link_default" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +printf "%s\n" "$ac_try_echo"; } >&5 + (eval "$ac_link_default") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } +then : + # Autoconf-2.13 could set the ac_cv_exeext variable to 'no'. +# So ignore a value of 'no', otherwise this would lead to 'EXEEXT = no' +# in a Makefile. We should not override ac_cv_exeext if it was cached, +# so that the user can short-circuit this test for compilers unknown to +# Autoconf. +for ac_file in $ac_files '' +do + test -f "$ac_file" || continue + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) + ;; + [ab].out ) + # We found the default executable, but exeext='' is most + # certainly right. 
+ break;; + *.* ) + if test ${ac_cv_exeext+y} && test "$ac_cv_exeext" != no; + then :; else + ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` + fi + # We set ac_cv_exeext here because the later test for it is not + # safe: cross compilers may not add the suffix if given an '-o' + # argument, so we may need to know it at that point already. + # Even if this section looks crufty: it has the advantage of + # actually working. + break;; + * ) + break;; + esac +done +test "$ac_cv_exeext" = no && ac_cv_exeext= + +else case e in #( + e) ac_file='' ;; +esac +fi +if test -z "$ac_file" +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +printf "%s\n" "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + +{ { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in '$ac_pwd':" >&5 +printf "%s\n" "$as_me: error: in '$ac_pwd':" >&2;} +as_fn_error 77 "Fortran compiler cannot create executables +See 'config.log' for more details" "$LINENO" 5; } +else case e in #( + e) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for Fortran compiler default output file name" >&5 +printf %s "checking for Fortran compiler default output file name... " >&6; } +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 +printf "%s\n" "$ac_file" >&6; } +ac_exeext=$ac_cv_exeext + +rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out +ac_clean_files=$ac_clean_files_save +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 +printf %s "checking for suffix of executables... " >&6; } +if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +printf "%s\n" "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? 
+ printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } +then : + # If both 'conftest.exe' and 'conftest' are 'present' (well, observable) +# catch 'conftest.exe'. For instance with Cygwin, 'ls conftest' will +# work properly (i.e., refer to 'conftest.exe'), while it won't with +# 'rm'. +for ac_file in conftest.exe conftest conftest.*; do + test -f "$ac_file" || continue + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; + *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` + break;; + * ) break;; + esac +done +else case e in #( + e) { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in '$ac_pwd':" >&5 +printf "%s\n" "$as_me: error: in '$ac_pwd':" >&2;} +as_fn_error $? "cannot compute suffix of executables: cannot compile and link +See 'config.log' for more details" "$LINENO" 5; } ;; +esac +fi +rm -f conftest conftest$ac_cv_exeext +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 +printf "%s\n" "$ac_cv_exeext" >&6; } + +rm -f conftest.$ac_ext +EXEEXT=$ac_cv_exeext +ac_exeext=$EXEEXT +cat > conftest.$ac_ext <<_ACEOF + program main + open(unit=9,file='conftest.out') + close(unit=9) + + end +_ACEOF +ac_clean_files="$ac_clean_files conftest.out" +# Check that the compiler produces executables we can run. If not, either +# the compiler is broken, or we cross compile. +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 +printf %s "checking whether we are cross compiling... " >&6; } +if test "$cross_compiling" != yes; then + { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +printf "%s\n" "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; } + if { ac_try='./conftest$ac_cv_exeext' + { { case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +printf "%s\n" "$ac_try_echo"; } >&5 + (eval "$ac_try") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; }; then + cross_compiling=no + else + if test "$cross_compiling" = maybe; then + cross_compiling=yes + else + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in '$ac_pwd':" >&5 +printf "%s\n" "$as_me: error: in '$ac_pwd':" >&2;} +as_fn_error 77 "cannot run Fortran compiled programs. +If you meant to cross compile, use '--host'. +See 'config.log' for more details" "$LINENO" 5; } + fi + fi +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 +printf "%s\n" "$cross_compiling" >&6; } + +rm -f conftest.$ac_ext conftest$ac_cv_exeext \ + conftest.o conftest.obj conftest.out +ac_clean_files=$ac_clean_files_save +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 +printf %s "checking for suffix of object files... " >&6; } +if test ${ac_cv_objext+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) cat > conftest.$ac_ext <<_ACEOF + program main + + end +_ACEOF +rm -f conftest.o conftest.obj +if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +printf "%s\n" "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; } +then : + for ac_file in conftest.o conftest.obj conftest.*; do + test -f "$ac_file" || continue; + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; + *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` + break;; + esac +done +else case e in #( + e) printf "%s\n" "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + +{ { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in '$ac_pwd':" >&5 +printf "%s\n" "$as_me: error: in '$ac_pwd':" >&2;} +as_fn_error $? "cannot compute suffix of object files: cannot compile +See 'config.log' for more details" "$LINENO" 5; } ;; +esac +fi +rm -f conftest.$ac_cv_objext conftest.$ac_ext ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 +printf "%s\n" "$ac_cv_objext" >&6; } +OBJEXT=$ac_cv_objext +ac_objext=$OBJEXT +# If we don't use '.F' as extension, the preprocessor is not run on the +# input file. (Note that this only needs to work for GNU compilers.) +ac_save_ext=$ac_ext +ac_ext=F +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether the compiler supports GNU Fortran" >&5 +printf %s "checking whether the compiler supports GNU Fortran... 
" >&6; } +if test ${ac_cv_fc_compiler_gnu+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) cat > conftest.$ac_ext <<_ACEOF + program main +#ifndef __GNUC__ + choke me +#endif + + end +_ACEOF +if ac_fn_fc_try_compile "$LINENO" +then : + ac_compiler_gnu=yes +else case e in #( + e) ac_compiler_gnu=no ;; +esac +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext +ac_cv_fc_compiler_gnu=$ac_compiler_gnu + ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_fc_compiler_gnu" >&5 +printf "%s\n" "$ac_cv_fc_compiler_gnu" >&6; } +ac_compiler_gnu=$ac_cv_fc_compiler_gnu + +ac_ext=$ac_save_ext +ac_test_FCFLAGS=${FCFLAGS+y} +ac_save_FCFLAGS=$FCFLAGS +FCFLAGS= +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether $FC accepts -g" >&5 +printf %s "checking whether $FC accepts -g... " >&6; } +if test ${ac_cv_prog_fc_g+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) FCFLAGS=-g +cat > conftest.$ac_ext <<_ACEOF + program main + + end +_ACEOF +if ac_fn_fc_try_compile "$LINENO" +then : + ac_cv_prog_fc_g=yes +else case e in #( + e) ac_cv_prog_fc_g=no ;; +esac +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext + ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_fc_g" >&5 +printf "%s\n" "$ac_cv_prog_fc_g" >&6; } +if test $ac_test_FCFLAGS; then + FCFLAGS=$ac_save_FCFLAGS +elif test $ac_cv_prog_fc_g = yes; then + if test "x$ac_cv_fc_compiler_gnu" = xyes; then + FCFLAGS="-g -O2" + else + FCFLAGS="-g" + fi +else + if test "x$ac_cv_fc_compiler_gnu" = xyes; then + FCFLAGS="-O2" + else + FCFLAGS= + fi +fi + +if test $ac_compiler_gnu = yes; then + GFC=yes +else + GFC= +fi +ac_ext=${ac_fc_srcext-f} +ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' +ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_fc_compiler_gnu + + + +ac_ext=${ac_fc_srcext-f} +ac_compile='$FC -c 
$FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' +ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_fc_compiler_gnu + +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking if the fortran compiler supports iso c binding" >&5 +printf %s "checking if the fortran compiler supports iso c binding... " >&6; } +cat > conftest.$ac_ext <<_ACEOF + + + program conftest + use ISO_C_BINDING + integer, dimension(10) :: n + end + + +_ACEOF +if ac_fn_fc_try_link "$LINENO" +then : + + FC_ISO_C_BINDINGS="yes" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ax_cv_fc_iso_c_bindings" >&5 +printf "%s\n" "$ax_cv_fc_iso_c_bindings" >&6; } + +else case e in #( + e) + FC_ISO_C_BINDINGS="no" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no, fortran codes and sockets or embedding will not work." >&5 +printf "%s\n" "no, fortran codes and sockets or embedding will not work." >&6; } + ;; +esac +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam \ + conftest$ac_exeext conftest.$ac_ext +# The intel compiler sometimes generates these work.pc and .pcl files +rm -f work.pc work.pcl +ac_ext=${ac_fc_srcext-f} +ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' +ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_fc_compiler_gnu + + + +# Find tools for creating static libraries +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking target system type" >&5 +printf %s "checking target system type... " >&6; } +if test ${ac_cv_target+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test "x$target_alias" = x; then + ac_cv_target=$ac_cv_host +else + ac_cv_target=`$SHELL "${ac_aux_dir}config.sub" $target_alias` || + as_fn_error $? 
"$SHELL ${ac_aux_dir}config.sub $target_alias failed" "$LINENO" 5 +fi + ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_target" >&5 +printf "%s\n" "$ac_cv_target" >&6; } +case $ac_cv_target in +*-*-*) ;; +*) as_fn_error $? "invalid value of canonical target" "$LINENO" 5;; +esac +target=$ac_cv_target +ac_save_IFS=$IFS; IFS='-' +set x $ac_cv_target +shift +target_cpu=$1 +target_vendor=$2 +shift; shift +# Remember, the first character of IFS is used to create $*, +# except with old shells: +target_os=$* +IFS=$ac_save_IFS +case $target_os in *\ *) target_os=`echo "$target_os" | sed 's/ /-/g'`;; esac + + +# The aliases save the names the user supplied, while $host etc. +# will get canonicalized. +test -n "$target_alias" && + test "$program_prefix$program_suffix$program_transform_name" = \ + NONENONEs,x,x, && + program_prefix=${target_alias}- +# Extract the first word of "$target_alias-ar", so it can be a program name with args. +set dummy $target_alias-ar; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_AR+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$AR"; then + ac_cv_prog_AR="$AR" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_AR="$target_alias-ar" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +AR=$ac_cv_prog_AR +if test -n "$AR"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $AR" >&5 +printf "%s\n" "$AR" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + +if test -z "$ac_cv_prog_AR"; then + if test "$build" = "$target"; then + ac_ct_AR=$AR + # Extract the first word of "ar", so it can be a program name with args. +set dummy ar; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_ac_ct_AR+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$ac_ct_AR"; then + ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_AR="ar" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + test -z "$ac_cv_prog_ac_ct_AR" && ac_cv_prog_ac_ct_AR=":" +fi ;; +esac +fi +ac_ct_AR=$ac_cv_prog_ac_ct_AR +if test -n "$ac_ct_AR"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5 +printf "%s\n" "$ac_ct_AR" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + AR=$ac_ct_AR + else + AR=":" + fi +else + AR="$ac_cv_prog_AR" +fi + +if test $AR = ":" +then : + as_fn_error $? "ar command not found." "$LINENO" 5 +fi + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. +set dummy ${ac_tool_prefix}ranlib; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_RANLIB+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$RANLIB"; then + ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +RANLIB=$ac_cv_prog_RANLIB +if test -n "$RANLIB"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5 +printf "%s\n" "$RANLIB" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_RANLIB"; then + ac_ct_RANLIB=$RANLIB + # Extract the first word of "ranlib", so it can be a program name with args. +set dummy ranlib; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_ac_ct_RANLIB+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$ac_ct_RANLIB"; then + ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_RANLIB="ranlib" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB +if test -n "$ac_ct_RANLIB"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5 +printf "%s\n" "$ac_ct_RANLIB" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + if test "x$ac_ct_RANLIB" = x; then + RANLIB=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + RANLIB=$ac_ct_RANLIB + fi +else + RANLIB="$ac_cv_prog_RANLIB" +fi + +if test $RANLIB = ":" +then : + as_fn_error $? "ranlib command not found." "$LINENO" 5 +fi + + +# Find tools to download and unpack with + + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}wget", so it can be a program name with args. +set dummy ${ac_tool_prefix}wget; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_WGET+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$WGET"; then + ac_cv_prog_WGET="$WGET" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_WGET="${ac_tool_prefix}wget" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +WGET=$ac_cv_prog_WGET +if test -n "$WGET"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $WGET" >&5 +printf "%s\n" "$WGET" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_WGET"; then + ac_ct_WGET=$WGET + # Extract the first word of "wget", so it can be a program name with args. +set dummy wget; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_ac_ct_WGET+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$ac_ct_WGET"; then + ac_cv_prog_ac_ct_WGET="$ac_ct_WGET" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_WGET="wget" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +ac_ct_WGET=$ac_cv_prog_ac_ct_WGET +if test -n "$ac_ct_WGET"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_WGET" >&5 +printf "%s\n" "$ac_ct_WGET" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + if test "x$ac_ct_WGET" = x; then + WGET="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + WGET=$ac_ct_WGET + fi +else + WGET="$ac_cv_prog_WGET" +fi + + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}curl", so it can be a program name with args. +set dummy ${ac_tool_prefix}curl; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_CURL+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$CURL"; then + ac_cv_prog_CURL="$CURL" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_CURL="${ac_tool_prefix}curl" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +CURL=$ac_cv_prog_CURL +if test -n "$CURL"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $CURL" >&5 +printf "%s\n" "$CURL" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_CURL"; then + ac_ct_CURL=$CURL + # Extract the first word of "curl", so it can be a program name with args. +set dummy curl; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_ac_ct_CURL+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$ac_ct_CURL"; then + ac_cv_prog_ac_ct_CURL="$ac_ct_CURL" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CURL="curl" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +ac_ct_CURL=$ac_cv_prog_ac_ct_CURL +if test -n "$ac_ct_CURL"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CURL" >&5 +printf "%s\n" "$ac_ct_CURL" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + if test "x$ac_ct_CURL" = x; then + CURL="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CURL=$ac_ct_CURL + fi +else + CURL="$ac_cv_prog_CURL" +fi + + + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for a wget or curl to download files with" >&5 +printf %s "checking for a wget or curl to download files with... " >&6; } + if test "x$WGET" != "x" + then + # The MESA SDK server rejects wget, this is the official work-around + DOWNLOAD="$WGET --progress=bar:force:noscroll --user-agent='' -O -" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } + else + if test "x$CURL" != "x" + then + DOWNLOAD="$CURL -L" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } + else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + fi + fi + + + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}tar", so it can be a program name with args. 
+set dummy ${ac_tool_prefix}tar; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_TAR+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$TAR"; then + ac_cv_prog_TAR="$TAR" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_TAR="${ac_tool_prefix}tar" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +TAR=$ac_cv_prog_TAR +if test -n "$TAR"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $TAR" >&5 +printf "%s\n" "$TAR" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_TAR"; then + ac_ct_TAR=$TAR + # Extract the first word of "tar", so it can be a program name with args. +set dummy tar; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_ac_ct_TAR+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$ac_ct_TAR"; then + ac_cv_prog_ac_ct_TAR="$ac_ct_TAR" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_TAR="tar" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +ac_ct_TAR=$ac_cv_prog_ac_ct_TAR +if test -n "$ac_ct_TAR"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_TAR" >&5 +printf "%s\n" "$ac_ct_TAR" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + if test "x$ac_ct_TAR" = x; then + TAR="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + TAR=$ac_ct_TAR + fi +else + TAR="$ac_cv_prog_TAR" +fi + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}unzip", so it can be a program name with args. +set dummy ${ac_tool_prefix}unzip; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_UNZIP+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$UNZIP"; then + ac_cv_prog_UNZIP="$UNZIP" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_UNZIP="${ac_tool_prefix}unzip" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +UNZIP=$ac_cv_prog_UNZIP +if test -n "$UNZIP"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $UNZIP" >&5 +printf "%s\n" "$UNZIP" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_UNZIP"; then + ac_ct_UNZIP=$UNZIP + # Extract the first word of "unzip", so it can be a program name with args. +set dummy unzip; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_ac_ct_UNZIP+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$ac_ct_UNZIP"; then + ac_cv_prog_ac_ct_UNZIP="$ac_ct_UNZIP" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_UNZIP="unzip" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +ac_ct_UNZIP=$ac_cv_prog_ac_ct_UNZIP +if test -n "$ac_ct_UNZIP"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_UNZIP" >&5 +printf "%s\n" "$ac_ct_UNZIP" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + if test "x$ac_ct_UNZIP" = x; then + UNZIP="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + UNZIP=$ac_ct_UNZIP + fi +else + UNZIP="$ac_cv_prog_UNZIP" +fi + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}gunzip", so it can be a program name with args. +set dummy ${ac_tool_prefix}gunzip; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_GUNZIP+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$GUNZIP"; then + ac_cv_prog_GUNZIP="$GUNZIP" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_GUNZIP="${ac_tool_prefix}gunzip" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +GUNZIP=$ac_cv_prog_GUNZIP +if test -n "$GUNZIP"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $GUNZIP" >&5 +printf "%s\n" "$GUNZIP" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_GUNZIP"; then + ac_ct_GUNZIP=$GUNZIP + # Extract the first word of "gunzip", so it can be a program name with args. +set dummy gunzip; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_ac_ct_GUNZIP+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$ac_ct_GUNZIP"; then + ac_cv_prog_ac_ct_GUNZIP="$ac_ct_GUNZIP" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_GUNZIP="gunzip" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +ac_ct_GUNZIP=$ac_cv_prog_ac_ct_GUNZIP +if test -n "$ac_ct_GUNZIP"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_GUNZIP" >&5 +printf "%s\n" "$ac_ct_GUNZIP" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + if test "x$ac_ct_GUNZIP" = x; then + GUNZIP="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + GUNZIP=$ac_ct_GUNZIP + fi +else + GUNZIP="$ac_cv_prog_GUNZIP" +fi + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}bunzip2", so it can be a program name with args. +set dummy ${ac_tool_prefix}bunzip2; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_BUNZIP2+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$BUNZIP2"; then + ac_cv_prog_BUNZIP2="$BUNZIP2" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_BUNZIP2="${ac_tool_prefix}bunzip2" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +BUNZIP2=$ac_cv_prog_BUNZIP2 +if test -n "$BUNZIP2"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $BUNZIP2" >&5 +printf "%s\n" "$BUNZIP2" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_BUNZIP2"; then + ac_ct_BUNZIP2=$BUNZIP2 + # Extract the first word of "bunzip2", so it can be a program name with args. +set dummy bunzip2; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_ac_ct_BUNZIP2+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$ac_ct_BUNZIP2"; then + ac_cv_prog_ac_ct_BUNZIP2="$ac_ct_BUNZIP2" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_BUNZIP2="bunzip2" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +ac_ct_BUNZIP2=$ac_cv_prog_ac_ct_BUNZIP2 +if test -n "$ac_ct_BUNZIP2"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_BUNZIP2" >&5 +printf "%s\n" "$ac_ct_BUNZIP2" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + if test "x$ac_ct_BUNZIP2" = x; then + BUNZIP2="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + BUNZIP2=$ac_ct_BUNZIP2 + fi +else + BUNZIP2="$ac_cv_prog_BUNZIP2" +fi + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}unxz", so it can be a program name with args. +set dummy ${ac_tool_prefix}unxz; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_UNXZ+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$UNXZ"; then + ac_cv_prog_UNXZ="$UNXZ" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_UNXZ="${ac_tool_prefix}unxz" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +UNXZ=$ac_cv_prog_UNXZ +if test -n "$UNXZ"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $UNXZ" >&5 +printf "%s\n" "$UNXZ" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_UNXZ"; then + ac_ct_UNXZ=$UNXZ + # Extract the first word of "unxz", so it can be a program name with args. +set dummy unxz; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_ac_ct_UNXZ+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$ac_ct_UNXZ"; then + ac_cv_prog_ac_ct_UNXZ="$ac_ct_UNXZ" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_UNXZ="unxz" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +ac_ct_UNXZ=$ac_cv_prog_ac_ct_UNXZ +if test -n "$ac_ct_UNXZ"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_UNXZ" >&5 +printf "%s\n" "$ac_ct_UNXZ" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + if test "x$ac_ct_UNXZ" = x; then + UNXZ="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + UNXZ=$ac_ct_UNXZ + fi +else + UNXZ="$ac_cv_prog_UNXZ" +fi + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}perl", so it can be a program name with args. +set dummy ${ac_tool_prefix}perl; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_PERL+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$PERL"; then + ac_cv_prog_PERL="$PERL" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_PERL="${ac_tool_prefix}perl" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +PERL=$ac_cv_prog_PERL +if test -n "$PERL"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $PERL" >&5 +printf "%s\n" "$PERL" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_PERL"; then + ac_ct_PERL=$PERL + # Extract the first word of "perl", so it can be a program name with args. +set dummy perl; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_ac_ct_PERL+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$ac_ct_PERL"; then + ac_cv_prog_ac_ct_PERL="$ac_ct_PERL" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_PERL="perl" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +ac_ct_PERL=$ac_cv_prog_ac_ct_PERL +if test -n "$ac_ct_PERL"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_PERL" >&5 +printf "%s\n" "$ac_ct_PERL" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + if test "x$ac_ct_PERL" = x; then + PERL="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + PERL=$ac_ct_PERL + fi +else + PERL="$ac_cv_prog_PERL" +fi + + + +# Find AMUSE libraries + + + + + + + + + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. +set dummy ${ac_tool_prefix}gcc; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_CC+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="${ac_tool_prefix}gcc" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +printf "%s\n" "$CC" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_CC"; then + ac_ct_CC=$CC + # Extract the first word of "gcc", so it can be a program name with args. +set dummy gcc; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_ac_ct_CC+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CC="gcc" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +printf "%s\n" "$ac_ct_CC" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +else + CC="$ac_cv_prog_CC" +fi + +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. +set dummy ${ac_tool_prefix}cc; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_CC+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="${ac_tool_prefix}cc" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +printf "%s\n" "$CC" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + + fi +fi +if test -z "$CC"; then + # Extract the first word of "cc", so it can be a program name with args. +set dummy cc; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_CC+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else + ac_prog_rejected=no +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + if test "$as_dir$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then + ac_prog_rejected=yes + continue + fi + ac_cv_prog_CC="cc" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +if test $ac_prog_rejected = yes; then + # We found a bogon in the path, so make sure we never use it. + set dummy $ac_cv_prog_CC + shift + if test $# != 0; then + # We chose a different compiler from the bogus one. 
+ # However, it has the same basename, so the bogon will be chosen + # first if we set CC to just the basename; use the full file name. + shift + ac_cv_prog_CC="$as_dir$ac_word${1+' '}$@" + fi +fi +fi ;; +esac +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +printf "%s\n" "$CC" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + +fi +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + for ac_prog in cl.exe + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_CC+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="$ac_tool_prefix$ac_prog" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +printf "%s\n" "$CC" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + + test -n "$CC" && break + done +fi +if test -z "$CC"; then + ac_ct_CC=$CC + for ac_prog in cl.exe +do + # Extract the first word of "$ac_prog", so it can be a program name with args. 
+set dummy $ac_prog; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_ac_ct_CC+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CC="$ac_prog" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +printf "%s\n" "$ac_ct_CC" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + + test -n "$ac_ct_CC" && break +done + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +fi + +fi +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}clang", so it can be a program name with args. +set dummy ${ac_tool_prefix}clang; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_CC+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="${ac_tool_prefix}clang" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +printf "%s\n" "$CC" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_CC"; then + ac_ct_CC=$CC + # Extract the first word of "clang", so it can be a program name with args. +set dummy clang; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_ac_ct_CC+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CC="clang" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +printf "%s\n" "$ac_ct_CC" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +else + CC="$ac_cv_prog_CC" +fi + +fi + + +test -z "$CC" && { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in '$ac_pwd':" >&5 +printf "%s\n" "$as_me: error: in '$ac_pwd':" >&2;} +as_fn_error $? "no acceptable C compiler found in \$PATH +See 'config.log' for more details" "$LINENO" 5; } + +# Provide some information about the compiler. +printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 +set X $ac_compile +ac_compiler=$2 +for ac_option in --version -v -V -qversion -version; do + { { ac_try="$ac_compiler $ac_option >&5" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +printf "%s\n" "$ac_try_echo"; } >&5 + (eval "$ac_compiler $ac_option >&5") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + sed '10a\ +... rest of stderr output deleted ... 
+ 10q' conftest.err >conftest.er1 + cat conftest.er1 >&5 + fi + rm -f conftest.er1 conftest.err + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } +done + +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether the compiler supports GNU C" >&5 +printf %s "checking whether the compiler supports GNU C... " >&6; } +if test ${ac_cv_c_compiler_gnu+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main (void) +{ +#ifndef __GNUC__ + choke me +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO" +then : + ac_compiler_gnu=yes +else case e in #( + e) ac_compiler_gnu=no ;; +esac +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext +ac_cv_c_compiler_gnu=$ac_compiler_gnu + ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 +printf "%s\n" "$ac_cv_c_compiler_gnu" >&6; } +ac_compiler_gnu=$ac_cv_c_compiler_gnu + +if test $ac_compiler_gnu = yes; then + GCC=yes +else + GCC= +fi +ac_test_CFLAGS=${CFLAGS+y} +ac_save_CFLAGS=$CFLAGS +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 +printf %s "checking whether $CC accepts -g... " >&6; } +if test ${ac_cv_prog_cc_g+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_save_c_werror_flag=$ac_c_werror_flag + ac_c_werror_flag=yes + ac_cv_prog_cc_g=no + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main (void) +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO" +then : + ac_cv_prog_cc_g=yes +else case e in #( + e) CFLAGS="" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +int +main (void) +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO" +then : + +else case e in #( + e) ac_c_werror_flag=$ac_save_c_werror_flag + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main (void) +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO" +then : + ac_cv_prog_cc_g=yes +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext ;; +esac +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext ;; +esac +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext + ac_c_werror_flag=$ac_save_c_werror_flag ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 +printf "%s\n" "$ac_cv_prog_cc_g" >&6; } +if test $ac_test_CFLAGS; then + CFLAGS=$ac_save_CFLAGS +elif test $ac_cv_prog_cc_g = yes; then + if test "$GCC" = yes; then + CFLAGS="-g -O2" + else + CFLAGS="-g" + fi +else + if test "$GCC" = yes; then + CFLAGS="-O2" + else + CFLAGS= + fi +fi +ac_prog_cc_stdc=no +if test x$ac_prog_cc_stdc = xno +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $CC option to enable C11 features" >&5 +printf %s "checking for $CC option to enable C11 features... " >&6; } +if test ${ac_cv_prog_cc_c11+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_cv_prog_cc_c11=no +ac_save_CC=$CC +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +$ac_c_conftest_c11_program +_ACEOF +for ac_arg in '' -std=gnu11 +do + CC="$ac_save_CC $ac_arg" + if ac_fn_c_try_compile "$LINENO" +then : + ac_cv_prog_cc_c11=$ac_arg +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam + test "x$ac_cv_prog_cc_c11" != "xno" && break +done +rm -f conftest.$ac_ext +CC=$ac_save_CC ;; +esac +fi + +if test "x$ac_cv_prog_cc_c11" = xno +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 +printf "%s\n" "unsupported" >&6; } +else case e in #( + e) if test "x$ac_cv_prog_cc_c11" = x +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 +printf "%s\n" "none needed" >&6; } +else case e in #( + e) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c11" >&5 +printf "%s\n" "$ac_cv_prog_cc_c11" >&6; } + CC="$CC $ac_cv_prog_cc_c11" ;; +esac +fi + ac_cv_prog_cc_stdc=$ac_cv_prog_cc_c11 + ac_prog_cc_stdc=c11 ;; +esac +fi +fi +if test x$ac_prog_cc_stdc = xno +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $CC option to enable C99 features" >&5 +printf %s "checking for $CC option to enable C99 features... " >&6; } +if test ${ac_cv_prog_cc_c99+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_cv_prog_cc_c99=no +ac_save_CC=$CC +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +$ac_c_conftest_c99_program +_ACEOF +for ac_arg in '' -std=gnu99 -std=c99 -c99 -qlanglvl=extc1x -qlanglvl=extc99 -AC99 -D_STDC_C99= +do + CC="$ac_save_CC $ac_arg" + if ac_fn_c_try_compile "$LINENO" +then : + ac_cv_prog_cc_c99=$ac_arg +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam + test "x$ac_cv_prog_cc_c99" != "xno" && break +done +rm -f conftest.$ac_ext +CC=$ac_save_CC ;; +esac +fi + +if test "x$ac_cv_prog_cc_c99" = xno +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 +printf "%s\n" "unsupported" >&6; } +else case e in #( + e) if test "x$ac_cv_prog_cc_c99" = x +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 +printf "%s\n" "none needed" >&6; } +else case e in #( + e) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c99" >&5 +printf "%s\n" "$ac_cv_prog_cc_c99" >&6; } + CC="$CC $ac_cv_prog_cc_c99" ;; +esac +fi + ac_cv_prog_cc_stdc=$ac_cv_prog_cc_c99 + ac_prog_cc_stdc=c99 ;; +esac +fi +fi +if test x$ac_prog_cc_stdc = xno +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $CC option to enable C89 features" >&5 +printf %s "checking for $CC option to enable C89 features... " >&6; } +if test ${ac_cv_prog_cc_c89+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_cv_prog_cc_c89=no +ac_save_CC=$CC +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +$ac_c_conftest_c89_program +_ACEOF +for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" +do + CC="$ac_save_CC $ac_arg" + if ac_fn_c_try_compile "$LINENO" +then : + ac_cv_prog_cc_c89=$ac_arg +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam + test "x$ac_cv_prog_cc_c89" != "xno" && break +done +rm -f conftest.$ac_ext +CC=$ac_save_CC ;; +esac +fi + +if test "x$ac_cv_prog_cc_c89" = xno +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 +printf "%s\n" "unsupported" >&6; } +else case e in #( + e) if test "x$ac_cv_prog_cc_c89" = x +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 +printf "%s\n" "none needed" >&6; } +else case e in #( + e) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 +printf "%s\n" "$ac_cv_prog_cc_c89" >&6; } + CC="$CC $ac_cv_prog_cc_c89" ;; +esac +fi + ac_cv_prog_cc_stdc=$ac_cv_prog_cc_c89 + ac_prog_cc_stdc=c89 ;; +esac +fi +fi + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + + + + + + + +if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}pkg-config", so it can be a program name with args. +set dummy ${ac_tool_prefix}pkg-config; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_path_PKG_CONFIG+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) case $PKG_CONFIG in + [\\/]* | ?:[\\/]*) + ac_cv_path_PKG_CONFIG="$PKG_CONFIG" # Let the user override the test with a path. 
+ ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_path_PKG_CONFIG="$as_dir$ac_word$ac_exec_ext" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + ;; +esac ;; +esac +fi +PKG_CONFIG=$ac_cv_path_PKG_CONFIG +if test -n "$PKG_CONFIG"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $PKG_CONFIG" >&5 +printf "%s\n" "$PKG_CONFIG" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + +fi +if test -z "$ac_cv_path_PKG_CONFIG"; then + ac_pt_PKG_CONFIG=$PKG_CONFIG + # Extract the first word of "pkg-config", so it can be a program name with args. +set dummy pkg-config; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_path_ac_pt_PKG_CONFIG+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) case $ac_pt_PKG_CONFIG in + [\\/]* | ?:[\\/]*) + ac_cv_path_ac_pt_PKG_CONFIG="$ac_pt_PKG_CONFIG" # Let the user override the test with a path. 
+ ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_path_ac_pt_PKG_CONFIG="$as_dir$ac_word$ac_exec_ext" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + ;; +esac ;; +esac +fi +ac_pt_PKG_CONFIG=$ac_cv_path_ac_pt_PKG_CONFIG +if test -n "$ac_pt_PKG_CONFIG"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_pt_PKG_CONFIG" >&5 +printf "%s\n" "$ac_pt_PKG_CONFIG" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + if test "x$ac_pt_PKG_CONFIG" = x; then + PKG_CONFIG="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +printf "%s\n" "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + PKG_CONFIG=$ac_pt_PKG_CONFIG + fi +else + PKG_CONFIG="$ac_cv_path_PKG_CONFIG" +fi + +fi +if test -n "$PKG_CONFIG"; then + _pkg_min_version=0.9.0 + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking pkg-config is at least version $_pkg_min_version" >&5 +printf %s "checking pkg-config is at least version $_pkg_min_version... 
" >&6; } + if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } + else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + PKG_CONFIG="" + fi +fi + + + amuse_save_LIBS="$LIBS" + amuse_save_LIB_CFLAGS="$STOPCOND_CFLAGS" + amuse_save_LIB_LIBS="$STOPCOND_LIBS" + amuse_save_PKG_CONFIG_PATH="$PKG_CONFIG_PATH" + + # If we have an active virtualenv, make sure pkg-config searches it + if test "a${VIRTUAL_ENV}" != "a" + then + PKG_CONFIG_PATH="${VIRTUAL_ENV}/lib/pkgconfig:${PKG_CONFIG_PATH}" + fi + + # All AMUSE libs export C symbols + ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + # Search for the library, first directly then fall back to pkg-config + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for library containing is_condition_enabled" >&5 +printf %s "checking for library containing is_condition_enabled... " >&6; } +if test ${ac_cv_search_is_condition_enabled+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_func_search_save_LIBS=$LIBS +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. + The 'extern "C"' is for builds by C++ compilers; + although this is not generally supported in C code supporting it here + has little cost and some practical benefit (sr 110532). 
*/ +#ifdef __cplusplus +extern "C" +#endif +char is_condition_enabled (void); +int +main (void) +{ +return is_condition_enabled (); + ; + return 0; +} +_ACEOF +for ac_lib in '' stopcond +do + if test -z "$ac_lib"; then + ac_res="none required" + else + ac_res=-l$ac_lib + LIBS="-l$ac_lib $ac_func_search_save_LIBS" + fi + if ac_fn_c_try_link "$LINENO" +then : + ac_cv_search_is_condition_enabled=$ac_res +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam \ + conftest$ac_exeext + if test ${ac_cv_search_is_condition_enabled+y} +then : + break +fi +done +if test ${ac_cv_search_is_condition_enabled+y} +then : + +else case e in #( + e) ac_cv_search_is_condition_enabled=no ;; +esac +fi +rm conftest.$ac_ext +LIBS=$ac_func_search_save_LIBS ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_is_condition_enabled" >&5 +printf "%s\n" "$ac_cv_search_is_condition_enabled" >&6; } +ac_res=$ac_cv_search_is_condition_enabled +if test "$ac_res" != no +then : + test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" + + FOUND_STOPCOND="yes" + STOPCOND_LIBS="$LIBS" + STOPCOND_CFLAGS="" + +else case e in #( + e) + +pkg_failed=no +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for STOPCOND" >&5 +printf %s "checking for STOPCOND... " >&6; } + +if test -n "$STOPCOND_CFLAGS"; then + pkg_cv_STOPCOND_CFLAGS="$STOPCOND_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"stopcond\""; } >&5 + ($PKG_CONFIG --exists --print-errors "stopcond") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_STOPCOND_CFLAGS=`$PKG_CONFIG --cflags "stopcond" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$STOPCOND_LIBS"; then + pkg_cv_STOPCOND_LIBS="$STOPCOND_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"stopcond\""; } >&5 + ($PKG_CONFIG --exists --print-errors "stopcond") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_STOPCOND_LIBS=`$PKG_CONFIG --libs "stopcond" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + STOPCOND_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "stopcond" 2>&1` + else + STOPCOND_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "stopcond" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$STOPCOND_PKG_ERRORS" >&5 + + + FOUND_STOPCOND="no" + +elif test $pkg_failed = untried; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + + FOUND_STOPCOND="no" + +else + STOPCOND_CFLAGS=$pkg_cv_STOPCOND_CFLAGS + STOPCOND_LIBS=$pkg_cv_STOPCOND_LIBS + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } + + FOUND_STOPCOND="yes" + +fi + ;; +esac +fi + + + ac_ext=${ac_fc_srcext-f} +ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' +ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_fc_compiler_gnu + + + PKG_CONFIG_PATH="$amuse_save_PKG_CONFIG_PATH" + 
LIBS="$amuse_save_LIBS" + + # If we have an active CONDA environment, assume that the lib is coming from + # there and add an additional flag so that .mod files can be found. Only really + # needed for stopcond and forsockets, and hopefully conda-forge will give us a + # better solution soon. + if test "${FOUND_STOPCOND}" == "yes" -a "x$CONDA_PREFIX" != "x" + then + STOPCOND_CFLAGS="${STOPCOND_CFLAGS} -I${CONDA_PREFIX}/include" + fi + + # If the user overrode the variables, go with what they set instead of + # what we just detected. + if test "x$amuse_save_LIB_CFLAGS" != "x" +then : + + STOPCOND_CFLAGS="$amuse_save_LIB_CFLAGS" + +fi + if test "x$amuse_save_LIB_LIBS" != "x" +then : + + STOPCOND_LIBS="$amuse_save_LIB_LIBS" + +fi + + + + + + + + + amuse_save_LIBS="$LIBS" + amuse_save_LIB_CFLAGS="$STOPCONDMPI_CFLAGS" + amuse_save_LIB_LIBS="$STOPCONDMPI_LIBS" + amuse_save_PKG_CONFIG_PATH="$PKG_CONFIG_PATH" + + # If we have an active virtualenv, make sure pkg-config searches it + if test "a${VIRTUAL_ENV}" != "a" + then + PKG_CONFIG_PATH="${VIRTUAL_ENV}/lib/pkgconfig:${PKG_CONFIG_PATH}" + fi + + # All AMUSE libs export C symbols + ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + # Search for the library, first directly then fall back to pkg-config + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for library containing get_set_conditions_" >&5 +printf %s "checking for library containing get_set_conditions_... " >&6; } +if test ${ac_cv_search_get_set_conditions_+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_func_search_save_LIBS=$LIBS +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. 
+ Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. + The 'extern "C"' is for builds by C++ compilers; + although this is not generally supported in C code supporting it here + has little cost and some practical benefit (sr 110532). */ +#ifdef __cplusplus +extern "C" +#endif +char get_set_conditions_ (void); +int +main (void) +{ +return get_set_conditions_ (); + ; + return 0; +} +_ACEOF +for ac_lib in '' stopcondmpi +do + if test -z "$ac_lib"; then + ac_res="none required" + else + ac_res=-l$ac_lib + LIBS="-l$ac_lib $ac_func_search_save_LIBS" + fi + if ac_fn_c_try_link "$LINENO" +then : + ac_cv_search_get_set_conditions_=$ac_res +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam \ + conftest$ac_exeext + if test ${ac_cv_search_get_set_conditions_+y} +then : + break +fi +done +if test ${ac_cv_search_get_set_conditions_+y} +then : + +else case e in #( + e) ac_cv_search_get_set_conditions_=no ;; +esac +fi +rm conftest.$ac_ext +LIBS=$ac_func_search_save_LIBS ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_get_set_conditions_" >&5 +printf "%s\n" "$ac_cv_search_get_set_conditions_" >&6; } +ac_res=$ac_cv_search_get_set_conditions_ +if test "$ac_res" != no +then : + test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" + + FOUND_STOPCONDMPI="yes" + STOPCONDMPI_LIBS="$LIBS" + STOPCONDMPI_CFLAGS="" + +else case e in #( + e) + +pkg_failed=no +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for STOPCONDMPI" >&5 +printf %s "checking for STOPCONDMPI... " >&6; } + +if test -n "$STOPCONDMPI_CFLAGS"; then + pkg_cv_STOPCONDMPI_CFLAGS="$STOPCONDMPI_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"stopcondmpi\""; } >&5 + ($PKG_CONFIG --exists --print-errors "stopcondmpi") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_STOPCONDMPI_CFLAGS=`$PKG_CONFIG --cflags "stopcondmpi" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$STOPCONDMPI_LIBS"; then + pkg_cv_STOPCONDMPI_LIBS="$STOPCONDMPI_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"stopcondmpi\""; } >&5 + ($PKG_CONFIG --exists --print-errors "stopcondmpi") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_STOPCONDMPI_LIBS=`$PKG_CONFIG --libs "stopcondmpi" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + STOPCONDMPI_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "stopcondmpi" 2>&1` + else + STOPCONDMPI_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "stopcondmpi" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$STOPCONDMPI_PKG_ERRORS" >&5 + + + FOUND_STOPCONDMPI="no" + +elif test $pkg_failed = untried; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + + FOUND_STOPCONDMPI="no" + +else + STOPCONDMPI_CFLAGS=$pkg_cv_STOPCONDMPI_CFLAGS + STOPCONDMPI_LIBS=$pkg_cv_STOPCONDMPI_LIBS + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } + + FOUND_STOPCONDMPI="yes" + +fi + ;; +esac +fi + + + ac_ext=${ac_fc_srcext-f} +ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' +ac_link='$FC -o 
conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_fc_compiler_gnu + + + PKG_CONFIG_PATH="$amuse_save_PKG_CONFIG_PATH" + LIBS="$amuse_save_LIBS" + + # If we have an active CONDA environment, assume that the lib is coming from + # there and add an additional flag so that .mod files can be found. Only really + # needed for stopcond and forsockets, and hopefully conda-forge will give us a + # better solution soon. + if test "${FOUND_STOPCONDMPI}" == "yes" -a "x$CONDA_PREFIX" != "x" + then + STOPCONDMPI_CFLAGS="${STOPCONDMPI_CFLAGS} -I${CONDA_PREFIX}/include" + fi + + # If the user overrode the variables, go with what they set instead of + # what we just detected. + if test "x$amuse_save_LIB_CFLAGS" != "x" +then : + + STOPCONDMPI_CFLAGS="$amuse_save_LIB_CFLAGS" + +fi + if test "x$amuse_save_LIB_LIBS" != "x" +then : + + STOPCONDMPI_LIBS="$amuse_save_LIB_LIBS" + +fi + + + + + + + + + amuse_save_LIBS="$LIBS" + amuse_save_LIB_CFLAGS="$AMUSE_MPI_CFLAGS" + amuse_save_LIB_LIBS="$AMUSE_MPI_LIBS" + amuse_save_PKG_CONFIG_PATH="$PKG_CONFIG_PATH" + + # If we have an active virtualenv, make sure pkg-config searches it + if test "a${VIRTUAL_ENV}" != "a" + then + PKG_CONFIG_PATH="${VIRTUAL_ENV}/lib/pkgconfig:${PKG_CONFIG_PATH}" + fi + + # All AMUSE libs export C symbols + ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + # Search for the library, first directly then fall back to pkg-config + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for library containing get_comm_world" >&5 +printf %s "checking for library containing get_comm_world... " >&6; } +if test ${ac_cv_search_get_comm_world+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_func_search_save_LIBS=$LIBS +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. + The 'extern "C"' is for builds by C++ compilers; + although this is not generally supported in C code supporting it here + has little cost and some practical benefit (sr 110532). */ +#ifdef __cplusplus +extern "C" +#endif +char get_comm_world (void); +int +main (void) +{ +return get_comm_world (); + ; + return 0; +} +_ACEOF +for ac_lib in '' amuse_mpi +do + if test -z "$ac_lib"; then + ac_res="none required" + else + ac_res=-l$ac_lib + LIBS="-l$ac_lib $ac_func_search_save_LIBS" + fi + if ac_fn_c_try_link "$LINENO" +then : + ac_cv_search_get_comm_world=$ac_res +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam \ + conftest$ac_exeext + if test ${ac_cv_search_get_comm_world+y} +then : + break +fi +done +if test ${ac_cv_search_get_comm_world+y} +then : + +else case e in #( + e) ac_cv_search_get_comm_world=no ;; +esac +fi +rm conftest.$ac_ext +LIBS=$ac_func_search_save_LIBS ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_get_comm_world" >&5 +printf "%s\n" "$ac_cv_search_get_comm_world" >&6; } +ac_res=$ac_cv_search_get_comm_world +if test "$ac_res" != no +then : + test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" + + FOUND_AMUSE_MPI="yes" + AMUSE_MPI_LIBS="$LIBS" + AMUSE_MPI_CFLAGS="" + +else case e in #( + e) + +pkg_failed=no +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for AMUSE_MPI" >&5 +printf %s "checking for AMUSE_MPI... " >&6; } + +if test -n "$AMUSE_MPI_CFLAGS"; then + pkg_cv_AMUSE_MPI_CFLAGS="$AMUSE_MPI_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"amuse_mpi\""; } >&5 + ($PKG_CONFIG --exists --print-errors "amuse_mpi") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_AMUSE_MPI_CFLAGS=`$PKG_CONFIG --cflags "amuse_mpi" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$AMUSE_MPI_LIBS"; then + pkg_cv_AMUSE_MPI_LIBS="$AMUSE_MPI_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"amuse_mpi\""; } >&5 + ($PKG_CONFIG --exists --print-errors "amuse_mpi") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_AMUSE_MPI_LIBS=`$PKG_CONFIG --libs "amuse_mpi" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + AMUSE_MPI_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "amuse_mpi" 2>&1` + else + AMUSE_MPI_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "amuse_mpi" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$AMUSE_MPI_PKG_ERRORS" >&5 + + + FOUND_AMUSE_MPI="no" + +elif test $pkg_failed = untried; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + + FOUND_AMUSE_MPI="no" + +else + AMUSE_MPI_CFLAGS=$pkg_cv_AMUSE_MPI_CFLAGS + AMUSE_MPI_LIBS=$pkg_cv_AMUSE_MPI_LIBS + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } + + FOUND_AMUSE_MPI="yes" + +fi + ;; +esac +fi + + + ac_ext=${ac_fc_srcext-f} +ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' +ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS 
$ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_fc_compiler_gnu + + + PKG_CONFIG_PATH="$amuse_save_PKG_CONFIG_PATH" + LIBS="$amuse_save_LIBS" + + # If we have an active CONDA environment, assume that the lib is coming from + # there and add an additional flag so that .mod files can be found. Only really + # needed for stopcond and forsockets, and hopefully conda-forge will give us a + # better solution soon. + if test "${FOUND_AMUSE_MPI}" == "yes" -a "x$CONDA_PREFIX" != "x" + then + AMUSE_MPI_CFLAGS="${AMUSE_MPI_CFLAGS} -I${CONDA_PREFIX}/include" + fi + + # If the user overrode the variables, go with what they set instead of + # what we just detected. + if test "x$amuse_save_LIB_CFLAGS" != "x" +then : + + AMUSE_MPI_CFLAGS="$amuse_save_LIB_CFLAGS" + +fi + if test "x$amuse_save_LIB_LIBS" != "x" +then : + + AMUSE_MPI_LIBS="$amuse_save_LIB_LIBS" + +fi + + + + + + + + + amuse_save_LIBS="$LIBS" + amuse_save_LIB_CFLAGS="$FORSOCKETS_CFLAGS" + amuse_save_LIB_LIBS="$FORSOCKETS_LIBS" + amuse_save_PKG_CONFIG_PATH="$PKG_CONFIG_PATH" + + # If we have an active virtualenv, make sure pkg-config searches it + if test "a${VIRTUAL_ENV}" != "a" + then + PKG_CONFIG_PATH="${VIRTUAL_ENV}/lib/pkgconfig:${PKG_CONFIG_PATH}" + fi + + # All AMUSE libs export C symbols + ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + # Search for the library, first directly then fall back to pkg-config + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for library containing forsockets_close" >&5 +printf %s "checking for library containing forsockets_close... " >&6; } +if test ${ac_cv_search_forsockets_close+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_func_search_save_LIBS=$LIBS +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. + The 'extern "C"' is for builds by C++ compilers; + although this is not generally supported in C code supporting it here + has little cost and some practical benefit (sr 110532). */ +#ifdef __cplusplus +extern "C" +#endif +char forsockets_close (void); +int +main (void) +{ +return forsockets_close (); + ; + return 0; +} +_ACEOF +for ac_lib in '' forsockets +do + if test -z "$ac_lib"; then + ac_res="none required" + else + ac_res=-l$ac_lib + LIBS="-l$ac_lib $ac_func_search_save_LIBS" + fi + if ac_fn_c_try_link "$LINENO" +then : + ac_cv_search_forsockets_close=$ac_res +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam \ + conftest$ac_exeext + if test ${ac_cv_search_forsockets_close+y} +then : + break +fi +done +if test ${ac_cv_search_forsockets_close+y} +then : + +else case e in #( + e) ac_cv_search_forsockets_close=no ;; +esac +fi +rm conftest.$ac_ext +LIBS=$ac_func_search_save_LIBS ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_forsockets_close" >&5 +printf "%s\n" "$ac_cv_search_forsockets_close" >&6; } +ac_res=$ac_cv_search_forsockets_close +if test "$ac_res" != no +then : + test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" + + FOUND_FORSOCKETS="yes" + FORSOCKETS_LIBS="$LIBS" + FORSOCKETS_CFLAGS="" + +else case e in #( + e) + +pkg_failed=no +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for FORSOCKETS" >&5 +printf %s "checking for FORSOCKETS... " >&6; } + +if test -n "$FORSOCKETS_CFLAGS"; then + pkg_cv_FORSOCKETS_CFLAGS="$FORSOCKETS_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"forsockets\""; } >&5 + ($PKG_CONFIG --exists --print-errors "forsockets") 2>&5 + ac_status=$? 
+ printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_FORSOCKETS_CFLAGS=`$PKG_CONFIG --cflags "forsockets" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$FORSOCKETS_LIBS"; then + pkg_cv_FORSOCKETS_LIBS="$FORSOCKETS_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"forsockets\""; } >&5 + ($PKG_CONFIG --exists --print-errors "forsockets") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_FORSOCKETS_LIBS=`$PKG_CONFIG --libs "forsockets" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + FORSOCKETS_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "forsockets" 2>&1` + else + FORSOCKETS_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "forsockets" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$FORSOCKETS_PKG_ERRORS" >&5 + + + FOUND_FORSOCKETS="no" + +elif test $pkg_failed = untried; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + + FOUND_FORSOCKETS="no" + +else + FORSOCKETS_CFLAGS=$pkg_cv_FORSOCKETS_CFLAGS + FORSOCKETS_LIBS=$pkg_cv_FORSOCKETS_LIBS + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } + + FOUND_FORSOCKETS="yes" + +fi + ;; +esac +fi + + + ac_ext=${ac_fc_srcext-f} +ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext 
conftest.$ac_ext >&5' +ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_fc_compiler_gnu + + + PKG_CONFIG_PATH="$amuse_save_PKG_CONFIG_PATH" + LIBS="$amuse_save_LIBS" + + # If we have an active CONDA environment, assume that the lib is coming from + # there and add an additional flag so that .mod files can be found. Only really + # needed for stopcond and forsockets, and hopefully conda-forge will give us a + # better solution soon. + if test "${FOUND_FORSOCKETS}" == "yes" -a "x$CONDA_PREFIX" != "x" + then + FORSOCKETS_CFLAGS="${FORSOCKETS_CFLAGS} -I${CONDA_PREFIX}/include" + fi + + # If the user overrode the variables, go with what they set instead of + # what we just detected. + if test "x$amuse_save_LIB_CFLAGS" != "x" +then : + + FORSOCKETS_CFLAGS="$amuse_save_LIB_CFLAGS" + +fi + if test "x$amuse_save_LIB_LIBS" != "x" +then : + + FORSOCKETS_LIBS="$amuse_save_LIB_LIBS" + +fi + + + + + + + + + amuse_save_LIBS="$LIBS" + amuse_save_LIB_CFLAGS="$SIMPLE_HASH_CFLAGS" + amuse_save_LIB_LIBS="$SIMPLE_HASH_LIBS" + amuse_save_PKG_CONFIG_PATH="$PKG_CONFIG_PATH" + + # If we have an active virtualenv, make sure pkg-config searches it + if test "a${VIRTUAL_ENV}" != "a" + then + PKG_CONFIG_PATH="${VIRTUAL_ENV}/lib/pkgconfig:${PKG_CONFIG_PATH}" + fi + + # All AMUSE libs export C symbols + ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + # Search for the library, first directly then fall back to pkg-config + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for library containing init_hash" >&5 +printf %s "checking for library containing init_hash... 
" >&6; } +if test ${ac_cv_search_init_hash+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_func_search_save_LIBS=$LIBS +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. + The 'extern "C"' is for builds by C++ compilers; + although this is not generally supported in C code supporting it here + has little cost and some practical benefit (sr 110532). */ +#ifdef __cplusplus +extern "C" +#endif +char init_hash (void); +int +main (void) +{ +return init_hash (); + ; + return 0; +} +_ACEOF +for ac_lib in '' simple_hash +do + if test -z "$ac_lib"; then + ac_res="none required" + else + ac_res=-l$ac_lib + LIBS="-l$ac_lib $ac_func_search_save_LIBS" + fi + if ac_fn_c_try_link "$LINENO" +then : + ac_cv_search_init_hash=$ac_res +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam \ + conftest$ac_exeext + if test ${ac_cv_search_init_hash+y} +then : + break +fi +done +if test ${ac_cv_search_init_hash+y} +then : + +else case e in #( + e) ac_cv_search_init_hash=no ;; +esac +fi +rm conftest.$ac_ext +LIBS=$ac_func_search_save_LIBS ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_init_hash" >&5 +printf "%s\n" "$ac_cv_search_init_hash" >&6; } +ac_res=$ac_cv_search_init_hash +if test "$ac_res" != no +then : + test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" + + FOUND_SIMPLE_HASH="yes" + SIMPLE_HASH_LIBS="$LIBS" + SIMPLE_HASH_CFLAGS="" + +else case e in #( + e) + +pkg_failed=no +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for SIMPLE_HASH" >&5 +printf %s "checking for SIMPLE_HASH... 
" >&6; } + +if test -n "$SIMPLE_HASH_CFLAGS"; then + pkg_cv_SIMPLE_HASH_CFLAGS="$SIMPLE_HASH_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"simple_hash\""; } >&5 + ($PKG_CONFIG --exists --print-errors "simple_hash") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_SIMPLE_HASH_CFLAGS=`$PKG_CONFIG --cflags "simple_hash" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$SIMPLE_HASH_LIBS"; then + pkg_cv_SIMPLE_HASH_LIBS="$SIMPLE_HASH_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"simple_hash\""; } >&5 + ($PKG_CONFIG --exists --print-errors "simple_hash") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_SIMPLE_HASH_LIBS=`$PKG_CONFIG --libs "simple_hash" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + SIMPLE_HASH_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "simple_hash" 2>&1` + else + SIMPLE_HASH_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "simple_hash" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$SIMPLE_HASH_PKG_ERRORS" >&5 + + + FOUND_SIMPLE_HASH="no" + +elif test $pkg_failed = untried; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + + FOUND_SIMPLE_HASH="no" + +else + SIMPLE_HASH_CFLAGS=$pkg_cv_SIMPLE_HASH_CFLAGS + SIMPLE_HASH_LIBS=$pkg_cv_SIMPLE_HASH_LIBS + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } + + FOUND_SIMPLE_HASH="yes" + +fi + ;; +esac +fi + + + ac_ext=${ac_fc_srcext-f} +ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' +ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_fc_compiler_gnu + + + PKG_CONFIG_PATH="$amuse_save_PKG_CONFIG_PATH" + LIBS="$amuse_save_LIBS" + + # If we have an active CONDA environment, assume that the lib is coming from + # there and add an additional flag so that .mod files can be found. Only really + # needed for stopcond and forsockets, and hopefully conda-forge will give us a + # better solution soon. + if test "${FOUND_SIMPLE_HASH}" == "yes" -a "x$CONDA_PREFIX" != "x" + then + SIMPLE_HASH_CFLAGS="${SIMPLE_HASH_CFLAGS} -I${CONDA_PREFIX}/include" + fi + + # If the user overrode the variables, go with what they set instead of + # what we just detected. 
+ if test "x$amuse_save_LIB_CFLAGS" != "x" +then : + + SIMPLE_HASH_CFLAGS="$amuse_save_LIB_CFLAGS" + +fi + if test "x$amuse_save_LIB_LIBS" != "x" +then : + + SIMPLE_HASH_LIBS="$amuse_save_LIB_LIBS" + +fi + + + + + + + + + amuse_save_LIBS="$LIBS" + amuse_save_LIB_CFLAGS="$G6LIB_CFLAGS" + amuse_save_LIB_LIBS="$G6LIB_LIBS" + amuse_save_PKG_CONFIG_PATH="$PKG_CONFIG_PATH" + + # If we have an active virtualenv, make sure pkg-config searches it + if test "a${VIRTUAL_ENV}" != "a" + then + PKG_CONFIG_PATH="${VIRTUAL_ENV}/lib/pkgconfig:${PKG_CONFIG_PATH}" + fi + + # All AMUSE libs export C symbols + ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + # Search for the library, first directly then fall back to pkg-config + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for library containing g6_npipes" >&5 +printf %s "checking for library containing g6_npipes... " >&6; } +if test ${ac_cv_search_g6_npipes+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_func_search_save_LIBS=$LIBS +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. + The 'extern "C"' is for builds by C++ compilers; + although this is not generally supported in C code supporting it here + has little cost and some practical benefit (sr 110532). 
*/ +#ifdef __cplusplus +extern "C" +#endif +char g6_npipes (void); +int +main (void) +{ +return g6_npipes (); + ; + return 0; +} +_ACEOF +for ac_lib in '' g6 +do + if test -z "$ac_lib"; then + ac_res="none required" + else + ac_res=-l$ac_lib + LIBS="-l$ac_lib $ac_func_search_save_LIBS" + fi + if ac_fn_c_try_link "$LINENO" +then : + ac_cv_search_g6_npipes=$ac_res +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam \ + conftest$ac_exeext + if test ${ac_cv_search_g6_npipes+y} +then : + break +fi +done +if test ${ac_cv_search_g6_npipes+y} +then : + +else case e in #( + e) ac_cv_search_g6_npipes=no ;; +esac +fi +rm conftest.$ac_ext +LIBS=$ac_func_search_save_LIBS ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_g6_npipes" >&5 +printf "%s\n" "$ac_cv_search_g6_npipes" >&6; } +ac_res=$ac_cv_search_g6_npipes +if test "$ac_res" != no +then : + test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" + + FOUND_G6LIB="yes" + G6LIB_LIBS="$LIBS" + G6LIB_CFLAGS="" + +else case e in #( + e) + +pkg_failed=no +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for G6LIB" >&5 +printf %s "checking for G6LIB... " >&6; } + +if test -n "$G6LIB_CFLAGS"; then + pkg_cv_G6LIB_CFLAGS="$G6LIB_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"g6lib\""; } >&5 + ($PKG_CONFIG --exists --print-errors "g6lib") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_G6LIB_CFLAGS=`$PKG_CONFIG --cflags "g6lib" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$G6LIB_LIBS"; then + pkg_cv_G6LIB_LIBS="$G6LIB_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"g6lib\""; } >&5 + ($PKG_CONFIG --exists --print-errors "g6lib") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_G6LIB_LIBS=`$PKG_CONFIG --libs "g6lib" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + G6LIB_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "g6lib" 2>&1` + else + G6LIB_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "g6lib" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$G6LIB_PKG_ERRORS" >&5 + + + FOUND_G6LIB="no" + +elif test $pkg_failed = untried; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + + FOUND_G6LIB="no" + +else + G6LIB_CFLAGS=$pkg_cv_G6LIB_CFLAGS + G6LIB_LIBS=$pkg_cv_G6LIB_LIBS + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } + + FOUND_G6LIB="yes" + +fi + ;; +esac +fi + + + ac_ext=${ac_fc_srcext-f} +ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' +ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_fc_compiler_gnu + + + PKG_CONFIG_PATH="$amuse_save_PKG_CONFIG_PATH" + LIBS="$amuse_save_LIBS" + + # If we have an active CONDA environment, assume 
that the lib is coming from + # there and add an additional flag so that .mod files can be found. Only really + # needed for stopcond and forsockets, and hopefully conda-forge will give us a + # better solution soon. + if test "${FOUND_G6LIB}" == "yes" -a "x$CONDA_PREFIX" != "x" + then + G6LIB_CFLAGS="${G6LIB_CFLAGS} -I${CONDA_PREFIX}/include" + fi + + # If the user overrode the variables, go with what they set instead of + # what we just detected. + if test "x$amuse_save_LIB_CFLAGS" != "x" +then : + + G6LIB_CFLAGS="$amuse_save_LIB_CFLAGS" + +fi + if test "x$amuse_save_LIB_LIBS" != "x" +then : + + G6LIB_LIBS="$amuse_save_LIB_LIBS" + +fi + + + + + + + + + amuse_save_LIBS="$LIBS" + amuse_save_LIB_CFLAGS="$SAPPORO_LIGHT_CFLAGS" + amuse_save_LIB_LIBS="$SAPPORO_LIGHT_LIBS" + amuse_save_PKG_CONFIG_PATH="$PKG_CONFIG_PATH" + + # If we have an active virtualenv, make sure pkg-config searches it + if test "a${VIRTUAL_ENV}" != "a" + then + PKG_CONFIG_PATH="${VIRTUAL_ENV}/lib/pkgconfig:${PKG_CONFIG_PATH}" + fi + + # All AMUSE libs export C symbols + ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + # Search for the library, first directly then fall back to pkg-config + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for library containing get_device_count" >&5 +printf %s "checking for library containing get_device_count... " >&6; } +if test ${ac_cv_search_get_device_count+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_func_search_save_LIBS=$LIBS +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. 
+ The 'extern "C"' is for builds by C++ compilers; + although this is not generally supported in C code supporting it here + has little cost and some practical benefit (sr 110532). */ +#ifdef __cplusplus +extern "C" +#endif +char get_device_count (void); +int +main (void) +{ +return get_device_count (); + ; + return 0; +} +_ACEOF +for ac_lib in '' sapporo +do + if test -z "$ac_lib"; then + ac_res="none required" + else + ac_res=-l$ac_lib + LIBS="-l$ac_lib $ac_func_search_save_LIBS" + fi + if ac_fn_c_try_link "$LINENO" +then : + ac_cv_search_get_device_count=$ac_res +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam \ + conftest$ac_exeext + if test ${ac_cv_search_get_device_count+y} +then : + break +fi +done +if test ${ac_cv_search_get_device_count+y} +then : + +else case e in #( + e) ac_cv_search_get_device_count=no ;; +esac +fi +rm conftest.$ac_ext +LIBS=$ac_func_search_save_LIBS ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_get_device_count" >&5 +printf "%s\n" "$ac_cv_search_get_device_count" >&6; } +ac_res=$ac_cv_search_get_device_count +if test "$ac_res" != no +then : + test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" + + FOUND_SAPPORO_LIGHT="yes" + SAPPORO_LIGHT_LIBS="$LIBS" + SAPPORO_LIGHT_CFLAGS="" + +else case e in #( + e) + +pkg_failed=no +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for SAPPORO_LIGHT" >&5 +printf %s "checking for SAPPORO_LIGHT... " >&6; } + +if test -n "$SAPPORO_LIGHT_CFLAGS"; then + pkg_cv_SAPPORO_LIGHT_CFLAGS="$SAPPORO_LIGHT_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"sapporo_light\""; } >&5 + ($PKG_CONFIG --exists --print-errors "sapporo_light") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_SAPPORO_LIGHT_CFLAGS=`$PKG_CONFIG --cflags "sapporo_light" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$SAPPORO_LIGHT_LIBS"; then + pkg_cv_SAPPORO_LIGHT_LIBS="$SAPPORO_LIGHT_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"sapporo_light\""; } >&5 + ($PKG_CONFIG --exists --print-errors "sapporo_light") 2>&5 + ac_status=$? + printf "%s\n" "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_SAPPORO_LIGHT_LIBS=`$PKG_CONFIG --libs "sapporo_light" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + SAPPORO_LIGHT_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "sapporo_light" 2>&1` + else + SAPPORO_LIGHT_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "sapporo_light" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$SAPPORO_LIGHT_PKG_ERRORS" >&5 + + + FOUND_SAPPORO_LIGHT="no" + +elif test $pkg_failed = untried; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } + + FOUND_SAPPORO_LIGHT="no" + +else + SAPPORO_LIGHT_CFLAGS=$pkg_cv_SAPPORO_LIGHT_CFLAGS + SAPPORO_LIGHT_LIBS=$pkg_cv_SAPPORO_LIGHT_LIBS + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } + + FOUND_SAPPORO_LIGHT="yes" + +fi + ;; +esac +fi + + + ac_ext=${ac_fc_srcext-f} +ac_compile='$FC -c $FCFLAGS 
$ac_fcflags_srcext conftest.$ac_ext >&5' +ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_fc_compiler_gnu + + + PKG_CONFIG_PATH="$amuse_save_PKG_CONFIG_PATH" + LIBS="$amuse_save_LIBS" + + # If we have an active CONDA environment, assume that the lib is coming from + # there and add an additional flag so that .mod files can be found. Only really + # needed for stopcond and forsockets, and hopefully conda-forge will give us a + # better solution soon. + if test "${FOUND_SAPPORO_LIGHT}" == "yes" -a "x$CONDA_PREFIX" != "x" + then + SAPPORO_LIGHT_CFLAGS="${SAPPORO_LIGHT_CFLAGS} -I${CONDA_PREFIX}/include" + fi + + # If the user overrode the variables, go with what they set instead of + # what we just detected. + if test "x$amuse_save_LIB_CFLAGS" != "x" +then : + + SAPPORO_LIGHT_CFLAGS="$amuse_save_LIB_CFLAGS" + +fi + if test "x$amuse_save_LIB_LIBS" != "x" +then : + + SAPPORO_LIGHT_LIBS="$amuse_save_LIB_LIBS" + +fi + + + + + + + + +# Find external dependencies +##### These select a language to use to detect a library ##### +##### Wrap the library tests below in these depending on ##### +##### from which language the library will be used by the code ##### +ac_ext=${ac_fc_srcext-f} +ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' +ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_fc_compiler_gnu + +ac_ext=${ac_fc_srcext-f} +ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' +ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_fc_compiler_gnu + + +##### Wrap these into language macros as needed ##### +##### MPI is always needed to build the worker ##### + + + + + + for ac_prog in mpif90 mpxlf95_r mpxlf90_r mpxlf95 mpxlf90 mpf90 cmpif90c +do + # Extract the first word of "$ac_prog", so it can be a program name with 
args. +set dummy $ac_prog; ac_word=$2 +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +printf %s "checking for $ac_word... " >&6; } +if test ${ac_cv_prog_MPIFC+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) if test -n "$MPIFC"; then + ac_cv_prog_MPIFC="$MPIFC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir$ac_word$ac_exec_ext"; then + ac_cv_prog_MPIFC="$ac_prog" + printf "%s\n" "$as_me:${as_lineno-$LINENO}: found $as_dir$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi ;; +esac +fi +MPIFC=$ac_cv_prog_MPIFC +if test -n "$MPIFC"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $MPIFC" >&5 +printf "%s\n" "$MPIFC" >&6; } +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } +fi + + + test -n "$MPIFC" && break +done +test -n "$MPIFC" || MPIFC="$FC" + + ax_mpi_save_FC="$FC" + FC="$MPIFC" + + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking checking MPI Fortran flags" >&5 +printf %s "checking checking MPI Fortran flags... 
" >&6; } + ax_mpi_fc_flags="`$MPIFC -showme:compile 2>/dev/null| cut -d\ -f2-`" + ax_mpi_fc_libs="`$MPIFC -showme:link 2>/dev/null| cut -d\ -f2-`" + if test "x$ax_mpi_fc_flags" = "x" +then : + + ax_mpi_fc_flags="`$MPIFC -show -c 2>/dev/null| cut -d\ -f2-|sed s/-c\ //`" + ax_mpi_fc_libs="`$MPIFC -show 2>/dev/null| cut -d\ -f2-`" + if test "x$ax_mpi_fc_flags" = "x" +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: could not determine c flags from show functions" >&5 +printf "%s\n" "could not determine c flags from show functions" >&6; } +else case e in #( + e) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: flags found" >&5 +printf "%s\n" "flags found" >&6; } ;; +esac +fi + + +else case e in #( + e) + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: flags found" >&5 +printf "%s\n" "flags found" >&6; } ;; +esac +fi + MPI_FCFLAGS="$ax_mpi_fc_flags" + MPI_FCLIBS="$ax_mpi_fc_libs" + + + + +if test x = x"$MPILIBS"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for MPI_Init" >&5 +printf %s "checking for MPI_Init... " >&6; } + cat > conftest.$ac_ext <<_ACEOF + program main + call MPI_Init + end +_ACEOF +if ac_fn_fc_try_link "$LINENO" +then : + MPILIBS=" " + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } +else case e in #( + e) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } ;; +esac +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam \ + conftest$ac_exeext conftest.$ac_ext +fi + + if test x = x"$MPILIBS"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for MPI_Init in -lfmpi" >&5 +printf %s "checking for MPI_Init in -lfmpi... 
" >&6; } +if test ${ac_cv_lib_fmpi_MPI_Init+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_check_lib_save_LIBS=$LIBS +LIBS="-lfmpi $LIBS" +cat > conftest.$ac_ext <<_ACEOF + program main + call MPI_Init + end +_ACEOF +if ac_fn_fc_try_link "$LINENO" +then : + ac_cv_lib_fmpi_MPI_Init=yes +else case e in #( + e) ac_cv_lib_fmpi_MPI_Init=no ;; +esac +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_fmpi_MPI_Init" >&5 +printf "%s\n" "$ac_cv_lib_fmpi_MPI_Init" >&6; } +if test "x$ac_cv_lib_fmpi_MPI_Init" = xyes +then : + MPILIBS="-lfmpi" +fi + + fi + if test x = x"$MPILIBS"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for MPI_Init in -lmpichf90" >&5 +printf %s "checking for MPI_Init in -lmpichf90... " >&6; } +if test ${ac_cv_lib_mpichf90_MPI_Init+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_check_lib_save_LIBS=$LIBS +LIBS="-lmpichf90 $LIBS" +cat > conftest.$ac_ext <<_ACEOF + program main + call MPI_Init + end +_ACEOF +if ac_fn_fc_try_link "$LINENO" +then : + ac_cv_lib_mpichf90_MPI_Init=yes +else case e in #( + e) ac_cv_lib_mpichf90_MPI_Init=no ;; +esac +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_mpichf90_MPI_Init" >&5 +printf "%s\n" "$ac_cv_lib_mpichf90_MPI_Init" >&6; } +if test "x$ac_cv_lib_mpichf90_MPI_Init" = xyes +then : + MPILIBS="-lmpichf90" +fi + + fi + + +if test x = x"$MPILIBS"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for MPI_Init in -lmpi" >&5 +printf %s "checking for MPI_Init in -lmpi... 
" >&6; } +if test ${ac_cv_lib_mpi_MPI_Init+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_check_lib_save_LIBS=$LIBS +LIBS="-lmpi $LIBS" +cat > conftest.$ac_ext <<_ACEOF + program main + call MPI_Init + end +_ACEOF +if ac_fn_fc_try_link "$LINENO" +then : + ac_cv_lib_mpi_MPI_Init=yes +else case e in #( + e) ac_cv_lib_mpi_MPI_Init=no ;; +esac +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_mpi_MPI_Init" >&5 +printf "%s\n" "$ac_cv_lib_mpi_MPI_Init" >&6; } +if test "x$ac_cv_lib_mpi_MPI_Init" = xyes +then : + MPILIBS="-lmpi" +fi + +fi +if test x = x"$MPILIBS"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for MPI_Init in -lmpich" >&5 +printf %s "checking for MPI_Init in -lmpich... " >&6; } +if test ${ac_cv_lib_mpich_MPI_Init+y} +then : + printf %s "(cached) " >&6 +else case e in #( + e) ac_check_lib_save_LIBS=$LIBS +LIBS="-lmpich $LIBS" +cat > conftest.$ac_ext <<_ACEOF + program main + call MPI_Init + end +_ACEOF +if ac_fn_fc_try_link "$LINENO" +then : + ac_cv_lib_mpich_MPI_Init=yes +else case e in #( + e) ac_cv_lib_mpich_MPI_Init=no ;; +esac +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS ;; +esac +fi +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_mpich_MPI_Init" >&5 +printf "%s\n" "$ac_cv_lib_mpich_MPI_Init" >&6; } +if test "x$ac_cv_lib_mpich_MPI_Init" = xyes +then : + MPILIBS="-lmpich" +fi + +fi + +if test x != x"$MPILIBS"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for mpif.h" >&5 +printf %s "checking for mpif.h... 
" >&6; } + cat > conftest.$ac_ext <<_ACEOF + program main + include 'mpif.h' + end +_ACEOF +if ac_fn_fc_try_compile "$LINENO" +then : + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +printf "%s\n" "yes" >&6; } +else case e in #( + e) MPILIBS="" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 +printf "%s\n" "no" >&6; } ;; +esac +fi +rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext +fi + +FC="$ax_mpi_save_FC" + + + +# Finally, execute ACTION-IF-FOUND/ACTION-IF-NOT-FOUND: +if test x = x"$MPILIBS"; then + + : +else + +printf "%s\n" "#define HAVE_MPI 1" >>confdefs.h + + : +fi + + + +# Generate output +ac_config_files="$ac_config_files config.mk" + +cat >confcache <<\_ACEOF +# This file is a shell script that caches the results of configure +# tests run on this system so they can be shared between configure +# scripts and configure runs, see configure's option --config-cache. +# It is not useful on other systems. If it contains results you don't +# want to keep, you may remove or edit it. +# +# config.status only pays attention to the cache file if you give it +# the --recheck option to rerun configure. +# +# 'ac_cv_env_foo' variables (set or unset) will be overridden when +# loading this file, other *unset* 'ac_cv_foo' will be assigned the +# following values. + +_ACEOF + +# The following way of writing the cache mishandles newlines in values, +# but we know of no workaround that is simple, portable, and efficient. +# So, we kill variables containing newlines. +# Ultrix sh set writes to stderr and can't be redirected directly, +# and sets the high bit in the cache file unless we assign to the vars. 
+( + for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + + (set) 2>&1 | + case $as_nl`(ac_space=' '; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + # 'set' does not quote correctly, so add quotes: double-quote + # substitution turns \\\\ into \\, and sed turns \\ into \. + sed -n \ + "s/'/'\\\\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" + ;; #( + *) + # 'set' quotes correctly as required by POSIX, so do not add quotes. + sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) | + sed ' + /^ac_cv_env_/b end + t clear + :clear + s/^\([^=]*\)=\(.*[{}].*\)$/test ${\1+y} || &/ + t end + s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ + :end' >>confcache +if diff "$cache_file" confcache >/dev/null 2>&1; then :; else + if test -w "$cache_file"; then + if test "x$cache_file" != "x/dev/null"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 +printf "%s\n" "$as_me: updating cache $cache_file" >&6;} + if test ! 
-f "$cache_file" || test -h "$cache_file"; then + cat confcache >"$cache_file" + else + case $cache_file in #( + */* | ?:*) + mv -f confcache "$cache_file"$$ && + mv -f "$cache_file"$$ "$cache_file" ;; #( + *) + mv -f confcache "$cache_file" ;; + esac + fi + fi + else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 +printf "%s\n" "$as_me: not updating unwritable cache $cache_file" >&6;} + fi +fi +rm -f confcache + +test "x$prefix" = xNONE && prefix=$ac_default_prefix +# Let make expand exec_prefix. +test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' + +# Transform confdefs.h into DEFS. +# Protect against shell expansion while executing Makefile rules. +# Protect against Makefile macro expansion. +# +# If the first sed substitution is executed (which looks for macros that +# take arguments), then branch to the quote section. Otherwise, +# look for a macro that doesn't take arguments. +ac_script=' +:mline +/\\$/{ + N + s,\\\n,, + b mline +} +t clear +:clear +s/^[ ]*#[ ]*define[ ][ ]*\([^ (][^ (]*([^)]*)\)[ ]*\(.*\)/-D\1=\2/g +t quote +s/^[ ]*#[ ]*define[ ][ ]*\([^ ][^ ]*\)[ ]*\(.*\)/-D\1=\2/g +t quote +b any +:quote +s/[][ `~#$^&*(){}\\|;'\''"<>?]/\\&/g +s/\$/$$/g +H +:any +${ + g + s/^\n// + s/\n/ /g + p +} +' +DEFS=`sed -n "$ac_script" confdefs.h` + + +ac_libobjs= +ac_ltlibobjs= +U= +for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue + # 1. Remove the extension, and $U if already installed. + ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' + ac_i=`printf "%s\n" "$ac_i" | sed "$ac_script"` + # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR + # will be set to the directory where LIBOBJS objects are built. 
+ as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" + as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' +done +LIBOBJS=$ac_libobjs + +LTLIBOBJS=$ac_ltlibobjs + + + +: "${CONFIG_STATUS=./config.status}" +ac_write_fail=0 +ac_clean_files_save=$ac_clean_files +ac_clean_files="$ac_clean_files $CONFIG_STATUS" +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 +printf "%s\n" "$as_me: creating $CONFIG_STATUS" >&6;} +as_write_fail=0 +cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 +#! $SHELL +# Generated by $as_me. +# Run this file to recreate the current configuration. +# Compiler output produced by configure, useful for debugging +# configure, is in config.log if it exists. + +debug=false +ac_cs_recheck=false +ac_cs_silent=false + +SHELL=\${CONFIG_SHELL-$SHELL} +export SHELL +_ASEOF +cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 +## -------------------- ## +## M4sh Initialization. ## +## -------------------- ## + +# Be more Bourne compatible +DUALCASE=1; export DUALCASE # for MKS sh +if test ${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 +then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else case e in #( + e) case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac ;; +esac +fi + + + +# Reset variables that may have inherited troublesome values from +# the environment. + +# IFS needs to be set, to space, tab, and newline, in precisely that order. +# (If _AS_PATH_WALK were called with IFS unset, it would have the +# side effect of setting IFS to empty, thus disabling word splitting.) +# Quoting is to prevent editors from complaining about space-tab. +as_nl=' +' +export as_nl +IFS=" "" $as_nl" + +PS1='$ ' +PS2='> ' +PS4='+ ' + +# Ensure predictable behavior from utilities with locale-dependent output. 
+LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# We cannot yet rely on "unset" to work, but we need these variables +# to be unset--not just set to an empty or harmless value--now, to +# avoid bugs in old shells (e.g. pre-3.0 UWIN ksh). This construct +# also avoids known problems related to "unset" and subshell syntax +# in other old shells (e.g. bash 2.01 and pdksh 5.2.14). +for as_var in BASH_ENV ENV MAIL MAILPATH CDPATH +do eval test \${$as_var+y} \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done + +# Ensure that fds 0, 1, and 2 are open. +if (exec 3>&0) 2>/dev/null; then :; else exec 0&1) 2>/dev/null; then :; else exec 1>/dev/null; fi +if (exec 3>&2) ; then :; else exec 2>/dev/null; fi + +# The user is always right. +if ${PATH_SEPARATOR+false} :; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } +fi + + +# Find who we are. Look in the path if we contain no directory separator. +as_myself= +case $0 in #(( + *[\\/]* ) as_myself=$0 ;; + *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + test -r "$as_dir$0" && as_myself=$as_dir$0 && break + done +IFS=$as_save_IFS + + ;; +esac +# We did not find ourselves, most probably we were run as 'sh COMMAND' +# in which case we are not to be found in the path. +if test "x$as_myself" = x; then + as_myself=$0 +fi +if test ! -f "$as_myself"; then + printf "%s\n" "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + + + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. 
+as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + printf "%s\n" "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error + + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset + +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null +then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else case e in #( + e) as_fn_append () + { + eval $1=\$$1\$2 + } ;; +esac +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. +if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null +then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else case e in #( + e) as_fn_arith () + { + as_val=`expr "$@" || test $? 
-eq 1` + } ;; +esac +fi # as_fn_arith + + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. +as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + + +# Determine whether it's possible to make 'echo' print without a newline. +# These variables are no longer used directly by Autoconf, but are AC_SUBSTed +# for compatibility with existing Makefiles. +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. + xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +# For backward compatibility with old third-party macros, we provide +# the shell variables $as_echo and $as_echo_n. New code should use +# AS_ECHO(["message"]) and AS_ECHO_N(["message"]), respectively. +as_echo='printf %s\n' +as_echo_n='printf %s' + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... 
but there are two gotchas: + # 1) On MSYS, both 'ln -s file dir' and 'ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; 'ln -s' creates a wrapper executable. + # In both cases, we have to default to 'cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. +as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`printf "%s\n" "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p +if mkdir -p . 2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +as_test_x='test -x' +as_executable_p=as_fn_executable_p + +# Sed expression to map a string onto a valid CPP name. 
+as_sed_cpp="y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g" +as_tr_cpp="eval sed '$as_sed_cpp'" # deprecated + +# Sed expression to map a string onto a valid variable name. +as_sed_sh="y%*+%pp%;s%[^_$as_cr_alnum]%_%g" +as_tr_sh="eval sed '$as_sed_sh'" # deprecated + + +exec 6>&1 +## ----------------------------------- ## +## Main body of $CONFIG_STATUS script. ## +## ----------------------------------- ## +_ASEOF +test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# Save the log message, to keep $0 and so on meaningful, and to +# report actual input values of CONFIG_FILES etc. instead of their +# values after options handling. +ac_log=" +This file was extended by amuse-metisse $as_me 1.0, which was +generated by GNU Autoconf 2.72. Invocation command line was + + CONFIG_FILES = $CONFIG_FILES + CONFIG_HEADERS = $CONFIG_HEADERS + CONFIG_LINKS = $CONFIG_LINKS + CONFIG_COMMANDS = $CONFIG_COMMANDS + $ $0 $@ + +on `(hostname || uname -n) 2>/dev/null | sed 1q` +" + +_ACEOF + +case $ac_config_files in *" +"*) set x $ac_config_files; shift; ac_config_files=$*;; +esac + + + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +# Files that config.status was made for. +config_files="$ac_config_files" + +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +ac_cs_usage="\ +'$as_me' instantiates files and other configuration actions +from templates according to the current configuration. Unless the files +and actions are specified as TAGs, all are instantiated by default. + +Usage: $0 [OPTION]... [TAG]... 
+ + -h, --help print this help, then exit + -V, --version print version number and configuration settings, then exit + --config print configuration, then exit + -q, --quiet, --silent + do not print progress messages + -d, --debug don't remove temporary files + --recheck update $as_me by reconfiguring in the same conditions + --file=FILE[:TEMPLATE] + instantiate the configuration file FILE + +Configuration files: +$config_files + +Report bugs to the package provider." + +_ACEOF +ac_cs_config=`printf "%s\n" "$ac_configure_args" | sed "$ac_safe_unquote"` +ac_cs_config_escaped=`printf "%s\n" "$ac_cs_config" | sed "s/^ //; s/'/'\\\\\\\\''/g"` +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_cs_config='$ac_cs_config_escaped' +ac_cs_version="\\ +amuse-metisse config.status 1.0 +configured by $0, generated by GNU Autoconf 2.72, + with options \\"\$ac_cs_config\\" + +Copyright (C) 2023 Free Software Foundation, Inc. +This config.status script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it." + +ac_pwd='$ac_pwd' +srcdir='$srcdir' +test -n "\$AWK" || AWK=awk +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# The default lists apply if the user does not specify any file. +ac_need_defaults=: +while test $# != 0 +do + case $1 in + --*=?*) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` + ac_shift=: + ;; + --*=) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg= + ac_shift=: + ;; + *) + ac_option=$1 + ac_optarg=$2 + ac_shift=shift + ;; + esac + + case $ac_option in + # Handling of the options. 
+ -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) + ac_cs_recheck=: ;; + --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) + printf "%s\n" "$ac_cs_version"; exit ;; + --config | --confi | --conf | --con | --co | --c ) + printf "%s\n" "$ac_cs_config"; exit ;; + --debug | --debu | --deb | --de | --d | -d ) + debug=: ;; + --file | --fil | --fi | --f ) + $ac_shift + case $ac_optarg in + *\'*) ac_optarg=`printf "%s\n" "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; + '') as_fn_error $? "missing file argument" ;; + esac + as_fn_append CONFIG_FILES " '$ac_optarg'" + ac_need_defaults=false;; + --he | --h | --help | --hel | -h ) + printf "%s\n" "$ac_cs_usage"; exit ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil | --si | --s) + ac_cs_silent=: ;; + + # This is an error. + -*) as_fn_error $? "unrecognized option: '$1' +Try '$0 --help' for more information." ;; + + *) as_fn_append ac_config_targets " $1" + ac_need_defaults=false ;; + + esac + shift +done + +ac_configure_extra_args= + +if $ac_cs_silent; then + exec 6>/dev/null + ac_configure_extra_args="$ac_configure_extra_args --silent" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +if \$ac_cs_recheck; then + set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion + shift + \printf "%s\n" "running CONFIG_SHELL=$SHELL \$*" >&6 + CONFIG_SHELL='$SHELL' + export CONFIG_SHELL + exec "\$@" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +exec 5>>config.log +{ + echo + sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX +## Running $as_me. ## +_ASBOX + printf "%s\n" "$ac_log" +} >&5 + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 + +# Handling of arguments. 
+for ac_config_target in $ac_config_targets +do + case $ac_config_target in + "config.mk") CONFIG_FILES="$CONFIG_FILES config.mk" ;; + + *) as_fn_error $? "invalid argument: '$ac_config_target'" "$LINENO" 5;; + esac +done + + +# If the user did not use the arguments to specify the items to instantiate, +# then the envvar interface is used. Set only those that are not. +# We use the long form for the default assignment because of an extremely +# bizarre bug on SunOS 4.1.3. +if $ac_need_defaults; then + test ${CONFIG_FILES+y} || CONFIG_FILES=$config_files +fi + +# Have a temporary directory for convenience. Make it in the build tree +# simply because there is no reason against having it here, and in addition, +# creating and moving files from /tmp can sometimes cause problems. +# Hook for its removal unless debugging. +# Note that there is a small window in which the directory will not be cleaned: +# after its creation but before its name has been assigned to '$tmp'. +$debug || +{ + tmp= ac_tmp= + trap 'exit_status=$? + : "${ac_tmp:=$tmp}" + { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status +' 0 + trap 'as_fn_exit 1' 1 2 13 15 +} +# Create a (secure) tmp directory for tmp files. + +{ + tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && + test -d "$tmp" +} || +{ + tmp=./conf$$-$RANDOM + (umask 077 && mkdir "$tmp") +} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 +ac_tmp=$tmp + +# Set up the scripts for CONFIG_FILES section. +# No need to generate them if there are no CONFIG_FILES. +# This happens for instance with './config.status config.h'. +if test -n "$CONFIG_FILES"; then + + +ac_cr=`echo X | tr X '\015'` +# On cygwin, bash can eat \r inside `` if the user requested igncr. +# But we know of no other shell where ac_cr would be empty at this +# point, so we can use a bashism as a fallback. 
+if test "x$ac_cr" = x; then + eval ac_cr=\$\'\\r\' +fi +ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` +if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then + ac_cs_awk_cr='\\r' +else + ac_cs_awk_cr=$ac_cr +fi + +echo 'BEGIN {' >"$ac_tmp/subs1.awk" && +_ACEOF + + +{ + echo "cat >conf$$subs.awk <<_ACEOF" && + echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && + echo "_ACEOF" +} >conf$$subs.sh || + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 +ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` +ac_delim='%!_!# ' +for ac_last_try in false false false false false :; do + . ./conf$$subs.sh || + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 + + ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` + if test $ac_delim_n = $ac_delim_num; then + break + elif $ac_last_try; then + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 + else + ac_delim="$ac_delim!$ac_delim _$ac_delim!! " + fi +done +rm -f conf$$subs.sh + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && +_ACEOF +sed -n ' +h +s/^/S["/; s/!.*/"]=/ +p +g +s/^[^!]*!// +:repl +t repl +s/'"$ac_delim"'$// +t delim +:nl +h +s/\(.\{148\}\)..*/\1/ +t more1 +s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ +p +n +b repl +:more1 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t nl +:delim +h +s/\(.\{148\}\)..*/\1/ +t more2 +s/["\\]/\\&/g; s/^/"/; s/$/"/ +p +b +:more2 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t delim +' >$CONFIG_STATUS || ac_write_fail=1 +rm -f conf$$subs.awk +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +_ACAWK +cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && + for (key in S) S_is_set[key] = 1 + FS = "" + +} +{ + line = $ 0 + nfields = split(line, field, "@") + substed = 0 + len = length(field[1]) + for (i = 2; i < nfields; i++) { + key = field[i] + keylen = length(key) + if (S_is_set[key]) { + value = S[key] + line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) + len += length(value) + 
length(field[++i]) + substed = 1 + } else + len += 1 + keylen + } + + print line +} + +_ACAWK +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then + sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" +else + cat +fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ + || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 +_ACEOF + +# VPATH may cause trouble with some makes, so we remove sole $(srcdir), +# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and +# trailing colons and then remove the whole line if VPATH becomes empty +# (actually we leave an empty line to preserve line numbers). +if test "x$srcdir" = x.; then + ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ +h +s/// +s/^/:/ +s/[ ]*$/:/ +s/:\$(srcdir):/:/g +s/:\${srcdir}:/:/g +s/:@srcdir@:/:/g +s/^:*// +s/:*$// +x +s/\(=[ ]*\).*/\1/ +G +s/\n// +s/^[^=]*=[ ]*$// +}' +fi + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +fi # test -n "$CONFIG_FILES" + + +eval set X " :F $CONFIG_FILES " +shift +for ac_tag +do + case $ac_tag in + :[FHLC]) ac_mode=$ac_tag; continue;; + esac + case $ac_mode$ac_tag in + :[FHL]*:*);; + :L* | :C*:*) as_fn_error $? "invalid tag '$ac_tag'" "$LINENO" 5;; + :[FH]-) ac_tag=-:-;; + :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; + esac + ac_save_IFS=$IFS + IFS=: + set x $ac_tag + IFS=$ac_save_IFS + shift + ac_file=$1 + shift + + case $ac_mode in + :L) ac_source=$1;; + :[FH]) + ac_file_inputs= + for ac_f + do + case $ac_f in + -) ac_f="$ac_tmp/stdin";; + *) # Look for the file first in the build tree, then in the source tree + # (if the path is not absolute). The absolute path cannot be DOS-style, + # because $ac_f cannot contain ':'. 
+ test -f "$ac_f" || + case $ac_f in + [\\/$]*) false;; + *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; + esac || + as_fn_error 1 "cannot find input file: '$ac_f'" "$LINENO" 5;; + esac + case $ac_f in *\'*) ac_f=`printf "%s\n" "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac + as_fn_append ac_file_inputs " '$ac_f'" + done + + # Let's still pretend it is 'configure' which instantiates (i.e., don't + # use $as_me), people would be surprised to read: + # /* config.h. Generated by config.status. */ + configure_input='Generated from '` + printf "%s\n" "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' + `' by configure.' + if test x"$ac_file" != x-; then + configure_input="$ac_file. $configure_input" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 +printf "%s\n" "$as_me: creating $ac_file" >&6;} + fi + # Neutralize special characters interpreted by sed in replacement strings. + case $configure_input in #( + *\&* | *\|* | *\\* ) + ac_sed_conf_input=`printf "%s\n" "$configure_input" | + sed 's/[\\\\&|]/\\\\&/g'`;; #( + *) ac_sed_conf_input=$configure_input;; + esac + + case $ac_tag in + *:-:* | *:-) cat >"$ac_tmp/stdin" \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; + esac + ;; + esac + + ac_dir=`$as_dirname -- "$ac_file" || +$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$ac_file" : 'X\(//\)[^/]' \| \ + X"$ac_file" : 'X\(//\)$' \| \ + X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X"$ac_file" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + as_dir="$ac_dir"; as_fn_mkdir_p + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`printf "%s\n" "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." for each directory in $ac_dir_suffix. 
+ ac_top_builddir_sub=`printf "%s\n" "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. + ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. + ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + + case $ac_mode in + :F) + # + # CONFIG_FILE + # + +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# If the template does not know about datarootdir, expand it. +# FIXME: This hack should be removed a few years after 2.60. +ac_datarootdir_hack=; ac_datarootdir_seen= +ac_sed_dataroot=' +/datarootdir/ { + p + q +} +/@datadir@/p +/@docdir@/p +/@infodir@/p +/@localedir@/p +/@mandir@/p' +case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in +*datarootdir*) ac_datarootdir_seen=yes;; +*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 +printf "%s\n" "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 + ac_datarootdir_hack=' + s&@datadir@&$datadir&g + s&@docdir@&$docdir&g + s&@infodir@&$infodir&g + s&@localedir@&$localedir&g + s&@mandir@&$mandir&g + s&\\\${datarootdir}&$datarootdir&g' ;; +esac +_ACEOF + +# Neutralize VPATH when '$srcdir' = '.'. +# Shell code in configure.ac might set extrasub. 
+# FIXME: do we really want to maintain this feature? +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_sed_extra="$ac_vpsub +$extrasub +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +:t +/@[a-zA-Z_][a-zA-Z_0-9]*@/!b +s|@configure_input@|$ac_sed_conf_input|;t t +s&@top_builddir@&$ac_top_builddir_sub&;t t +s&@top_build_prefix@&$ac_top_build_prefix&;t t +s&@srcdir@&$ac_srcdir&;t t +s&@abs_srcdir@&$ac_abs_srcdir&;t t +s&@top_srcdir@&$ac_top_srcdir&;t t +s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t +s&@builddir@&$ac_builddir&;t t +s&@abs_builddir@&$ac_abs_builddir&;t t +s&@abs_top_builddir@&$ac_abs_top_builddir&;t t +$ac_datarootdir_hack +" +eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ + >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + +test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && + { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && + { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ + "$ac_tmp/out"`; test -z "$ac_out"; } && + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable 'datarootdir' +which seems to be undefined. Please make sure it is defined" >&5 +printf "%s\n" "$as_me: WARNING: $ac_file contains a reference to the variable 'datarootdir' +which seems to be undefined. Please make sure it is defined" >&2;} + + rm -f "$ac_tmp/stdin" + case $ac_file in + -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; + *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; + esac \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + ;; + + + + esac + +done # for ac_tag + + +as_fn_exit 0 +_ACEOF +ac_clean_files=$ac_clean_files_save + +test $ac_write_fail = 0 || + as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 + + +# configure is writing to config.log, and then calls config.status. +# config.status does its own redirection, appending to config.log. 
+# Unfortunately, on DOS this fails, as config.log is still kept open +# by configure, so config.status won't be able to write to it; its +# output is simply discarded. So we exec the FD to /dev/null, +# effectively closing config.log, so it can be properly (re)opened and +# appended to by config.status. When coming back to configure, we +# need to make the FD available again. +if test "$no_create" != yes; then + ac_cs_success=: + ac_config_status_args= + test "$silent" = yes && + ac_config_status_args="$ac_config_status_args --quiet" + exec 5>/dev/null + $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false + exec 5>>config.log + # Use ||, not &&, to avoid exiting from the if with $? = 1, which + # would make configure fail if this is the last instruction. + $ac_cs_success || as_fn_exit 1 +fi +if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 +printf "%s\n" "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} +fi + + diff --git a/src/amuse/community/metisse/support/configure.ac b/src/amuse/community/metisse/support/configure.ac new file mode 100644 index 0000000000..0bf98efe38 --- /dev/null +++ b/src/amuse/community/metisse/support/configure.ac @@ -0,0 +1,62 @@ +AC_CONFIG_AUX_DIR([shared]) +AC_CONFIG_MACRO_DIRS([shared/m4]) + +AC_INIT([amuse-metisse], [1.0]) + +# Pick up Conda env or virtualenv +AMUSE_VENV() + +# Set the worker language +AC_LANG([Fortran]) + +# Set CPU_COUNT +AX_COUNT_CPUS() +AC_SUBST(CPU_COUNT) + +# Find the compiler(s) +AC_PROG_FC() +AX_FC_ISO_C_BINDING() + +# Find tools for creating static libraries +AC_CHECK_TARGET_TOOL([AR], [ar], [:]) +AS_IF([test $AR = ":"], [AC_MSG_ERROR([ar command not found.])]) + +AC_PROG_RANLIB() +AS_IF([test $RANLIB = ":"], [AC_MSG_ERROR([ranlib command not found.])]) + + +# Find tools to download and unpack with +AMUSE_DOWNLOAD() +AC_CHECK_TOOL(TAR, 
tar) +AC_CHECK_TOOL(UNZIP, unzip) +AC_CHECK_TOOL(GUNZIP, gunzip) +AC_CHECK_TOOL(BUNZIP2, bunzip2) +AC_CHECK_TOOL(UNXZ, unxz) +AC_CHECK_TOOL(PERL, perl) + + +# Find AMUSE libraries +AMUSE_LIB_STOPCOND() +AMUSE_LIB_STOPCONDMPI() +AMUSE_LIB_AMUSE_MPI() +AMUSE_LIB_FORSOCKETS() +AMUSE_LIB_SIMPLE_HASH() +AMUSE_LIB_G6LIB() +AMUSE_LIB_SAPPORO_LIGHT() + + +# Find external dependencies +##### These select a language to use to detect a library ##### +##### Wrap the library tests below in these depending on ##### +##### from which language the library will be used by the code ##### +AC_LANG_PUSH([Fortran]) +AC_LANG_POP([Fortran]) + +##### Wrap these into language macros as needed ##### +##### MPI is always needed to build the worker ##### +AX_MPI() + + +# Generate output +AC_CONFIG_FILES([config.mk]) +AC_OUTPUT diff --git a/src/amuse/community/metisse/support/shared b/src/amuse/community/metisse/support/shared new file mode 120000 index 0000000000..77c5e6f8ed --- /dev/null +++ b/src/amuse/community/metisse/support/shared @@ -0,0 +1 @@ +../../../../../support/shared \ No newline at end of file diff --git a/src/amuse/community/metisse/tests/test_metisse.py b/src/amuse/community/metisse/tests/test_metisse.py new file mode 100644 index 0000000000..f7045f1ae0 --- /dev/null +++ b/src/amuse/community/metisse/tests/test_metisse.py @@ -0,0 +1,19 @@ +from amuse.test.amusetest import TestWithMPI + +from .interface import MetisseInterface +from .interface import Metisse + +class MetisseInterfaceTests(TestWithMPI): + def test_initialize(self): + instance = MetisseInterface() + error = instance.initialize() + self.assertEqual(error, 0) + instance.stop() + + +class MetisseTests(TestWithMPI): + def test_initialize(self): + instance = Metisse() + error = instance.initialize() + self.assertEqual(error, 0) + instance.stop() From aee7cc5d0cd920a95fb61538840b4b81918e9647 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 13 Feb 2025 10:31:27 +0100 Subject: [PATCH 02/40] remove file --- 
src/amuse/community/metisse/packages/.DS_Store | Bin 6148 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 src/amuse/community/metisse/packages/.DS_Store diff --git a/src/amuse/community/metisse/packages/.DS_Store b/src/amuse/community/metisse/packages/.DS_Store deleted file mode 100644 index c6a944795a2c2cdb863daf6997a2455f2864afbe..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeH~Jx&BM427Rzixo60D#~1df*XWoRumKV9_ez#tNT5?@e`%v!CEK;eFYT05~q|;wAuBP^(kq&E>c}`Zg z>`<&#r@yptSPj)I0wORba2m^%-~ZmP|I+`Dle7{65%^aGY|%Y+YyMK+t&^YgyS8vV qb9@_PTR5lDBY+FPiZ2Io#lIO>Q~S`CPJc_M>LK7 Date: Thu, 13 Feb 2025 13:54:57 +0100 Subject: [PATCH 03/40] update interface --- src/amuse/community/metisse/interface.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/amuse/community/metisse/interface.py b/src/amuse/community/metisse/interface.py index 071fb80eac..5914ddfe0a 100644 --- a/src/amuse/community/metisse/interface.py +++ b/src/amuse/community/metisse/interface.py @@ -33,10 +33,6 @@ def __init__(self, **keyword_arguments): **keyword_arguments ) - @remote_function - def initialize(): - returns (result="i") - @remote_function def teststar(mass_in="d", time_in="d"): returns (mass_out="d", result="i") From b87f8b0554246ed6f85fdc014bbc5b7d1e714c8d Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 13 Feb 2025 14:50:50 +0100 Subject: [PATCH 04/40] Stub interface --- src/amuse/community/metisse/interface.f90 | 143 ++++++++++++++++++++++ src/amuse/community/metisse/interface.py | 4 +- 2 files changed, 146 insertions(+), 1 deletion(-) diff --git a/src/amuse/community/metisse/interface.f90 b/src/amuse/community/metisse/interface.f90 index 764d243cd2..dc7b97cb75 100644 --- a/src/amuse/community/metisse/interface.f90 +++ b/src/amuse/community/metisse/interface.f90 @@ -25,5 +25,148 @@ function teststar(mass_in, time, mass_out, error) teststar = 0 end function + function cleanup_code() + implicit none + integer :: 
cleanup_code + cleanup_code=0 + end function + + function commit_parameters() + implicit none + integer :: commit_parameters + commit_parameters=0 + end function + + function commit_particles() + implicit none + integer :: commit_particles + commit_particles=0 + end function + + function delete_star(index_of_the_star) + implicit none + integer :: index_of_the_star + integer :: delete_star + delete_star=0 + end function + + function evolve_for(index_of_the_star, delta_t) + implicit none + integer :: index_of_the_star + double precision :: delta_t + integer :: evolve_for + evolve_for=0 + end function + + function evolve_one_step(index_of_the_star) + implicit none + integer :: index_of_the_star + integer :: evolve_one_step + evolve_one_step=0 + end function + + function get_age(index_of_the_star, age) + implicit none + integer :: index_of_the_star + double precision :: age + integer :: get_age + get_age=0 + end function + + function get_luminosity(index_of_the_star, luminosity) + implicit none + integer :: index_of_the_star + double precision :: luminosity + integer :: get_luminosity + get_luminosity=0 + end function + + function get_mass(index_of_the_star, mass) + implicit none + integer :: index_of_the_star + double precision :: mass + integer :: get_mass + get_mass=0 + end function + + function get_metallicity(metallicity) + implicit none + double precision :: metallicity + integer :: get_metallicity + get_metallicity=0 + end function + + function get_number_of_particles(number_of_particles) + implicit none + integer :: number_of_particles + integer :: get_number_of_particles + get_number_of_particles=0 + end function + + function get_radius(index_of_the_star, radius) + implicit none + integer :: index_of_the_star + double precision :: radius + integer :: get_radius + get_radius=0 + end function + + function get_stellar_type(index_of_the_star, stellar_type) + implicit none + integer :: index_of_the_star, stellar_type + integer :: get_stellar_type + 
get_stellar_type=0 + end function + + function get_temperature(index_of_the_star, temperature) + implicit none + integer :: index_of_the_star + double precision :: temperature + integer :: get_temperature + get_temperature=0 + end function + + function get_time_step(index_of_the_star, time_step) + implicit none + integer :: index_of_the_star + double precision :: time_step + integer :: get_time_step + get_time_step=0 + end function + + function initialize_code() + implicit none + integer :: initialize_code + initialize_code=0 + end function + + function new_particle(index_of_the_star, mass) + implicit none + integer :: index_of_the_star + double precision :: mass + integer :: new_particle + new_particle=0 + end function + + function recommit_parameters() + implicit none + integer :: recommit_parameters + recommit_parameters=0 + end function + + function recommit_particles() + implicit none + integer :: recommit_particles + recommit_particles=0 + end function + + function set_metallicity(metallicity) + implicit none + double precision :: metallicity + integer :: set_metallicity + set_metallicity=0 + end function + + end module diff --git a/src/amuse/community/metisse/interface.py b/src/amuse/community/metisse/interface.py index 5914ddfe0a..93331cdeb3 100644 --- a/src/amuse/community/metisse/interface.py +++ b/src/amuse/community/metisse/interface.py @@ -9,13 +9,15 @@ legacy_function, remote_function, ) +from amuse.community.interface.se import StellarEvolutionInterface from amuse.datamodel import Particles # low level interface class class MetisseInterface( CodeInterface, - LiteratureReferencesMixIn + StellarEvolutionInterface, + LiteratureReferencesMixIn, ): """ Low level interface for metisse From 112cce794f6f42bb1b58680e3ddab405ca0764a3 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 13 Feb 2025 14:51:15 +0100 Subject: [PATCH 05/40] standard imports --- src/amuse/community/metisse/__init__.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff 
--git a/src/amuse/community/metisse/__init__.py b/src/amuse/community/metisse/__init__.py index abe3ba85b6..3b70039b06 100644 --- a/src/amuse/community/metisse/__init__.py +++ b/src/amuse/community/metisse/__init__.py @@ -1 +1,5 @@ -# generated file \ No newline at end of file +""" +Interface for METISSE +""" +from .interface import MetisseInterface +from .interface import Metisse From b69bd9469184092919598c2cdbe46a805a3ab2f7 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 13 Feb 2025 16:58:26 +0100 Subject: [PATCH 06/40] update interface with particle set --- src/amuse/community/metisse/interface.py | 140 ++++++++++++++++++++--- 1 file changed, 121 insertions(+), 19 deletions(-) diff --git a/src/amuse/community/metisse/interface.py b/src/amuse/community/metisse/interface.py index 93331cdeb3..c50cd6037d 100644 --- a/src/amuse/community/metisse/interface.py +++ b/src/amuse/community/metisse/interface.py @@ -10,7 +10,8 @@ remote_function, ) from amuse.community.interface.se import StellarEvolutionInterface -from amuse.datamodel import Particles +from amuse.datamodel import Particles, ParticlesSubset +from amuse.units import units, constants # low level interface class @@ -72,33 +73,134 @@ def define_parameters(self, handler): # "name_of_the_getter", # "name_of_the_setter", # "parameter_name", - # "description", + # "description", # default_value = # ) pass -# the definition of the code data stores, either particle sets: def define_particle_sets(self, handler): - # handler.define_set("particles", "index_of_the_particle") - # handler.set_new("particles", "new_particle") - # handler.set_delete("particles", "delete_particle") - # handler.add_setter("particles", "set_state") - # handler.add_getter("particles", "get_state") - # handler.add_setter("particles", "set_mass") - # handler.add_getter("particles", "get_mass", names=("mass",)) - pass + handler.define_inmemory_set("particles", MetisseParticles) + + handler.add_attribute( + "particles", + "time_step", + 
"get_time_step", + ( + "stellar_type", + "initial_mass", + "age", + "mass", + "main_sequence_lifetime", + "epoch", + ), + ) -# and/or grids: - def define_grids(self, handler): - # handler.define_grid("grid",axes_names = ["x", "y"], grid_class=StructuredGrid) - # handler.set_grid_range("grid", "_grid_range") - # handler.add_getter("grid", "get_grid_position", names=["x", "y"]) - # handler.add_getter("grid", "get_rho", names=["density"]) - # handler.add_setter("grid", "set_rho", names=["density"]) - pass + handler.add_attribute( + "particles", + "mass_loss_wind", + "get_mass_loss_wind", + ("stellar_type", "luminosity", "radius", "mass", "CO_core_mass"), + ) + + handler.add_attribute( + "particles", + "gyration_radius", + "get_gyration_radius", + ( + "stellar_type", + "initial_mass", + "mass", + "radius", + "luminosity", + "epoch", + "main_sequence_lifetime", + "age", + ), + ) class MetisseParticles(Particles): + def __init__(self, code_interface, storage=None): Particles.__init__(self, storage=storage) self._private.code_interface = code_interface + self.add_calculated_attribute( + "temperature", + self.calculate_effective_temperature, + ["luminosity", "radius"], + ) + self.add_function_attribute( + "evolve_one_step", self.particleset_evolve_one_step, self.evolve_one_step + ) + self.add_function_attribute( + "evolve_for", + self.particleset_evolve_for, + self.evolve_for + ) + + def calculate_effective_temperature(self, luminosity, radius): + return ( + (luminosity / (constants.four_pi_stefan_boltzmann * radius**2)) ** 0.25 + ).in_( + units.K + ) + + def add_particles_to_store(self, keys, attributes=[], values=[]): + if len(keys) == 0: + return + + all_attributes = [] + all_attributes.extend(attributes) + all_values = [] + all_values.extend(values) + + mapping_from_attribute_to_default_value = { + "stellar_type": 1 | units.stellar_type, + "radius": 0 | units.RSun, + "luminosity": 0 | units.LSun, + "core_mass": 0 | units.MSun, + "CO_core_mass": 0 | units.MSun, + 
"core_radius": 0 | units.RSun, + "convective_envelope_mass": 0 | units.MSun, + "convective_envelope_radius": 0 | units.RSun, + "epoch": 0 | units.Myr, + "spin": 0 | units.yr**-1, + "main_sequence_lifetime": 0 | units.Myr, + "age": 0 | units.Myr, + } + + given_attributes = set(attributes) + + if "initial_mass" not in given_attributes: + index_of_mass_attibute = attributes.index("mass") + all_attributes.append("initial_mass") + all_values.append(values[index_of_mass_attibute] * 1.0) + + for attribute, default_value in mapping_from_attribute_to_default_value.items(): + if attribute not in given_attributes: + all_attributes.append(attribute) + all_values.append(default_value.as_vector_with_length(len(keys))) + + super().add_particles_to_store(keys, all_attributes, all_values) + + added_particles = ParticlesSubset(self, keys) + self._private.code_interface._evolve_particles(added_particles, 0 | units.yr) + + def evolve_one_step(self, particles, subset): + self._private.code_interface._evolve_particles( + subset.as_set(), subset.age + subset.time_step + ) + + def particleset_evolve_one_step(self, particles): + self._private.code_interface._evolve_particles( + particles, particles.age + particles.time_step + ) + + def evolve_for(self, particles, subset, delta_time): + self._private.code_interface._evolve_particles(subset.as_set(), subset.age + delta_time) + + def particleset_evolve_for(self, particles, delta_time): + self._private.code_interface._evolve_particles(particles, particles.age + delta_time) + + def get_defined_attribute_names(self): + return ["mass", "radius"] From ad1aaff5ba37a2f2d9dd48f158fe33eb7e69cc15 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Fri, 14 Feb 2025 17:21:34 +0100 Subject: [PATCH 07/40] Updates to Metisse interface --- src/amuse/community/metisse/Makefile | 7 +- src/amuse/community/metisse/interface.f90 | 52 ++- src/amuse/community/metisse/interface.py | 63 +--- src/amuse/community/metisse/storage.f90 | 376 +++++++++++++++++++ 
src/amuse/community/metisse/test_storage.f90 | 43 +++ 5 files changed, 474 insertions(+), 67 deletions(-) create mode 100644 src/amuse/community/metisse/storage.f90 create mode 100644 src/amuse/community/metisse/test_storage.f90 diff --git a/src/amuse/community/metisse/Makefile b/src/amuse/community/metisse/Makefile index 3592cb3bac..6dddff4634 100644 --- a/src/amuse/community/metisse/Makefile +++ b/src/amuse/community/metisse/Makefile @@ -51,10 +51,13 @@ src/METISSE/$(CODELIB): | src/METISSE/make metisse_worker.f90: interface.py amusifier --type=f90 interface.py MetisseInterface -o $@ -metisse_worker.o: metisse_worker.f90 +storage.o: storage.f90 $(MPIFC) -c -o $@ $(FCFLAGS) $< -metisse_worker: interface.o metisse_worker.o src/METISSE/$(CODELIB) +metisse_worker.o: metisse_worker.f90 storage.o + $(MPIFC) -c -o $@ $(FCFLAGS) $< + +metisse_worker: interface.o metisse_worker.o storage.o src/METISSE/$(CODELIB) $(MPIFC) -o $@ $(LDFLAGS) $^ $(LDLIBS) %.o: %.f90 diff --git a/src/amuse/community/metisse/interface.f90 b/src/amuse/community/metisse/interface.f90 index dc7b97cb75..ca1da5966c 100644 --- a/src/amuse/community/metisse/interface.f90 +++ b/src/amuse/community/metisse/interface.f90 @@ -1,12 +1,22 @@ module metisseInterface use iso_c_binding + use store_stars, only: stars implicit none + type(stars) :: star_system + contains function initialize(error) + use track_support + use z_support implicit none integer :: error integer :: initialize + + real(dp) :: zpars(20) + + call initialize_front_end('main') + initialize = 0 end function @@ -53,7 +63,7 @@ function delete_star(index_of_the_star) function evolve_for(index_of_the_star, delta_t) implicit none integer :: index_of_the_star - double precision :: delta_t + real(c_double) :: delta_t integer :: evolve_for evolve_for=0 end function @@ -68,30 +78,30 @@ function evolve_one_step(index_of_the_star) function get_age(index_of_the_star, age) implicit none integer :: index_of_the_star - double precision :: age + 
real(c_double) :: age integer :: get_age - get_age=0 + call star_system%get_age(index_of_the_star, age, get_age) end function function get_luminosity(index_of_the_star, luminosity) implicit none integer :: index_of_the_star - double precision :: luminosity + real(c_double) :: luminosity integer :: get_luminosity - get_luminosity=0 + call star_system%get_luminosity(index_of_the_star, luminosity, get_luminosity) end function function get_mass(index_of_the_star, mass) implicit none integer :: index_of_the_star - double precision :: mass + real(c_double) :: mass integer :: get_mass - get_mass=0 + call star_system%get_mass(index_of_the_star, mass, get_mass) end function - + function get_metallicity(metallicity) implicit none - double precision :: metallicity + real(c_double) :: metallicity integer :: get_metallicity get_metallicity=0 end function @@ -100,38 +110,39 @@ function get_number_of_particles(number_of_particles) implicit none integer :: number_of_particles integer :: get_number_of_particles - get_number_of_particles=0 + call star_system%get_number_of_stars(number_of_particles) + get_number_of_particles = 0 end function function get_radius(index_of_the_star, radius) implicit none integer :: index_of_the_star - double precision :: radius + real(c_double) :: radius integer :: get_radius - get_radius=0 + call star_system%get_radius(index_of_the_star, radius, get_radius) end function function get_stellar_type(index_of_the_star, stellar_type) implicit none integer :: index_of_the_star, stellar_type integer :: get_stellar_type - get_stellar_type=0 + call star_system%get_stellar_type(index_of_the_star, stellar_type, get_stellar_type) end function function get_temperature(index_of_the_star, temperature) implicit none integer :: index_of_the_star - double precision :: temperature + real(c_double) :: temperature integer :: get_temperature - get_temperature=0 + call star_system%get_temperature(index_of_the_star, temperature, get_temperature) end function function 
get_time_step(index_of_the_star, time_step) implicit none integer :: index_of_the_star - double precision :: time_step + real(c_double) :: time_step integer :: get_time_step - get_time_step=0 + call star_system%get_time_step(index_of_the_star, time_step, get_time_step) end function function initialize_code() @@ -142,9 +153,10 @@ function initialize_code() function new_particle(index_of_the_star, mass) implicit none - integer :: index_of_the_star - double precision :: mass + integer, intent(inout) :: index_of_the_star + real(c_double), intent(inout) :: mass integer :: new_particle + index_of_the_star = star_system%new_star(mass) new_particle=0 end function @@ -162,7 +174,7 @@ function recommit_particles() function set_metallicity(metallicity) implicit none - double precision :: metallicity + real(c_double) :: metallicity integer :: set_metallicity set_metallicity=0 end function diff --git a/src/amuse/community/metisse/interface.py b/src/amuse/community/metisse/interface.py index c50cd6037d..c021d4cd80 100644 --- a/src/amuse/community/metisse/interface.py +++ b/src/amuse/community/metisse/interface.py @@ -9,7 +9,7 @@ legacy_function, remote_function, ) -from amuse.community.interface.se import StellarEvolutionInterface +from amuse.community.interface import se from amuse.datamodel import Particles, ParticlesSubset from amuse.units import units, constants @@ -17,7 +17,7 @@ # low level interface class class MetisseInterface( CodeInterface, - StellarEvolutionInterface, + se.StellarEvolutionInterface, LiteratureReferencesMixIn, ): """ @@ -42,14 +42,13 @@ def teststar(mass_in="d", time_in="d"): # high level interface class -class Metisse(InCodeComponentImplementation): +class Metisse(se.StellarEvolution): + __interface__ = MetisseInterface def __init__(self, **options): - InCodeComponentImplementation.__init__( - self, - MetisseInterface(**options), - **options - ) + # self.stopping_conditions = StoppingConditions(self) + # self.stopping_conditions.supernova_detection = 
code.StoppingCondition('supernova_detection') + se.StellarEvolution.__init__(self, MetisseInterface(**options), **options) # the definition of the state model of the code def define_state(self, handler): @@ -79,44 +78,18 @@ def define_parameters(self, handler): pass def define_particle_sets(self, handler): - handler.define_inmemory_set("particles", MetisseParticles) - - handler.add_attribute( - "particles", - "time_step", - "get_time_step", - ( - "stellar_type", - "initial_mass", - "age", - "mass", - "main_sequence_lifetime", - "epoch", - ), - ) + handler.define_set("particles", "index_of_the_star") + handler.set_new("particles", "new_particle") + handler.set_delete("particles", "delete_star") + + handler.add_getter("particles", "mass", "get_mass", names=("mass",)) + handler.add_getter("particles", "radius", "get_radius", names=("radius",)) + handler.add_getter("particles", "luminosity", "get_luminosity", names=("luminosity",)) + handler.add_getter("particles", "age", "get_age", names=("age",)) + handler.add_getter("particles", "stellar_type", "get_stellar_type", names=("stellar_type",)) + handler.add_getter("particles", "temperature", "get_temperature", names=("temperature",)) + handler.add_getter("particles", "time_step", "get_time_step", names=("time_step",)) - handler.add_attribute( - "particles", - "mass_loss_wind", - "get_mass_loss_wind", - ("stellar_type", "luminosity", "radius", "mass", "CO_core_mass"), - ) - - handler.add_attribute( - "particles", - "gyration_radius", - "get_gyration_radius", - ( - "stellar_type", - "initial_mass", - "mass", - "radius", - "luminosity", - "epoch", - "main_sequence_lifetime", - "age", - ), - ) class MetisseParticles(Particles): diff --git a/src/amuse/community/metisse/storage.f90 b/src/amuse/community/metisse/storage.f90 new file mode 100644 index 0000000000..5220c91ad5 --- /dev/null +++ b/src/amuse/community/metisse/storage.f90 @@ -0,0 +1,376 @@ +module store_stars + use iso_c_binding + implicit none + + type, public :: 
star + private + integer :: id + real(c_double) :: mass + real(c_double) :: age + real(c_double) :: initial_mass + real(c_double) :: time_step + real(c_double) :: luminosity + real(c_double) :: temperature + real(c_double) :: metallicity + real(c_double) :: radius + integer :: stellar_type + end type star + + type, public :: stars + private + type(star), allocatable :: star_array(:) + integer :: num_stars = 0 ! number of stars in the system + integer :: next_star_id = 1 ! the id of the next star, should only ever increase + contains + procedure, public :: new_star + procedure, public :: remove_star + procedure, private :: resize + procedure, private :: lookup_star_id + procedure, private :: get_property_double + procedure, private :: get_property_int + procedure, public :: get_mass + procedure, public :: get_radius + procedure, public :: get_age + procedure, public :: get_time_step + procedure, public :: get_temperature + procedure, public :: get_luminosity + procedure, public :: get_stellar_type + procedure, public :: get_metallicity + procedure, public :: get_number_of_stars + + end type stars + +contains + + subroutine get_number_of_stars(self, number_of_stars) + class(stars), intent(in) :: self + integer, intent(out) :: number_of_stars + number_of_stars = self%num_stars + end subroutine + + function new_star(self, initial_mass) result(new_id) + class(stars), intent(inout) :: self + real(c_double), intent(in) :: initial_mass + integer :: new_id + integer :: i + + self%num_stars = self%num_stars + 1 + call self%resize(self%num_stars) + i = self%num_stars + new_id = self%next_star_id + + self%star_array(i)%id = new_id + self%star_array(i)%mass = initial_mass + self%star_array(i)%age = 0.0_c_double + self%star_array(i)%initial_mass = initial_mass + self%star_array(i)%time_step = 0.0_c_double + self%star_array(i)%luminosity = 0.0_c_double + self%star_array(i)%temperature = 0.0_c_double + self%star_array(i)%metallicity = 0.0_c_double + self%star_array(i)%radius = 
0.0_c_double + self%star_array(i)%stellar_type = 0 + + self%next_star_id = new_id + 1 + + write(*,*) "new star with id: ", new_id + write(*,*) "mass: ", initial_mass + + end function new_star + + subroutine remove_star(self, id) + class(stars), intent(inout) :: self + integer, intent(in) :: id + + integer :: i + + write(*,*) "removing star with id: ", id + do i = 1, self%num_stars + write(*,*) "i: ", i, " id: ", self%star_array(i)%id + if (self%star_array(i)%id == id) then + write(*,*) "removing star with id: ", id + if (i /= self%num_stars) then + self%star_array(i:self%num_stars-1) = self%star_array(i+1:self%num_stars) + end if + self%num_stars = self%num_stars - 1 + write(*,*) "num_stars: ", self%num_stars + call self%resize(self%num_stars) + exit + end if + end do + + end subroutine remove_star + + subroutine resize(self, new_size) + class(stars), intent(inout) :: self + integer, intent(in) :: new_size + type(star), allocatable :: temp(:) + integer :: current_size, new_capacity + + if (new_size <= 0) then + self%num_stars = 0 + if (allocated(self%star_array)) deallocate(self%star_array) + return + end if + + current_size = self%num_stars + new_capacity = max(100, int((1.1 ** ceiling(log10(real(new_size)))) * 10)) + + if (.not. allocated(self%star_array) .or. new_capacity > size(self%star_array)) then + if (allocated(self%star_array)) then + allocate(temp(current_size)) + temp = self%star_array + deallocate(self%star_array) + end if + allocate(self%star_array(new_capacity)) + if (allocated(temp)) then + self%star_array(1:current_size) = temp + deallocate(temp) + end if + end if + + end subroutine resize + + function lookup_star_id(self, id) result(index_of_the_star) + class(stars), intent(in) :: self + integer, intent(in) :: id + integer :: index_of_the_star + integer :: i + + do i = 1, self%num_stars + if (self%star_array(i)%id == id) then + index_of_the_star = i + return + end if + end do + + index_of_the_star = 0 + end function + + ! 
! Getters for all the stellar properties

! Fetch the real-valued property named property_name of star id into value.
! error: 0 on success, -1 unknown star, -2 unknown property.
subroutine get_property_double(self, id, property_name, value, error)
    class(stars), intent(in) :: self
    integer, intent(in) :: id
    character(len=*), intent(in) :: property_name
    real(c_double), intent(out) :: value
    integer, intent(out) :: error  ! was declared without intent
    integer :: i

    error = 0
    value = 0.0_c_double

    i = lookup_star_id(self, id)
    if (i == 0) then
        error = -1 ! star not found
        return
    end if

    select case (trim(property_name))
    case ('mass')
        value = self%star_array(i)%mass
    case ('age')
        value = self%star_array(i)%age
    case ('luminosity')
        value = self%star_array(i)%luminosity
    case ('temperature')
        value = self%star_array(i)%temperature
    case ('time_step')
        value = self%star_array(i)%time_step
    case ('metallicity')
        value = self%star_array(i)%metallicity
    case ('radius')
        value = self%star_array(i)%radius
    case default
        ! BUGFIX: the original had no return here, so the unconditional
        ! `error = 0` after the select case clobbered the -2 and callers
        ! never saw the failure.
        error = -2 ! property not found
    end select
end subroutine get_property_double

! Fetch the integer-valued property named property_name of star id.
! error: 0 on success, -1 unknown star, -2 unknown property.
subroutine get_property_int(self, id, property_name, value, error)
    class(stars), intent(in) :: self
    integer, intent(in) :: id
    character(len=*), intent(in) :: property_name
    integer, intent(out) :: value
    integer, intent(out) :: error
    integer :: i

    error = 0
    value = 0  ! BUGFIX: was 0.0_c_double, a real constant assigned to an integer

    i = lookup_star_id(self, id)
    if (i == 0) then
        error = -1 ! star not found
        return
    end if

    select case (trim(property_name))
    case ('stellar_type')
        value = self%star_array(i)%stellar_type
    case default
        error = -2 ! property not found
    end select
end subroutine get_property_int
! Setters for all the stellar properties

! Set the real-valued property named property_name of star id.
! error: 0 on success, -1 unknown star, -2 unknown property,
!        -3 read-only property, -4 metallicity change after evolution began.
! (Success-path debug write removed; error dummies now declared intent(out).)
subroutine set_property_double(self, id, property_name, value, error)
    class(stars), intent(inout) :: self
    integer, intent(in) :: id
    character(len=*), intent(in) :: property_name
    real(c_double), intent(in) :: value
    integer, intent(out) :: error
    integer :: i

    error = 0

    i = lookup_star_id(self, id)
    if (i == 0) then
        error = -1 ! star not found
        return
    end if

    select case (trim(property_name))
    case ('mass')
        self%star_array(i)%mass = value
    case ('time_step')
        self%star_array(i)%time_step = value
    case ('metallicity')
        ! Metallicity is frozen once the star has started evolving.
        if (self%star_array(i)%age > 0.0_c_double) then
            error = -4 ! not settable after having evolved
        else
            self%star_array(i)%metallicity = value
        end if
    case ('age', 'luminosity', 'temperature', 'radius')
        error = -3 ! not settable
    case default
        error = -2 ! property not found
    end select
end subroutine set_property_double

! Set the integer-valued property named property_name of star id.
! stellar_type exists but is read-only, so every known property yields -3.
! error: 0 reserved for future settable properties, -1 unknown star,
!        -2 unknown property, -3 read-only property.
subroutine set_property_int(self, id, property_name, value, error)
    class(stars), intent(inout) :: self
    integer, intent(in) :: id
    character(len=*), intent(in) :: property_name
    integer, intent(in) :: value
    integer, intent(out) :: error
    integer :: i

    error = 0

    i = lookup_star_id(self, id)
    if (i == 0) then
        error = -1 ! star not found
        return
    end if

    select case (trim(property_name))
    case ('stellar_type')
        error = -3 ! not settable
    case default
        error = -2 ! property not found
    end select
end subroutine set_property_int
getters for all the stellar properties + subroutine get_mass(self, id, mass, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + real(c_double), intent(out) :: mass + integer :: error + call get_property_double(self, id, 'mass', mass, error) + end subroutine + + subroutine get_radius(self, id, radius, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + real(c_double), intent(out) :: radius + integer :: error + call get_property_double(self, id, 'radius', radius, error) + end subroutine + + subroutine get_age(self, id, age, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + real(c_double), intent(out) :: age + integer :: error + call get_property_double(self, id, 'age', age, error) + end subroutine + + subroutine get_luminosity(self, id, luminosity, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + real(c_double), intent(out) :: luminosity + integer :: error + call get_property_double(self, id, 'luminosity', luminosity, error) + end subroutine + + subroutine get_temperature(self, id, temperature, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + real(c_double), intent(out) :: temperature + integer :: error + call get_property_double(self, id, 'temperature', temperature, error) + end subroutine + + subroutine get_time_step(self, id, time_step, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + real(c_double), intent(out) :: time_step + integer :: error + call get_property_double(self, id, 'time_step', time_step, error) + end subroutine + + subroutine get_metallicity(self, id, metallicity, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + real(c_double), intent(out) :: metallicity + integer :: error + call get_property_double(self, id, 'metallicity', metallicity, error) + end subroutine + + subroutine get_stellar_type(self, id, stellar_type, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + integer, 
intent(out) :: stellar_type + integer :: error + call get_property_int(self, id, 'stellar_type', stellar_type, error) + end subroutine + + ! setters for all the stellar properties that are settable + subroutine set_mass(self, id, mass, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + real(c_double), intent(in) :: mass + integer :: error + call set_property_double(self, id, 'mass', mass, error) + end subroutine + + subroutine set_metallicity(self, id, metallicity, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + real(c_double), intent(in) :: metallicity + integer :: error + call set_property_double(self, id, 'metallicity', metallicity, error) + end subroutine + + subroutine set_time_step(self, id, time_step, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + real(c_double), intent(in) :: time_step + integer :: error + call set_property_double(self, id, 'time_step', time_step, error) + end subroutine + + +end module store_stars diff --git a/src/amuse/community/metisse/test_storage.f90 b/src/amuse/community/metisse/test_storage.f90 new file mode 100644 index 0000000000..4dd8a0c45b --- /dev/null +++ b/src/amuse/community/metisse/test_storage.f90 @@ -0,0 +1,43 @@ +program test_store_stars + use iso_c_binding + use store_stars + implicit none + + type(stars) :: star_system + integer :: new_ids(2) + integer :: ids_to_remove(2) + integer :: i, number_of_stars + + ! Test new_star + new_ids(1) = star_system%new_star(1.0_c_double) + new_ids(2) = star_system%new_star(2.0_c_double) + if (new_ids(1) /= 1 .or. new_ids(2) /= 2) then + error stop "new_star failed" + end if + + ! Test remove_star + ids_to_remove = [1, 2] + call star_system%remove_star(ids_to_remove(1)) + call star_system%remove_star(ids_to_remove(2)) + call star_system%get_number_of_stars(number_of_stars) + if (number_of_stars /= 0) then + write (*, *) "Number of stars: ", number_of_stars + error stop "remove_star failed" + end if + + ! 
Test edge cases + new_ids = star_system%new_star(1.0_c_double) + if (new_ids(1) /= 1) then + error stop "new_star failed with single star" + end if + + ids_to_remove(1) = 1 + call star_system%remove_star(ids_to_remove(1)) + call star_system%get_number_of_stars(number_of_stars) + if (number_of_stars /= 0) then + error stop "remove_star failed with single star" + end if + + write (*, *) "All tests passed" + +end program test_store_stars From 9c7de5a5fab1363c312780e35316254138c505d0 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 17 Feb 2025 10:03:28 +0100 Subject: [PATCH 08/40] Interface updates --- src/amuse/community/metisse/interface.py | 94 +++++++++++++------ src/amuse/community/metisse/storage.f90 | 114 +++++++++++++++++++++++ 2 files changed, 179 insertions(+), 29 deletions(-) diff --git a/src/amuse/community/metisse/interface.py b/src/amuse/community/metisse/interface.py index c021d4cd80..703bc364ae 100644 --- a/src/amuse/community/metisse/interface.py +++ b/src/amuse/community/metisse/interface.py @@ -1,12 +1,12 @@ """ Interface for metisse """ + from amuse.community import ( CodeInterface, - InCodeComponentImplementation, - LegacyFunctionSpecification, LiteratureReferencesMixIn, - legacy_function, + # LegacyFunctionSpecification, + # legacy_function, remote_function, ) from amuse.community.interface import se @@ -21,7 +21,7 @@ class MetisseInterface( LiteratureReferencesMixIn, ): """ - Low level interface for metisse + Low level interface for METISSE Details in publication: .. [#] Agrawal, P. et al. 
202x @@ -31,18 +31,15 @@ class MetisseInterface( def __init__(self, **keyword_arguments): CodeInterface.__init__( - self, - name_of_the_worker="metisse_worker", - **keyword_arguments - ) - - @remote_function - def teststar(mass_in="d", time_in="d"): - returns (mass_out="d", result="i") - + self, name_of_the_worker="metisse_worker", **keyword_arguments + ) + LiteratureReferencesMixIn.__init__(self) # high level interface class class Metisse(se.StellarEvolution): + """ + High level interface for METISSE + """ __interface__ = MetisseInterface def __init__(self, **options): @@ -50,7 +47,7 @@ def __init__(self, **options): # self.stopping_conditions.supernova_detection = code.StoppingCondition('supernova_detection') se.StellarEvolution.__init__(self, MetisseInterface(**options), **options) -# the definition of the state model of the code + # the definition of the state model of the code def define_state(self, handler): # for example: # handler.set_initial_state("UNINITIALIZED") @@ -61,12 +58,12 @@ def define_state(self, handler): # handler.add_method("STOPPED", "stop") pass -# the definition of any properties + # the definition of any properties def define_properties(self, handler): # handler.add_property("name_of_the_getter", public_name="name_of_the_property") pass -# the definition of the parameters + # the definition of the parameters def define_parameters(self, handler): # handler.add_method_parameter( # "name_of_the_getter", @@ -84,12 +81,51 @@ def define_particle_sets(self, handler): handler.add_getter("particles", "mass", "get_mass", names=("mass",)) handler.add_getter("particles", "radius", "get_radius", names=("radius",)) - handler.add_getter("particles", "luminosity", "get_luminosity", names=("luminosity",)) handler.add_getter("particles", "age", "get_age", names=("age",)) - handler.add_getter("particles", "stellar_type", "get_stellar_type", names=("stellar_type",)) - handler.add_getter("particles", "temperature", "get_temperature", names=("temperature",)) - 
handler.add_getter("particles", "time_step", "get_time_step", names=("time_step",)) - + handler.add_getter( + "particles", "time_step", "get_time_step", names=("time_step",) + ) + handler.add_getter( + "particles", "temperature", "get_temperature", names=("temperature",) + ) + handler.add_getter( + "particles", "luminosity", "get_luminosity", names=("luminosity",) + ) + handler.add_getter( + "particles", "stellar_type", "get_stellar_type", names=("stellar_type",) + ) + handler.add_getter("particles", "spin", "get_spin", names=("spin",)) + handler.add_getter("particles", "epoch", "get_epoch", names=("epoch",)) + handler.add_getter( + "particles", + "main_sequence_lifetime", + "get_main_sequence_lifetime", + names=("main_sequence_lifetime",), + ) + handler.add_getter( + "particles", "core_mass", "get_core_mass", names=("core_mass",) + ) + handler.add_getter( + "particles", "CO_core_mass", "get_CO_core_mass", names=("CO_core_mass",) + ) + handler.add_getter( + "particles", "core_radius", "get_core_radius", names=("core_radius",) + ) + handler.add_getter( + "particles", + "convective_envelope_mass", + "get_convective_envelope_mass", + names=("convective_envelope_mass",), + ) + handler.add_getter( + "particles", + "convective_envelope_radius", + "get_convective_envelope_radius", + names=("convective_envelope_radius",), + ) + handler.add_getter( + "particles", "initial_mass", "get_initial_mass", names=("initial_mass",) + ) class MetisseParticles(Particles): @@ -106,17 +142,13 @@ def __init__(self, code_interface, storage=None): "evolve_one_step", self.particleset_evolve_one_step, self.evolve_one_step ) self.add_function_attribute( - "evolve_for", - self.particleset_evolve_for, - self.evolve_for + "evolve_for", self.particleset_evolve_for, self.evolve_for ) def calculate_effective_temperature(self, luminosity, radius): return ( (luminosity / (constants.four_pi_stefan_boltzmann * radius**2)) ** 0.25 - ).in_( - units.K - ) + ).in_(units.K) def add_particles_to_store(self, 
keys, attributes=[], values=[]): if len(keys) == 0: @@ -170,10 +202,14 @@ def particleset_evolve_one_step(self, particles): ) def evolve_for(self, particles, subset, delta_time): - self._private.code_interface._evolve_particles(subset.as_set(), subset.age + delta_time) + self._private.code_interface._evolve_particles( + subset.as_set(), subset.age + delta_time + ) def particleset_evolve_for(self, particles, delta_time): - self._private.code_interface._evolve_particles(particles, particles.age + delta_time) + self._private.code_interface._evolve_particles( + particles, particles.age + delta_time + ) def get_defined_attribute_names(self): return ["mass", "radius"] diff --git a/src/amuse/community/metisse/storage.f90 b/src/amuse/community/metisse/storage.f90 index 5220c91ad5..efc5e218f1 100644 --- a/src/amuse/community/metisse/storage.f90 +++ b/src/amuse/community/metisse/storage.f90 @@ -13,6 +13,14 @@ module store_stars real(c_double) :: temperature real(c_double) :: metallicity real(c_double) :: radius + real(c_double) :: core_mass + real(c_double) :: CO_core_mass + real(c_double) :: core_radius + real(c_double) :: convective_envelope_mass + real(c_double) :: convective_envelope_radius + real(c_double) :: epoch + real(c_double) :: spin + real(c_double) :: main_sequence_lifetime integer :: stellar_type end type star @@ -36,6 +44,15 @@ module store_stars procedure, public :: get_luminosity procedure, public :: get_stellar_type procedure, public :: get_metallicity + procedure, public :: get_spin + procedure, public :: get_epoch + procedure, public :: get_main_sequence_lifetime + procedure, public :: get_core_mass + procedure, public :: get_core_radius + procedure, public :: get_CO_core_mass + procedure, public :: get_convective_envelope_mass + procedure, public :: get_convective_envelope_radius + procedure, public :: get_initial_mass procedure, public :: get_number_of_stars end type stars @@ -68,6 +85,15 @@ function new_star(self, initial_mass) result(new_id) 
self%star_array(i)%temperature = 0.0_c_double self%star_array(i)%metallicity = 0.0_c_double self%star_array(i)%radius = 0.0_c_double + self%star_array(i)%core_mass = 0.0_c_double + self%star_array(i)%CO_core_mass = 0.0_c_double + self%star_array(i)%core_radius = 0.0_c_double + self%star_array(i)%convective_envelope_mass = 0.0_c_double + self%star_array(i)%convective_envelope_radius = 0.0_c_double + self%star_array(i)%epoch = 0.0_c_double + self%star_array(i)%spin = 0.0_c_double + self%star_array(i)%main_sequence_lifetime = 0.0_c_double + self%star_array(i)%stellar_type = 0 self%next_star_id = new_id + 1 @@ -176,6 +202,22 @@ subroutine get_property_double(self, id, property_name, value, error) value = self%star_array(i)%metallicity case ('radius') value = self%star_array(i)%radius + case ('core_mass') + value = self%star_array(i)%core_mass + case ('CO_core_mass') + value = self%star_array(i)%CO_core_mass + case ('core_radius') + value = self%star_array(i)%core_radius + case ('convective_envelope_mass') + value = self%star_array(i)%convective_envelope_mass + case ('convective_envelope_radius') + value = self%star_array(i)%convective_envelope_radius + case ('epoch') + value = self%star_array(i)%epoch + case ('spin') + value = self%star_array(i)%spin + case ('main_sequence_lifetime') + value = self%star_array(i)%main_sequence_lifetime case default value = 0.0_c_double error = -2 ! 
property not found @@ -339,6 +381,62 @@ subroutine get_metallicity(self, id, metallicity, error) call get_property_double(self, id, 'metallicity', metallicity, error) end subroutine + subroutine get_spin(self, id, spin, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + real(c_double), intent(out) :: spin + integer :: error + call get_property_double(self, id, 'spin', spin, error) + end subroutine + + subroutine get_epoch(self, id, epoch, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + real(c_double), intent(out) :: epoch + integer :: error + call get_property_double(self, id, 'epoch', epoch, error) + end subroutine + + subroutine get_core_mass(self, id, core_mass, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + real(c_double), intent(out) :: core_mass + integer :: error + call get_property_double(self, id, 'core_mass', core_mass, error) + end subroutine + + subroutine get_core_radius(self, id, core_radius, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + real(c_double), intent(out) :: core_radius + integer :: error + call get_property_double(self, id, 'core_radius', core_radius, error) + end subroutine + + subroutine get_CO_core_mass(self, id, CO_core_mass, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + real(c_double), intent(out) :: CO_core_mass + integer :: error + call get_property_double(self, id, 'CO_core_mass', CO_core_mass, error) + end subroutine + + subroutine get_convective_envelope_mass(self, id, convective_envelope_mass, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + real(c_double), intent(out) :: convective_envelope_mass + integer :: error + call get_property_double(self, id, 'convective_envelope_mass', convective_envelope_mass, error) + end subroutine + + subroutine get_convective_envelope_radius(self, id, convective_envelope_radius, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + 
real(c_double), intent(out) :: convective_envelope_radius + integer :: error + call get_property_double(self, id, 'convective_envelope_radius', convective_envelope_radius, error) + end subroutine + subroutine get_stellar_type(self, id, stellar_type, error) class(stars), intent(in) :: self integer, intent(in) :: id @@ -347,6 +445,22 @@ subroutine get_stellar_type(self, id, stellar_type, error) call get_property_int(self, id, 'stellar_type', stellar_type, error) end subroutine + subroutine get_initial_mass(self, id, initial_mass, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + real(c_double), intent(out) :: initial_mass + integer :: error + call get_property_double(self, id, 'initial_mass', initial_mass, error) + end subroutine + + subroutine get_main_sequence_lifetime(self, id, main_sequence_lifetime, error) + class(stars), intent(in) :: self + integer, intent(in) :: id + real(c_double), intent(out) :: main_sequence_lifetime + integer :: error + call get_property_double(self, id, 'main_sequence_lifetime', main_sequence_lifetime, error) + end subroutine + ! 
setters for all the stellar properties that are settable subroutine set_mass(self, id, mass, error) class(stars), intent(inout) :: self From 22569607fb66f9ca5b1045fdf84e506f6bd8bc74 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 17 Feb 2025 11:13:04 +0100 Subject: [PATCH 09/40] add getters/setters --- src/amuse/community/metisse/interface.f90 | 8 + src/amuse/community/metisse/interface.py | 5 + src/amuse/community/metisse/storage.f90 | 390 +++++++++++++++------- 3 files changed, 283 insertions(+), 120 deletions(-) diff --git a/src/amuse/community/metisse/interface.f90 b/src/amuse/community/metisse/interface.f90 index ca1da5966c..7c76c875a1 100644 --- a/src/amuse/community/metisse/interface.f90 +++ b/src/amuse/community/metisse/interface.f90 @@ -144,6 +144,14 @@ function get_time_step(index_of_the_star, time_step) integer :: get_time_step call star_system%get_time_step(index_of_the_star, time_step, get_time_step) end function + + function get_initial_mass(index_of_the_star, mass) + implicit none + integer :: index_of_the_star + double precision :: mass + integer :: get_initial_mass + call star_system%get_initial_mass(index_of_the_star, mass, get_initial_mass) + end function function initialize_code() implicit none diff --git a/src/amuse/community/metisse/interface.py b/src/amuse/community/metisse/interface.py index 703bc364ae..4044417b6e 100644 --- a/src/amuse/community/metisse/interface.py +++ b/src/amuse/community/metisse/interface.py @@ -35,6 +35,11 @@ def __init__(self, **keyword_arguments): ) LiteratureReferencesMixIn.__init__(self) + @remote_function + def get_initial_mass(index_of_the_star="i"): + returns (mass="d" | units.julianyr) + + # high level interface class class Metisse(se.StellarEvolution): """ diff --git a/src/amuse/community/metisse/storage.f90 b/src/amuse/community/metisse/storage.f90 index efc5e218f1..78f33cae29 100644 --- a/src/amuse/community/metisse/storage.f90 +++ b/src/amuse/community/metisse/storage.f90 @@ -1,3 +1,10 @@ +! 
Storage module for stars in a stellar evolution code. +! Only used for storing and retrieving, has no checks/calculations. +! +! +! +! - Steven Rieder + module store_stars use iso_c_binding implicit none @@ -5,22 +12,22 @@ module store_stars type, public :: star private integer :: id - real(c_double) :: mass real(c_double) :: age - real(c_double) :: initial_mass - real(c_double) :: time_step - real(c_double) :: luminosity - real(c_double) :: temperature - real(c_double) :: metallicity - real(c_double) :: radius - real(c_double) :: core_mass real(c_double) :: CO_core_mass + real(c_double) :: core_mass real(c_double) :: core_radius real(c_double) :: convective_envelope_mass real(c_double) :: convective_envelope_radius real(c_double) :: epoch - real(c_double) :: spin + real(c_double) :: initial_mass + real(c_double) :: luminosity real(c_double) :: main_sequence_lifetime + real(c_double) :: mass + real(c_double) :: metallicity + real(c_double) :: radius + real(c_double) :: spin + real(c_double) :: temperature + real(c_double) :: time_step integer :: stellar_type end type star @@ -34,27 +41,51 @@ module store_stars procedure, public :: remove_star procedure, private :: resize procedure, private :: lookup_star_id + procedure, private :: get_property_double procedure, private :: get_property_int - procedure, public :: get_mass - procedure, public :: get_radius + procedure, private :: set_property_double + procedure, private :: set_property_int + + procedure, public :: get_number_of_stars + + ! Every property has a public getter and a setter, listed alphabetically here. + ! 'id' is only used internally, so it is not exposed. 
procedure, public :: get_age - procedure, public :: get_time_step - procedure, public :: get_temperature - procedure, public :: get_luminosity - procedure, public :: get_stellar_type - procedure, public :: get_metallicity - procedure, public :: get_spin - procedure, public :: get_epoch - procedure, public :: get_main_sequence_lifetime + procedure, public :: get_CO_core_mass procedure, public :: get_core_mass procedure, public :: get_core_radius - procedure, public :: get_CO_core_mass procedure, public :: get_convective_envelope_mass procedure, public :: get_convective_envelope_radius + procedure, public :: get_epoch procedure, public :: get_initial_mass - procedure, public :: get_number_of_stars - + procedure, public :: get_luminosity + procedure, public :: get_main_sequence_lifetime + procedure, public :: get_mass + procedure, public :: get_metallicity + procedure, public :: get_radius + procedure, public :: get_spin + procedure, public :: get_stellar_type + procedure, public :: get_temperature + procedure, public :: get_time_step + + procedure, public :: set_age + procedure, public :: set_CO_core_mass + procedure, public :: set_core_mass + procedure, public :: set_core_radius + procedure, public :: set_convective_envelope_mass + procedure, public :: set_convective_envelope_radius + procedure, public :: set_epoch + procedure, public :: set_initial_mass + procedure, public :: set_luminosity + procedure, public :: set_main_sequence_lifetime + procedure, public :: set_mass + procedure, public :: set_metallicity + procedure, public :: set_radius + procedure, public :: set_spin + procedure, public :: set_stellar_type + procedure, public :: set_temperature + procedure, public :: set_time_step end type stars contains @@ -77,24 +108,24 @@ function new_star(self, initial_mass) result(new_id) new_id = self%next_star_id self%star_array(i)%id = new_id - self%star_array(i)%mass = initial_mass + self%star_array(i)%age = 0.0_c_double - self%star_array(i)%initial_mass = 
initial_mass - self%star_array(i)%time_step = 0.0_c_double - self%star_array(i)%luminosity = 0.0_c_double - self%star_array(i)%temperature = 0.0_c_double - self%star_array(i)%metallicity = 0.0_c_double - self%star_array(i)%radius = 0.0_c_double - self%star_array(i)%core_mass = 0.0_c_double self%star_array(i)%CO_core_mass = 0.0_c_double + self%star_array(i)%core_mass = 0.0_c_double self%star_array(i)%core_radius = 0.0_c_double self%star_array(i)%convective_envelope_mass = 0.0_c_double self%star_array(i)%convective_envelope_radius = 0.0_c_double self%star_array(i)%epoch = 0.0_c_double - self%star_array(i)%spin = 0.0_c_double + self%star_array(i)%initial_mass = initial_mass + self%star_array(i)%luminosity = 0.0_c_double self%star_array(i)%main_sequence_lifetime = 0.0_c_double - + self%star_array(i)%mass = initial_mass + self%star_array(i)%metallicity = 0.0_c_double + self%star_array(i)%radius = 0.0_c_double + self%star_array(i)%spin = 0.0_c_double self%star_array(i)%stellar_type = 0 + self%star_array(i)%time_step = 0.0_c_double + self%star_array(i)%temperature = 0.0_c_double self%next_star_id = new_id + 1 @@ -188,24 +219,12 @@ subroutine get_property_double(self, id, property_name, value, error) end if select case (trim(property_name)) - case ('mass') - value = self%star_array(i)%mass case ('age') value = self%star_array(i)%age - case ('luminosity') - value = self%star_array(i)%luminosity - case ('temperature') - value = self%star_array(i)%temperature - case ('time_step') - value = self%star_array(i)%time_step - case ('metallicity') - value = self%star_array(i)%metallicity - case ('radius') - value = self%star_array(i)%radius - case ('core_mass') - value = self%star_array(i)%core_mass case ('CO_core_mass') value = self%star_array(i)%CO_core_mass + case ('core_mass') + value = self%star_array(i)%core_mass case ('core_radius') value = self%star_array(i)%core_radius case ('convective_envelope_mass') @@ -214,10 +233,24 @@ subroutine get_property_double(self, id, 
property_name, value, error) value = self%star_array(i)%convective_envelope_radius case ('epoch') value = self%star_array(i)%epoch - case ('spin') - value = self%star_array(i)%spin + case ('initial_mass') + value = self%star_array(i)%initial_mass + case ('luminosity') + value = self%star_array(i)%luminosity case ('main_sequence_lifetime') value = self%star_array(i)%main_sequence_lifetime + case ('mass') + value = self%star_array(i)%mass + case ('metallicity') + value = self%star_array(i)%metallicity + case ('radius') + value = self%star_array(i)%radius + case ('spin') + value = self%star_array(i)%spin + case ('temperature') + value = self%star_array(i)%temperature + case ('time_step') + value = self%star_array(i)%time_step case default value = 0.0_c_double error = -2 ! property not found @@ -267,35 +300,41 @@ subroutine set_property_double(self, id, property_name, value, error) end if select case (trim(property_name)) + case ('age') + self%star_array(i)%age = value + case ('CO_core_mass') + self%star_array(i)%CO_core_mass = value + case ('core_mass') + self%star_array(i)%core_mass = value + case ('core_radius') + self%star_array(i)%core_radius = value + case ('convective_envelope_mass') + self%star_array(i)%convective_envelope_mass = value + case ('convective_envelope_radius') + self%star_array(i)%convective_envelope_radius = value + case ('epoch') + self%star_array(i)%epoch = value + case ('initial_mass') + self%star_array(i)%initial_mass = value + case ('main_sequence_lifetime') + self%star_array(i)%main_sequence_lifetime = value case ('mass') self%star_array(i)%mass = value - case ('age') - error = -3 ! not settable - return - case ('luminosity') - error = -3 ! not settable - return + case ('metallicity') + self%star_array(i)%metallicity = value + case ('radius') + self%star_array(i)%radius = value + case ('spin') + self%star_array(i)%spin = value case ('temperature') - error = -3 ! 
not settable - return + self%star_array(i)%temperature = value case ('time_step') self%star_array(i)%time_step = value - case ('metallicity') - if (self%star_array(i)%age > 0.0_c_double) then - error = -4 ! not settable after having evolved - return - else - self%star_array(i)%metallicity = value - end if - case ('radius') - error = -3 ! not settable - return case default error = -2 ! property not found return end select error = 0 - write(*,*) "set_property_double: ", id, property_name, value end subroutine subroutine set_property_int(self, id, property_name, value, error) @@ -313,128 +352,126 @@ subroutine set_property_int(self, id, property_name, value, error) select case (trim(property_name)) case ('stellar_type') - error = -3 ! not settable - return + self%star_array(i)%stellar_type = value case default error = -2 ! property not found return end select error = 0 - write(*,*) "set_property_int: ", id, property_name, value end subroutine ! getters for all the stellar properties - subroutine get_mass(self, id, mass, error) + subroutine get_age(self, id, age, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: mass + real(c_double), intent(out) :: age integer :: error - call get_property_double(self, id, 'mass', mass, error) + call get_property_double(self, id, 'age', age, error) end subroutine - subroutine get_radius(self, id, radius, error) + subroutine get_CO_core_mass(self, id, CO_core_mass, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: radius + real(c_double), intent(out) :: CO_core_mass integer :: error - call get_property_double(self, id, 'radius', radius, error) + call get_property_double(self, id, 'CO_core_mass', CO_core_mass, error) end subroutine - subroutine get_age(self, id, age, error) + subroutine get_core_mass(self, id, core_mass, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: age + real(c_double), 
intent(out) :: core_mass integer :: error - call get_property_double(self, id, 'age', age, error) + call get_property_double(self, id, 'core_mass', core_mass, error) end subroutine - subroutine get_luminosity(self, id, luminosity, error) + subroutine get_core_radius(self, id, core_radius, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: luminosity + real(c_double), intent(out) :: core_radius integer :: error - call get_property_double(self, id, 'luminosity', luminosity, error) + call get_property_double(self, id, 'core_radius', core_radius, error) end subroutine - subroutine get_temperature(self, id, temperature, error) + subroutine get_convective_envelope_mass(self, id, convective_envelope_mass, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: temperature + real(c_double), intent(out) :: convective_envelope_mass integer :: error - call get_property_double(self, id, 'temperature', temperature, error) + call get_property_double(self, id, 'convective_envelope_mass', convective_envelope_mass, error) end subroutine - subroutine get_time_step(self, id, time_step, error) + subroutine get_convective_envelope_radius(self, id, convective_envelope_radius, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: time_step + real(c_double), intent(out) :: convective_envelope_radius integer :: error - call get_property_double(self, id, 'time_step', time_step, error) + call get_property_double(self, id, 'convective_envelope_radius', convective_envelope_radius, error) end subroutine - subroutine get_metallicity(self, id, metallicity, error) + subroutine get_epoch(self, id, epoch, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: metallicity + real(c_double), intent(out) :: epoch integer :: error - call get_property_double(self, id, 'metallicity', metallicity, error) + call 
get_property_double(self, id, 'epoch', epoch, error) end subroutine - subroutine get_spin(self, id, spin, error) + subroutine get_initial_mass(self, id, initial_mass, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: spin + real(c_double), intent(out) :: initial_mass integer :: error - call get_property_double(self, id, 'spin', spin, error) + call get_property_double(self, id, 'initial_mass', initial_mass, error) end subroutine - subroutine get_epoch(self, id, epoch, error) + subroutine get_luminosity(self, id, luminosity, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: epoch + real(c_double), intent(out) :: luminosity integer :: error - call get_property_double(self, id, 'epoch', epoch, error) + call get_property_double(self, id, 'luminosity', luminosity, error) end subroutine - subroutine get_core_mass(self, id, core_mass, error) + subroutine get_main_sequence_lifetime(self, id, main_sequence_lifetime, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: core_mass + real(c_double), intent(out) :: main_sequence_lifetime integer :: error - call get_property_double(self, id, 'core_mass', core_mass, error) + call get_property_double(self, id, 'main_sequence_lifetime', main_sequence_lifetime, error) end subroutine - subroutine get_core_radius(self, id, core_radius, error) + subroutine get_mass(self, id, mass, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: core_radius + real(c_double), intent(out) :: mass integer :: error - call get_property_double(self, id, 'core_radius', core_radius, error) + call get_property_double(self, id, 'mass', mass, error) end subroutine - subroutine get_CO_core_mass(self, id, CO_core_mass, error) + subroutine get_metallicity(self, id, metallicity, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: 
CO_core_mass + real(c_double), intent(out) :: metallicity integer :: error - call get_property_double(self, id, 'CO_core_mass', CO_core_mass, error) + call get_property_double(self, id, 'metallicity', metallicity, error) end subroutine - subroutine get_convective_envelope_mass(self, id, convective_envelope_mass, error) + subroutine get_radius(self, id, radius, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: convective_envelope_mass + real(c_double), intent(out) :: radius integer :: error - call get_property_double(self, id, 'convective_envelope_mass', convective_envelope_mass, error) + call get_property_double(self, id, 'radius', radius, error) end subroutine - subroutine get_convective_envelope_radius(self, id, convective_envelope_radius, error) + subroutine get_spin(self, id, spin, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: convective_envelope_radius + real(c_double), intent(out) :: spin integer :: error - call get_property_double(self, id, 'convective_envelope_radius', convective_envelope_radius, error) + call get_property_double(self, id, 'spin', spin, error) end subroutine subroutine get_stellar_type(self, id, stellar_type, error) @@ -445,23 +482,105 @@ subroutine get_stellar_type(self, id, stellar_type, error) call get_property_int(self, id, 'stellar_type', stellar_type, error) end subroutine - subroutine get_initial_mass(self, id, initial_mass, error) + subroutine get_temperature(self, id, temperature, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: initial_mass + real(c_double), intent(out) :: temperature integer :: error - call get_property_double(self, id, 'initial_mass', initial_mass, error) + call get_property_double(self, id, 'temperature', temperature, error) end subroutine - subroutine get_main_sequence_lifetime(self, id, main_sequence_lifetime, error) + subroutine get_time_step(self, id, 
time_step, error) class(stars), intent(in) :: self integer, intent(in) :: id - real(c_double), intent(out) :: main_sequence_lifetime + real(c_double), intent(out) :: time_step integer :: error - call get_property_double(self, id, 'main_sequence_lifetime', main_sequence_lifetime, error) + call get_property_double(self, id, 'time_step', time_step, error) + end subroutine + + + ! setters for all the stellar properties (in the same order as the getters) + + subroutine set_age(self, id, age, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + real(c_double), intent(in) :: age + integer :: error + call set_property_double(self, id, 'age', age, error) + end subroutine + + subroutine set_CO_core_mass(self, id, CO_core_mass, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + real(c_double), intent(in) :: CO_core_mass + integer :: error + call set_property_double(self, id, 'CO_core_mass', CO_core_mass, error) + end subroutine + + subroutine set_core_mass(self, id, core_mass, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + real(c_double), intent(in) :: core_mass + integer :: error + call set_property_double(self, id, 'core_mass', core_mass, error) + end subroutine + + subroutine set_core_radius(self, id, core_radius, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + real(c_double), intent(in) :: core_radius + integer :: error + call set_property_double(self, id, 'core_radius', core_radius, error) + end subroutine + + subroutine set_convective_envelope_mass(self, id, convective_envelope_mass, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + real(c_double), intent(in) :: convective_envelope_mass + integer :: error + call set_property_double(self, id, 'convective_envelope_mass', convective_envelope_mass, error) + end subroutine + + subroutine set_convective_envelope_radius(self, id, convective_envelope_radius, error) + class(stars), intent(inout) :: self + 
integer, intent(in) :: id + real(c_double), intent(in) :: convective_envelope_radius + integer :: error + call set_property_double(self, id, 'convective_envelope_radius', convective_envelope_radius, error) + end subroutine + + subroutine set_epoch(self, id, epoch, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + real(c_double), intent(in) :: epoch + integer :: error + call set_property_double(self, id, 'epoch', epoch, error) + end subroutine + + subroutine set_initial_mass(self, id, initial_mass, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + real(c_double), intent(in) :: initial_mass + integer :: error + call set_property_double(self, id, 'initial_mass', initial_mass, error) + end subroutine + + subroutine set_luminosity(self, id, luminosity, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + real(c_double), intent(in) :: luminosity + integer :: error + call set_property_double(self, id, 'luminosity', luminosity, error) + end subroutine + + subroutine set_main_sequence_lifetime(self, id, main_sequence_lifetime, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + real(c_double), intent(in) :: main_sequence_lifetime + integer :: error + call set_property_double(self, id, 'main_sequence_lifetime', main_sequence_lifetime, error) end subroutine - ! 
setters for all the stellar properties that are settable subroutine set_mass(self, id, mass, error) class(stars), intent(inout) :: self integer, intent(in) :: id @@ -478,6 +597,38 @@ subroutine set_metallicity(self, id, metallicity, error) call set_property_double(self, id, 'metallicity', metallicity, error) end subroutine + subroutine set_radius(self, id, radius, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + real(c_double), intent(in) :: radius + integer :: error + call set_property_double(self, id, 'radius', radius, error) + end subroutine + + subroutine set_spin(self, id, spin, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + real(c_double), intent(in) :: spin + integer :: error + call set_property_double(self, id, 'spin', spin, error) + end subroutine + + subroutine set_stellar_type(self, id, stellar_type, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + integer, intent(in) :: stellar_type + integer :: error + call set_property_int(self, id, 'stellar_type', stellar_type, error) + end subroutine + + subroutine set_temperature(self, id, temperature, error) + class(stars), intent(inout) :: self + integer, intent(in) :: id + real(c_double), intent(in) :: temperature + integer :: error + call set_property_double(self, id, 'temperature', temperature, error) + end subroutine + subroutine set_time_step(self, id, time_step, error) class(stars), intent(inout) :: self integer, intent(in) :: id @@ -486,5 +637,4 @@ subroutine set_time_step(self, id, time_step, error) call set_property_double(self, id, 'time_step', time_step, error) end subroutine - end module store_stars From e70640dd77e8aa29e95e578972f2a2f8727f0afb Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 17 Feb 2025 15:47:03 +0100 Subject: [PATCH 10/40] small updates --- src/amuse/community/metisse/interface.f90 | 28 +++++++++-------------- 1 file changed, 11 insertions(+), 17 deletions(-) diff --git 
a/src/amuse/community/metisse/interface.f90 b/src/amuse/community/metisse/interface.f90 index 7c76c875a1..62f343386c 100644 --- a/src/amuse/community/metisse/interface.f90 +++ b/src/amuse/community/metisse/interface.f90 @@ -1,38 +1,32 @@ module metisseInterface use iso_c_binding use store_stars, only: stars + use track_support + use z_support implicit none type(stars) :: star_system contains function initialize(error) - use track_support - use z_support implicit none integer :: error integer :: initialize - real(dp) :: zpars(20) + real(c_double) :: zpars(20) - call initialize_front_end('main') + initialize = -1 - initialize = 0 - end function + ! Need to define this front end for METISSE + call initialize_front_end("amuse") + initial_Z = -1.0_c_double - function teststar(mass_in, time, mass_out, error) - use track_support - use z_support - implicit none - real(dp) :: mass_in, mass_out, time - integer :: error - integer :: teststar + call METISSE_zcnsts(initial_Z,zpars,'','', error) + if (error/=0) return - real(dp) :: zpars(20) + write(*,*) "Number of tracks: ", number_of_tracks - call initialize_front_end('main') - !call METISSE_zcnsts(initial_Z,zpars,'','',error) - teststar = 0 + initialize = 0 end function function cleanup_code() From 212d9bd7a603660f7a8f3352b0be6b4b8e1450d8 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 17 Feb 2025 15:47:26 +0100 Subject: [PATCH 11/40] update literature --- src/amuse/community/metisse/interface.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/amuse/community/metisse/interface.py b/src/amuse/community/metisse/interface.py index 4044417b6e..1fe0e70740 100644 --- a/src/amuse/community/metisse/interface.py +++ b/src/amuse/community/metisse/interface.py @@ -24,7 +24,8 @@ class MetisseInterface( Low level interface for METISSE Details in publication: - .. [#] Agrawal, P. et al. 202x + .. [#] Agrawal, P. et al., 2020, https://doi.org/10.1093/mnras/staa2264 + .. [#] Agrawal, P. 
et al., 2023, https://doi.org/10.1093/mnras/stad2334 """ use_modules = ["metisseInterface"] From 1abca6c9cdc201b567e1339d096fdef9f5aec478 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 17 Feb 2025 16:23:15 +0100 Subject: [PATCH 12/40] Updates --- src/amuse/community/metisse/interface.f90 | 66 ++++++++++++++++++++++- src/amuse/community/metisse/interface.py | 65 +++++++++++++++------- 2 files changed, 112 insertions(+), 19 deletions(-) diff --git a/src/amuse/community/metisse/interface.f90 b/src/amuse/community/metisse/interface.f90 index 62f343386c..7060a183fa 100644 --- a/src/amuse/community/metisse/interface.f90 +++ b/src/amuse/community/metisse/interface.f90 @@ -5,6 +5,7 @@ module metisseInterface use z_support implicit none type(stars) :: star_system + real(c_double), allocatable :: mass_array(:) contains @@ -44,6 +45,13 @@ function commit_parameters() function commit_particles() implicit none integer :: commit_particles + integer :: number_of_particles + integer :: error + + error = get_number_of_particles(number_of_particles) + allocate(mass_array(number_of_particles)) + mass_array = 0.0 + commit_particles=0 end function @@ -99,7 +107,63 @@ function get_metallicity(metallicity) integer :: get_metallicity get_metallicity=0 end function - + + function get_epoch(index_of_the_star, epoch) + implicit none + integer :: index_of_the_star + real(c_double) :: epoch + integer :: get_epoch + call star_system%get_epoch(index_of_the_star, epoch, get_epoch) + end function + + function get_core_mass(index_of_the_star, core_mass) + implicit none + integer :: index_of_the_star + real(c_double) :: core_mass + integer :: get_core_mass + call star_system%get_core_mass(index_of_the_star, core_mass, get_core_mass) + end function + + function get_core_radius(index_of_the_star, core_radius) + implicit none + integer :: index_of_the_star + real(c_double) :: core_radius + integer :: get_core_radius + call star_system%get_core_radius(index_of_the_star, core_radius, 
get_core_radius) + end function + + function get_convective_envelope_mass(index_of_the_star, convective_envelope_mass) + implicit none + integer :: index_of_the_star + real(c_double) :: convective_envelope_mass + integer :: get_convective_envelope_mass + call star_system%get_convective_envelope_mass(index_of_the_star, convective_envelope_mass, get_convective_envelope_mass) + end function + + function get_convective_envelope_radius(index_of_the_star, convective_envelope_radius) + implicit none + integer :: index_of_the_star + real(c_double) :: convective_envelope_radius + integer :: get_convective_envelope_radius + call star_system%get_convective_envelope_radius(index_of_the_star, convective_envelope_radius, get_convective_envelope_radius) + end function + + function get_CO_core_mass(index_of_the_star, CO_core_mass) + implicit none + integer :: index_of_the_star + real(c_double) :: CO_core_mass + integer :: get_CO_core_mass + call star_system%get_CO_core_mass(index_of_the_star, CO_core_mass, get_CO_core_mass) + end function + + function get_main_sequence_lifetime(index_of_the_star, main_sequence_lifetime) + implicit none + integer :: index_of_the_star + real(c_double) :: main_sequence_lifetime + integer :: get_main_sequence_lifetime + call star_system%get_main_sequence_lifetime(index_of_the_star, main_sequence_lifetime, get_main_sequence_lifetime) + end function + function get_number_of_particles(number_of_particles) implicit none integer :: number_of_particles diff --git a/src/amuse/community/metisse/interface.py b/src/amuse/community/metisse/interface.py index 1fe0e70740..c65f22c001 100644 --- a/src/amuse/community/metisse/interface.py +++ b/src/amuse/community/metisse/interface.py @@ -36,9 +36,41 @@ def __init__(self, **keyword_arguments): ) LiteratureReferencesMixIn.__init__(self) - @remote_function + # Remote functions - getters and setters + # Note that we should maybe use SI units rather than derived (MSun etc), at + # least while these are not certain to be 
the same in the code and in + # AMUSE... + @remote_function(can_handle_array=True) def get_initial_mass(index_of_the_star="i"): - returns (mass="d" | units.julianyr) + returns (mass="d" | units.MSun) + + @remote_function(can_handle_array=True) + def get_epoch(index_of_the_star="i"): + returns (epoch="d" | units.julianyr) + + @remote_function(can_handle_array=True) + def get_core_mass(index_of_the_star="i"): + returns (core_mass="d" | units.MSun) + + @remote_function(can_handle_array=True) + def get_core_radius(index_of_the_star="i"): + returns (core_radius="d" | units.RSun) + + @remote_function(can_handle_array=True) + def get_convective_envelope_mass(index_of_the_star="i"): + returns (convective_envelope_mass="d" | units.MSun) + + @remote_function(can_handle_array=True) + def get_convective_envelope_radius(index_of_the_star="i"): + returns (convective_envelope_radius="d" | units.RSun) + + @remote_function(can_handle_array=True) + def get_CO_core_mass(index_of_the_star="i"): + returns (CO_core_mass="d" | units.MSun) + + @remote_function(can_handle_array=True) + def get_main_sequence_lifetime(index_of_the_star="i"): + returns (main_sequence_lifetime="d" | units.Myr) # high level interface class @@ -85,52 +117,49 @@ def define_particle_sets(self, handler): handler.set_new("particles", "new_particle") handler.set_delete("particles", "delete_star") - handler.add_getter("particles", "mass", "get_mass", names=("mass",)) - handler.add_getter("particles", "radius", "get_radius", names=("radius",)) - handler.add_getter("particles", "age", "get_age", names=("age",)) + handler.add_getter("particles", "get_mass", names=("mass",)) + handler.add_getter("particles", "get_radius", names=("radius",)) + handler.add_getter("particles", "get_age", names=("age",)) handler.add_getter( - "particles", "time_step", "get_time_step", names=("time_step",) + "particles", "get_time_step", names=("time_step",) ) handler.add_getter( - "particles", "temperature", "get_temperature", 
names=("temperature",) + "particles", "get_temperature", names=("temperature",) ) handler.add_getter( - "particles", "luminosity", "get_luminosity", names=("luminosity",) + "particles", "get_luminosity", names=("luminosity",) ) handler.add_getter( - "particles", "stellar_type", "get_stellar_type", names=("stellar_type",) + "particles", "get_stellar_type", names=("stellar_type",) ) - handler.add_getter("particles", "spin", "get_spin", names=("spin",)) - handler.add_getter("particles", "epoch", "get_epoch", names=("epoch",)) + # handler.add_getter("particles", "get_spin", names=("spin",)) + handler.add_getter("particles", "get_epoch", names=("epoch",)) handler.add_getter( "particles", - "main_sequence_lifetime", "get_main_sequence_lifetime", names=("main_sequence_lifetime",), ) handler.add_getter( - "particles", "core_mass", "get_core_mass", names=("core_mass",) + "particles", "get_core_mass", names=("core_mass",) ) handler.add_getter( - "particles", "CO_core_mass", "get_CO_core_mass", names=("CO_core_mass",) + "particles", "get_CO_core_mass", names=("CO_core_mass",) ) handler.add_getter( - "particles", "core_radius", "get_core_radius", names=("core_radius",) + "particles", "get_core_radius", names=("core_radius",) ) handler.add_getter( "particles", - "convective_envelope_mass", "get_convective_envelope_mass", names=("convective_envelope_mass",), ) handler.add_getter( "particles", - "convective_envelope_radius", "get_convective_envelope_radius", names=("convective_envelope_radius",), ) handler.add_getter( - "particles", "initial_mass", "get_initial_mass", names=("initial_mass",) + "particles", "get_initial_mass", names=("initial_mass",) ) From 186fd73295ddcb5d4170354a9638a8fa1933f00b Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 19 Feb 2025 17:46:43 +0100 Subject: [PATCH 13/40] add parameter setters/getters --- src/amuse/community/metisse/interface.f90 | 450 +++++++++++++++++++--- src/amuse/community/metisse/interface.py | 218 ++++++++++- 2 files changed, 
614 insertions(+), 54 deletions(-) diff --git a/src/amuse/community/metisse/interface.f90 b/src/amuse/community/metisse/interface.f90 index 7060a183fa..7b92dd7e51 100644 --- a/src/amuse/community/metisse/interface.f90 +++ b/src/amuse/community/metisse/interface.f90 @@ -9,6 +9,12 @@ module metisseInterface contains + ! standard AMUSE interface functions: + ! initialize + ! commit_parameters + ! commit_particles + ! cleanup_code + function initialize(error) implicit none integer :: error @@ -19,7 +25,7 @@ function initialize(error) initialize = -1 ! Need to define this front end for METISSE - call initialize_front_end("amuse") + call initialize_front_end("COSMIC") initial_Z = -1.0_c_double call METISSE_zcnsts(initial_Z,zpars,'','', error) @@ -30,52 +36,397 @@ function initialize(error) initialize = 0 end function - function cleanup_code() - implicit none - integer :: cleanup_code - cleanup_code=0 - end function - - function commit_parameters() - implicit none - integer :: commit_parameters - commit_parameters=0 - end function - - function commit_particles() - implicit none - integer :: commit_particles - integer :: number_of_particles - integer :: error + function cleanup_code() + implicit none + integer :: cleanup_code + cleanup_code=0 + end function + + function commit_parameters() + implicit none + integer :: commit_parameters + commit_parameters=0 + end function + + function commit_particles() + implicit none + integer :: commit_particles + integer :: number_of_particles + integer :: error + + error = get_number_of_particles(number_of_particles) + allocate(mass_array(number_of_particles)) + mass_array = 0.0 + + commit_particles=0 + end function - error = get_number_of_particles(number_of_particles) - allocate(mass_array(number_of_particles)) - mass_array = 0.0 + ! setters / getters for tracks + ! metallicity_dir (string) + ! metallicity_dir_he (string) + ! z_accuracy_limit (real) + ! 
mass_accuracy_limit (real) - commit_particles=0 - end function - - function delete_star(index_of_the_star) - implicit none - integer :: index_of_the_star - integer :: delete_star - delete_star=0 - end function - - function evolve_for(index_of_the_star, delta_t) - implicit none - integer :: index_of_the_star - real(c_double) :: delta_t - integer :: evolve_for - evolve_for=0 - end function + function set_metallicity_dir(metallicity_dir_in) + implicit none + character(len=256) :: metallicity_dir_in + integer :: set_metallicity_dir + METALLICITY_DIR = metallicity_dir_in + set_metallicity_dir = 0 + end function + + function get_metallicity_dir(metallicity_dir_out) + implicit none + character(len=256) :: metallicity_dir_out + integer :: get_metallicity_dir + metallicity_dir_out = METALLICITY_DIR + get_metallicity_dir = 0 + end function + + function set_metallicity_dir_he(metallicity_dir_he_in) + implicit none + character(len=256) :: metallicity_dir_he_in + integer :: set_metallicity_dir_he + METALLICITY_DIR_HE = metallicity_dir_he_in + set_metallicity_dir_he = 0 + end function + + function get_metallicity_dir_he(metallicity_dir_he_out) + implicit none + character(len=256) :: metallicity_dir_he_out + integer :: get_metallicity_dir_he + metallicity_dir_he_out = METALLICITY_DIR_HE + get_metallicity_dir_he = 0 + end function + + function set_z_accuracy_limit(z_accuracy_limit_in) + implicit none + real(c_double) :: z_accuracy_limit_in + integer :: set_z_accuracy_limit + z_accuracy_limit = z_accuracy_limit_in + set_z_accuracy_limit = 0 + end function + + function get_z_accuracy_limit(z_accuracy_limit_out) + implicit none + real(c_double) :: z_accuracy_limit_out + integer :: get_z_accuracy_limit + z_accuracy_limit_out = z_accuracy_limit + get_z_accuracy_limit = 0 + end function + + function set_mass_accuracy_limit(mass_accuracy_limit_in) + implicit none + real(c_double) :: mass_accuracy_limit_in + integer :: set_mass_accuracy_limit + mass_accuracy_limit = mass_accuracy_limit_in 
+ set_mass_accuracy_limit = 0 + end function + + function get_mass_accuracy_limit(mass_accuracy_limit_out) + implicit none + real(c_double) :: mass_accuracy_limit_out + integer :: get_mass_accuracy_limit + mass_accuracy_limit_out = mass_accuracy_limit + get_mass_accuracy_limit = 0 + end function + + ! setters / getters for misc controls + ! verbose (bool) + ! construct_postagb_track (bool) + + function set_verbose(verbose_in) + implicit none + logical :: verbose_in + integer :: set_verbose + verbose = verbose_in + set_verbose = 0 + end function + + function get_verbose(verbose_out) + implicit none + logical :: verbose_out + integer :: get_verbose + verbose_out = verbose + get_verbose = 0 + end function + + function set_construct_postagb_track(construct_postagb_track_in) + implicit none + logical :: construct_postagb_track_in + integer :: set_construct_postagb_track + construct_postagb_track = construct_postagb_track_in + set_construct_postagb_track = 0 + end function + + function get_construct_postagb_track(construct_postagb_track_out) + implicit none + logical :: construct_postagb_track_out + integer :: get_construct_postagb_track + construct_postagb_track_out = construct_postagb_track + get_construct_postagb_track = 0 + end function + + ! setters/getters for parameters + ! initial_metallicity(real) + ! wd_mass_scheme (string, 256) + ! use_initial_final_mass_relation(bool) + ! bhns_mass_scheme (string, 256) + ! max_ns_mass (real) + ! 
allow_electron_capture (bool) + + function set_initial_metallicity(initial_metallicity_in) + implicit none + real(c_double) :: initial_metallicity_in + integer :: set_initial_metallicity + initial_Z = initial_metallicity_in + set_initial_metallicity = 0 + end function + + function get_initial_metallicity(initial_metallicity_out) + implicit none + real(c_double) :: initial_metallicity_out + integer :: get_initial_metallicity + initial_metallicity_out = initial_Z + get_initial_metallicity = 0 + end function + + function set_wd_mass_scheme(wd_mass_scheme_in) + implicit none + character(len=256) :: wd_mass_scheme_in + integer :: set_wd_mass_scheme + WD_mass_scheme = wd_mass_scheme_in + set_wd_mass_scheme = 0 + end function + + function get_wd_mass_scheme(wd_mass_scheme_out) + implicit none + character(len=256) :: wd_mass_scheme_out + integer :: get_wd_mass_scheme + wd_mass_scheme_out = WD_mass_scheme + get_wd_mass_scheme = 0 + end function + + function set_use_initial_final_mass_relation(use_initial_final_mass_relation_in) + implicit none + logical :: use_initial_final_mass_relation_in + integer :: set_use_initial_final_mass_relation + use_initial_final_mass_relation = use_initial_final_mass_relation_in + set_use_initial_final_mass_relation = 0 + end function + + function get_use_initial_final_mass_relation(use_initial_final_mass_relation_out) + implicit none + logical :: use_initial_final_mass_relation_out + integer :: get_use_initial_final_mass_relation + use_initial_final_mass_relation_out = use_initial_final_mass_relation + get_use_initial_final_mass_relation = 0 + end function + + function set_bhns_mass_scheme(bhns_mass_scheme_in) + implicit none + character(len=256) :: bhns_mass_scheme_in + integer :: set_bhns_mass_scheme + BHNS_mass_scheme = bhns_mass_scheme_in + set_bhns_mass_scheme = 0 + end function + + function get_bhns_mass_scheme(bhns_mass_scheme_out) + implicit none + character(len=256) :: bhns_mass_scheme_out + integer :: get_bhns_mass_scheme + 
bhns_mass_scheme_out = BHNS_mass_scheme + get_bhns_mass_scheme = 0 + end function + + function set_max_ns_mass(max_ns_mass_in) + implicit none + real(c_double) :: max_ns_mass_in + integer :: set_max_ns_mass + max_NS_mass = max_ns_mass_in + set_max_ns_mass = 0 + end function + + function get_max_ns_mass(max_ns_mass_out) + implicit none + real(c_double) :: max_ns_mass_out + integer :: get_max_ns_mass + max_ns_mass_out = max_NS_mass + get_max_ns_mass = 0 + end function + + function set_allow_electron_capture(allow_electron_capture_in) + implicit none + logical :: allow_electron_capture_in + integer :: set_allow_electron_capture + allow_electron_capture = allow_electron_capture_in + set_allow_electron_capture = 0 + end function + + function get_allow_electron_capture(allow_electron_capture_out) + implicit none + logical :: allow_electron_capture_out + integer :: get_allow_electron_capture + allow_electron_capture_out = allow_electron_capture + get_allow_electron_capture = 0 + end function + + ! setters/getters for timestep control + ! 
pts_1 to pts_3 (real) + + function set_time_step_pts_1(pts_1_in) + implicit none + real(c_double) :: pts_1_in + integer :: set_time_step_pts_1 + pts_1 = pts_1_in + set_time_step_pts_1 = 0 + end function + + function get_time_step_pts_1(pts_1_out) + implicit none + real(c_double) :: pts_1_out + integer :: get_time_step_pts_1 + pts_1_out = pts_1 + get_time_step_pts_1 = 0 + end function + + function set_time_step_pts_2(pts_2_in) + implicit none + real(c_double) :: pts_2_in + integer :: set_time_step_pts_2 + pts_2 = pts_2_in + set_time_step_pts_2 = 0 + end function + + function get_time_step_pts_2(pts_2_out) + implicit none + real(c_double) :: pts_2_out + integer :: get_time_step_pts_2 + pts_2_out = pts_2 + get_time_step_pts_2 = 0 + end function + + function set_time_step_pts_3(pts_3_in) + implicit none + real(c_double) :: pts_3_in + integer :: set_time_step_pts_3 + pts_3 = pts_3_in + set_time_step_pts_3 = 0 + end function + + function get_time_step_pts_3(pts_3_out) + implicit none + real(c_double) :: pts_3_out + integer :: get_time_step_pts_3 + pts_3_out = pts_3 + get_time_step_pts_3 = 0 + end function + + ! particle management: + ! new_particle, delete_particle + + function new_particle(index_of_the_particle, mass) + implicit none + integer, intent(inout) :: index_of_the_particle + real(c_double), intent(inout) :: mass + integer :: new_particle + index_of_the_particle = star_system%new_star(mass) + end function + + function delete_star(index_of_the_star) + implicit none + integer :: index_of_the_star + integer :: delete_star + call star_system%remove_star(index_of_the_star) + delete_star = 0 + end function + + ! evolving stars: + ! evolve_for, evolve_one_step + ! 
evolve_model, evolve_stars - function evolve_one_step(index_of_the_star) - implicit none - integer :: index_of_the_star - integer :: evolve_one_step - evolve_one_step=0 - end function + function evolve_for(index_of_the_star, delta_t) + implicit none + integer :: index_of_the_star + real(c_double) :: delta_t + integer :: evolve_for + evolve_for = 0 + end function + + function evolve_one_step(index_of_the_star) + implicit none + integer :: index_of_the_star + integer :: evolve_one_step + integer :: error + real(c_double) :: time_step + real(c_double) :: mass + real(c_double) :: age + + write(*,*) 'evolve_one_step', index_of_the_star + call star_system%get_time_step(index_of_the_star, time_step, error) + call star_system%get_initial_mass(index_of_the_star, mass, error) + call star_system%get_age(index_of_the_star, age, error) + write(*,*) 'age, mass, time_step', age, mass, time_step + call allocate_track(1, mass) + write(*,*) 'allocate_track done' + call evolv_metisse(mass, age + time_step, error, 1) + write(*,*) 'evolv_metisse done' + call dealloc_track() + write(*,*) 'dealloc_track done' + if (error /= 0) then + call star_system%set_mass(index_of_the_star, mass, error) + call star_system%set_age(index_of_the_star, age + time_step, error) + end if + write(*,*) 'evolve_one_step done' + evolve_one_step = 0 + end function + + function evolve_model(t_end) + implicit none + real(c_double) :: t_end + integer :: evolve_model + integer :: number_of_stars + integer :: ierr + integer :: i + real(c_double) :: mass + + call star_system%get_number_of_stars(number_of_stars) + + do i = 1, number_of_stars + call star_system%get_initial_mass(i, mass, ierr) + call allocate_track(1, mass) + call evolv_metisse(mass, t_end, ierr, 1) + call dealloc_track() + if (ierr /= 0) then + call star_system%set_mass(i, mass, ierr) + call star_system%set_age(i, t_end, ierr) + end if + end do + evolve_model = 0 + end function + + function evolve_stars(indices_of_stars, delta_time, error, n) + implicit 
none + integer, intent(in) :: n + integer :: error + integer, intent(in), dimension(n) :: indices_of_stars + real(c_double) :: delta_time + integer :: evolve_stars + integer :: i + integer :: number_of_stars_to_evolve + + number_of_stars_to_evolve = size(indices_of_stars) + + do i = 1, number_of_stars_to_evolve + error = evolve_for(indices_of_stars(i), delta_time) + if (error /= 0) then + evolve_stars = error + return + end if + end do + + evolve_stars = 0 + end function function get_age(index_of_the_star, age) implicit none @@ -105,6 +456,7 @@ function get_metallicity(metallicity) implicit none real(c_double) :: metallicity integer :: get_metallicity + ! what to do here depends on whether metallicity can be set for individual stars or only globally get_metallicity=0 end function @@ -216,15 +568,7 @@ function initialize_code() integer :: initialize_code initialize_code=0 end function - - function new_particle(index_of_the_star, mass) - implicit none - integer, intent(inout) :: index_of_the_star - real(c_double), intent(inout) :: mass - integer :: new_particle - index_of_the_star = star_system%new_star(mass) - new_particle=0 - end function + function recommit_parameters() implicit none diff --git a/src/amuse/community/metisse/interface.py b/src/amuse/community/metisse/interface.py index c65f22c001..c48bfc5503 100644 --- a/src/amuse/community/metisse/interface.py +++ b/src/amuse/community/metisse/interface.py @@ -35,6 +35,7 @@ def __init__(self, **keyword_arguments): self, name_of_the_worker="metisse_worker", **keyword_arguments ) LiteratureReferencesMixIn.__init__(self) + self.model_time = 0.0 | units.julianyr # Remote functions - getters and setters # Note that we should maybe use SI units rather than derived (MSun etc), at @@ -72,6 +73,124 @@ def get_CO_core_mass(index_of_the_star="i"): def get_main_sequence_lifetime(index_of_the_star="i"): returns (main_sequence_lifetime="d" | units.Myr) + @remote_function(must_handle_array=True) + def 
evolve_stars(index_of_the_star="i", time_delta="d" | units.Myr): + returns (error="i") + + # getters and setters for tracks + # metallicity_dir (string) + # metallicity_dir_he (string) + # z_accuracy_limit (float) + # mass_accuracy_limit (float) + + @remote_function + def get_metallicity_dir(): + returns (metallicity_dir="s") + + @remote_function + def set_metallicity_dir(metallicity_dir="s"): + returns () + + @remote_function + def get_metallicity_dir_he(): + returns (metallicity_dir_he="s") + + @remote_function + def set_metallicity_dir_he(metallicity_dir_he="s"): + returns () + + @remote_function + def get_z_accuracy_limit(): + returns (z_accuracy_limit="d") + + @remote_function + def set_z_accuracy_limit(z_accuracy_limit="d"): + returns () + + @remote_function + def get_mass_accuracy_limit(): + returns (mass_accuracy_limit="d") + + @remote_function + def set_mass_accuracy_limit(mass_accuracy_limit="d"): + returns () + + # getters and setters for miscellaneous controls + # verbose (bool) + # construct_postagb_track (bool) + + @remote_function + def get_verbose(): + returns (verbose="b") + + @remote_function + def set_verbose(verbose="b"): + returns () + + @remote_function + def get_construct_postagb_track(): + returns (construct_postagb_track="b") + + @remote_function + def set_construct_postagb_track(construct_postagb_track="b"): + returns () + + # getters and setters for parameters + # initial_metallicity(real) + # wd_mass_scheme (string, 256) + # use_initial_final_mass_relation(bool) + # bhns_mass_scheme (string, 256) + # max_ns_mass (real) + # allow_electron_capture (bool) + + @remote_function + def get_initial_metallicity(): + returns (initial_metallicity="d") + + @remote_function + def set_initial_metallicity(initial_metallicity="d"): + returns () + + @remote_function + def get_wd_mass_scheme(): + returns (wd_mass_scheme="s") + + @remote_function + def set_wd_mass_scheme(wd_mass_scheme="s"): + returns () + + @remote_function + def 
get_use_initial_final_mass_relation(): + returns (use_initial_final_mass_relation="b") + + @remote_function + def set_use_initial_final_mass_relation(use_initial_final_mass_relation="b"): + returns () + + @remote_function + def get_bhns_mass_scheme(): + returns (bhns_mass_scheme="s") + + @remote_function + def set_bhns_mass_scheme(bhns_mass_scheme="s"): + returns () + + @remote_function + def get_max_ns_mass(): + returns (max_ns_mass="d") + + @remote_function + def set_max_ns_mass(max_ns_mass="d"): + returns () + + @remote_function + def get_allow_electron_capture(): + returns (allow_electron_capture="b") + + @remote_function + def set_allow_electron_capture(allow_electron_capture="b"): + returns () + # high level interface class class Metisse(se.StellarEvolution): @@ -110,7 +229,94 @@ def define_parameters(self, handler): # "description", # default_value = # ) - pass + + + + # Track parameters + handler.add_method_parameter( + "get_metallicity_dir", + "set_metallicity_dir", + "metallicity_dir", + "Location of the tracks", + default_value="./", + ) + + handler.add_method_parameter( + "get_metallicity_dir_he", + "set_metallicity_dir_he", + "metallicity_dir_he", + "Location of the He tracks", + default_value="./", + ) + + handler.add_method_parameter( + "get_z_accuracy_limit", + "set_z_accuracy_limit", + "z_accuracy_limit", + "Metallicity accuracy limit", + default_value=1.0e-2, + ) + + handler.add_method_parameter( + "get_mass_accuracy_limit", + "set_mass_accuracy_limit", + "mass_accuracy_limit", + "Mass accuracy limit", + default_value=1.0e-4, + ) + + # handlers for parameters: + # initial_metallicity + # wd_mass_scheme + # use_initial_final_mass_relation + # bhns_mass_scheme + # max_ns_mass + # allow_electron_capture + + handler.add_method_parameter( + "get_initial_metallicity", + "set_initial_metallicity", + "initial_metallicity", + "Initial metallicity", + default_value=-1.0, + ) + + handler.add_method_parameter( + "get_wd_mass_scheme", + "set_wd_mass_scheme", + 
"wd_mass_scheme", + ( + "White Dwarf (WD) luminosity calculation method:\n" + "(1) \"Mestel\" - Shapiro S. L., Teukolsky S. A., 1983\n" + "(2) \"Modified_mestel\" - Hurley J. R., Shara M. M., 2003" + ), + default_value="Modified_mestel", + ) + + handler.add_method_parameter( + "get_use_initial_final_mass_relation", + "set_use_initial_final_mass_relation", + "use_initial_final_mass_relation", + ( + "If True use the initial final mass relation for white dwarfs " + "from Han, Z., Posialowski, P., Eggleton, P. P., 1995." + ), + default_value=False, + ) + + handler.add_method_parameter( + "get_bhns_mass_scheme", + "set_bhns_mass_scheme", + "bhns_mass_scheme", + ( + "Neutron Star/Black Hole (NS/BH) type and mass calculation method:\n" + "(1) \"original_SSE\" - Hurley et al. 2000\n" + "(2) \"Belczynski2002\" - Belczynski et al. 2002\n" + "(3) \"Belczynski2008\" - Belczynski et al. 2008\n" + "(4) \"Eldridge_Tout2004\" - Eldridge J. J., Tout C. A., 2004" + ), + default_value="Belczynski2008", + ) def define_particle_sets(self, handler): handler.define_set("particles", "index_of_the_star") @@ -162,6 +368,16 @@ def define_particle_sets(self, handler): "particles", "get_initial_mass", names=("initial_mass",) ) + def evolve_model(self, end_time=None, keep_synchronous=True): + if not keep_synchronous: + self._evolve_particles(self.particles, self.particles.time_step + self.particles.age) + return + + if end_time is None: + end_time = self.model_time + min(self.particles.time_step) + self.evolve_stars(self.particles, end_time - self.model_time + self.particles.age) + self.model_time = end_time + class MetisseParticles(Particles): From 8363f303229787a3ce9429c970ee205af81dcc44 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 26 Feb 2025 14:19:46 +0100 Subject: [PATCH 14/40] cleanup and fixes to resize --- src/amuse/community/metisse/storage.f90 | 30 +++++++++++-------------- 1 file changed, 13 insertions(+), 17 deletions(-) diff --git 
a/src/amuse/community/metisse/storage.f90 b/src/amuse/community/metisse/storage.f90 index 78f33cae29..ff5f3e6d2a 100644 --- a/src/amuse/community/metisse/storage.f90 +++ b/src/amuse/community/metisse/storage.f90 @@ -123,15 +123,12 @@ function new_star(self, initial_mass) result(new_id) self%star_array(i)%metallicity = 0.0_c_double self%star_array(i)%radius = 0.0_c_double self%star_array(i)%spin = 0.0_c_double - self%star_array(i)%stellar_type = 0 + self%star_array(i)%stellar_type = 0_c_int self%star_array(i)%time_step = 0.0_c_double self%star_array(i)%temperature = 0.0_c_double self%next_star_id = new_id + 1 - write(*,*) "new star with id: ", new_id - write(*,*) "mass: ", initial_mass - end function new_star subroutine remove_star(self, id) @@ -140,16 +137,12 @@ subroutine remove_star(self, id) integer :: i - write(*,*) "removing star with id: ", id do i = 1, self%num_stars - write(*,*) "i: ", i, " id: ", self%star_array(i)%id if (self%star_array(i)%id == id) then - write(*,*) "removing star with id: ", id if (i /= self%num_stars) then self%star_array(i:self%num_stars-1) = self%star_array(i+1:self%num_stars) end if self%num_stars = self%num_stars - 1 - write(*,*) "num_stars: ", self%num_stars call self%resize(self%num_stars) exit end if @@ -157,21 +150,26 @@ subroutine remove_star(self, id) end subroutine remove_star - subroutine resize(self, new_size) + subroutine resize(self, required_size) class(stars), intent(inout) :: self - integer, intent(in) :: new_size + integer, intent(in) :: required_size type(star), allocatable :: temp(:) integer :: current_size, new_capacity - - if (new_size <= 0) then + + if (required_size <= 0) then self%num_stars = 0 if (allocated(self%star_array)) deallocate(self%star_array) return end if + + current_size = size(self%star_array) + if (required_size .lt. 
current_size) return - current_size = self%num_stars - new_capacity = max(100, int((1.1 ** ceiling(log10(real(new_size)))) * 10)) - + new_capacity = current_size + do while (required_size .gt. new_capacity) + new_capacity = max(100, int(new_capacity * 1.1)) + end do + if (.not. allocated(self%star_array) .or. new_capacity > size(self%star_array)) then if (allocated(self%star_array)) then allocate(temp(current_size)) @@ -256,7 +254,6 @@ subroutine get_property_double(self, id, property_name, value, error) error = -2 ! property not found end select error = 0 - write(*,*) "get_property_double: ", id, property_name, value end subroutine get_property_double subroutine get_property_int(self, id, property_name, value, error) @@ -282,7 +279,6 @@ subroutine get_property_int(self, id, property_name, value, error) return end select error = 0 - write(*,*) "get_property_int: ", id, property_name, value end subroutine get_property_int ! Setters for all the stellar properties From 4549b19f3c607e4a1f889c91f80b5dc63af27597 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 26 Feb 2025 14:20:19 +0100 Subject: [PATCH 15/40] interface updates --- src/amuse/community/metisse/interface.f90 | 143 +++++++++++++--------- src/amuse/community/metisse/interface.py | 85 +++++++++++-- 2 files changed, 163 insertions(+), 65 deletions(-) diff --git a/src/amuse/community/metisse/interface.f90 b/src/amuse/community/metisse/interface.f90 index 7b92dd7e51..6ffaa3a536 100644 --- a/src/amuse/community/metisse/interface.f90 +++ b/src/amuse/community/metisse/interface.f90 @@ -15,27 +15,39 @@ module metisseInterface ! commit_particles ! cleanup_code - function initialize(error) - implicit none - integer :: error - integer :: initialize + subroutine assign_commons_amuse() - real(c_double) :: zpars(20) + end subroutine - initialize = -1 + function initialize_code() + implicit none + integer :: initialize_code ! 
Need to define this front end for METISSE - call initialize_front_end("COSMIC") + call initialize_front_end("AMUSE") initial_Z = -1.0_c_double + write_output_to_file = .false. - call METISSE_zcnsts(initial_Z,zpars,'','', error) - if (error/=0) return - - write(*,*) "Number of tracks: ", number_of_tracks - - initialize = 0 + initialize_code=0 end function - + + function recommit_parameters() + implicit none + integer :: recommit_parameters + recommit_parameters=0 + end function + + function recommit_particles() + implicit none + integer :: number_of_particles + integer :: error + integer :: recommit_particles + deallocate(mass_array) + error = get_number_of_particles(number_of_particles) + allocate(mass_array(number_of_particles)) + recommit_particles=0 + end function + function cleanup_code() implicit none integer :: cleanup_code @@ -45,18 +57,32 @@ function cleanup_code() function commit_parameters() implicit none integer :: commit_parameters + integer :: error + real(c_double) :: zpars(20) + commit_parameters=0 + write(*,*) "committing parameters" + + ! 
This will read the tracks - so need to have set the paths before + call METISSE_zcnsts(initial_Z,zpars,'','', error) + if (error/=0) return + + call assign_commons_main() + + write(*,*) "Number of tracks: ", number_of_tracks end function function commit_particles() implicit none integer :: commit_particles integer :: number_of_particles + integer :: i integer :: error + real(c_double) :: mass error = get_number_of_particles(number_of_particles) + allocate(mass_array(number_of_particles)) - mass_array = 0.0 commit_particles=0 end function @@ -348,9 +374,23 @@ function delete_star(index_of_the_star) function evolve_for(index_of_the_star, delta_t) implicit none integer :: index_of_the_star - real(c_double) :: delta_t + integer :: error + real(c_double) :: delta_t, time_step integer :: evolve_for evolve_for = 0 + + write(*,*) 'evolving star ', index_of_the_star, ' for ', delta_t + do while (delta_t > 0) + call star_system%get_time_step(index_of_the_star, time_step, error) + if (delta_t < time_step) then + write(*,*) 'setting time step of star ', index_of_the_star, ' to ', delta_t + call star_system%set_time_step(index_of_the_star, delta_t, error) + time_step = delta_t + end if + evolve_for = evolve_one_step(index_of_the_star) + if (evolve_for /= 0) return + delta_t = delta_t - time_step + end do end function function evolve_one_step(index_of_the_star) @@ -361,23 +401,29 @@ function evolve_one_step(index_of_the_star) real(c_double) :: time_step real(c_double) :: mass real(c_double) :: age + type(track), pointer :: t - write(*,*) 'evolve_one_step', index_of_the_star call star_system%get_time_step(index_of_the_star, time_step, error) call star_system%get_initial_mass(index_of_the_star, mass, error) call star_system%get_age(index_of_the_star, age, error) - write(*,*) 'age, mass, time_step', age, mass, time_step call allocate_track(1, mass) - write(*,*) 'allocate_track done' call evolv_metisse(mass, age + time_step, error, 1) - write(*,*) 'evolv_metisse done' - call 
dealloc_track() - write(*,*) 'dealloc_track done' - if (error /= 0) then - call star_system%set_mass(index_of_the_star, mass, error) - call star_system%set_age(index_of_the_star, age + time_step, error) + t => tarr(1) + if (error == 0) then + call star_system%set_mass(index_of_the_star, t % pars % mass, error) + call star_system%set_age(index_of_the_star, t % pars % age, error) + call star_system%set_time_step(index_of_the_star, t % pars % dt, error) + call star_system%set_luminosity(index_of_the_star, t % pars % luminosity, error) + call star_system%set_temperature(index_of_the_star, t % pars % Teff, error) + call star_system%set_radius(index_of_the_star, t % pars % radius, error) + call star_system%set_epoch(index_of_the_star, t % pars % epoch, error) + call star_system%set_core_mass(index_of_the_star, t % pars % core_mass, error) + call star_system%set_core_radius(index_of_the_star, t % pars % core_radius, error) + call star_system%set_stellar_type(index_of_the_star, t % pars % phase, error) + call star_system%set_co_core_mass(index_of_the_star, t % pars % McCO, error) + call star_system%set_spin(index_of_the_star, t % pars % bhspin, error) end if - write(*,*) 'evolve_one_step done' + call dealloc_track() evolve_one_step = 0 end function @@ -385,23 +431,23 @@ function evolve_model(t_end) implicit none real(c_double) :: t_end integer :: evolve_model - integer :: number_of_stars - integer :: ierr + integer :: number_of_particles + integer :: error integer :: i real(c_double) :: mass - call star_system%get_number_of_stars(number_of_stars) - - do i = 1, number_of_stars - call star_system%get_initial_mass(i, mass, ierr) - call allocate_track(1, mass) - call evolv_metisse(mass, t_end, ierr, 1) - call dealloc_track() - if (ierr /= 0) then - call star_system%set_mass(i, mass, ierr) - call star_system%set_age(i, t_end, ierr) + call star_system%get_number_of_stars(number_of_particles) + do i = 1, number_of_particles + call star_system%get_initial_mass(i, mass, error) + 
call allocate_track(1,mass) + call evolv_metisse(mass, t_end, error, 1) + if (error /= 0) then + call star_system%set_age(i, t_end, error) + call star_system%set_mass(i, mass, error) end if + call dealloc_track() end do + evolve_model = 0 end function @@ -434,6 +480,7 @@ function get_age(index_of_the_star, age) real(c_double) :: age integer :: get_age call star_system%get_age(index_of_the_star, age, get_age) + age = age * 1.0d+06 end function function get_luminosity(index_of_the_star, luminosity) @@ -553,6 +600,7 @@ function get_time_step(index_of_the_star, time_step) real(c_double) :: time_step integer :: get_time_step call star_system%get_time_step(index_of_the_star, time_step, get_time_step) + time_step = time_step * 1.0d+06 end function function get_initial_mass(index_of_the_star, mass) @@ -563,25 +611,6 @@ function get_initial_mass(index_of_the_star, mass) call star_system%get_initial_mass(index_of_the_star, mass, get_initial_mass) end function - function initialize_code() - implicit none - integer :: initialize_code - initialize_code=0 - end function - - - function recommit_parameters() - implicit none - integer :: recommit_parameters - recommit_parameters=0 - end function - - function recommit_particles() - implicit none - integer :: recommit_particles - recommit_particles=0 - end function - function set_metallicity(metallicity) implicit none real(c_double) :: metallicity diff --git a/src/amuse/community/metisse/interface.py b/src/amuse/community/metisse/interface.py index c48bfc5503..74cbd4136b 100644 --- a/src/amuse/community/metisse/interface.py +++ b/src/amuse/community/metisse/interface.py @@ -73,9 +73,9 @@ def get_CO_core_mass(index_of_the_star="i"): def get_main_sequence_lifetime(index_of_the_star="i"): returns (main_sequence_lifetime="d" | units.Myr) - @remote_function(must_handle_array=True) - def evolve_stars(index_of_the_star="i", time_delta="d" | units.Myr): - returns (error="i") + @remote_function(can_handle_array=True) + def 
evolve_for(index_of_the_star="i", time_delta="d" | units.Myr): + returns () # getters and setters for tracks # metallicity_dir (string) @@ -191,6 +191,30 @@ def get_allow_electron_capture(): def set_allow_electron_capture(allow_electron_capture="b"): returns () + @remote_function + def get_time_step_pts_1(): + returns (fractional_time_step_1="d") + + @remote_function + def set_time_step_pts_1(fractional_time_step_1="d"): + returns () + + @remote_function + def get_time_step_pts_2(): + returns (fractional_time_step_2="d") + + @remote_function + def set_time_step_pts_2(fractional_time_step_2="d"): + returns () + + @remote_function + def get_time_step_pts_3(): + returns (fractional_time_step_3="d") + + @remote_function + def set_time_step_pts_3(fractional_time_step_3="d"): + returns () + # high level interface class class Metisse(se.StellarEvolution): @@ -239,6 +263,7 @@ def define_parameters(self, handler): "metallicity_dir", "Location of the tracks", default_value="./", + must_set_before_get=True, ) handler.add_method_parameter( @@ -247,6 +272,7 @@ def define_parameters(self, handler): "metallicity_dir_he", "Location of the He tracks", default_value="./", + must_set_before_get=True, ) handler.add_method_parameter( @@ -255,6 +281,7 @@ def define_parameters(self, handler): "z_accuracy_limit", "Metallicity accuracy limit", default_value=1.0e-2, + must_set_before_get=True, ) handler.add_method_parameter( @@ -263,6 +290,7 @@ def define_parameters(self, handler): "mass_accuracy_limit", "Mass accuracy limit", default_value=1.0e-4, + must_set_before_get=True, ) # handlers for parameters: @@ -279,6 +307,7 @@ def define_parameters(self, handler): "initial_metallicity", "Initial metallicity", default_value=-1.0, + must_set_before_get=True, ) handler.add_method_parameter( @@ -291,6 +320,7 @@ def define_parameters(self, handler): "(2) \"Modified_mestel\" - Hurley J. R., Shara M. 
M., 2003" ), default_value="Modified_mestel", + must_set_before_get=True, ) handler.add_method_parameter( @@ -302,6 +332,7 @@ def define_parameters(self, handler): "from Han, Z., Posialowski, P., Eggleton, P. P., 1995." ), default_value=False, + must_set_before_get=True, ) handler.add_method_parameter( @@ -316,6 +347,34 @@ def define_parameters(self, handler): "(4) \"Eldridge_Tout2004\" - Eldridge J. J., Tout C. A., 2004" ), default_value="Belczynski2008", + must_set_before_get=True, + ) + + handler.add_method_parameter( + "get_time_step_pts_1", + "set_time_step_pts_1", + "fractional_time_step_1", + "Determine timestep for 95% of MS, and HeMS", + default_value=0.05, + must_set_before_get=True, + ) + + handler.add_method_parameter( + "get_time_step_pts_2", + "set_time_step_pts_2", + "fractional_time_step_2", + "Determine timestep for last 5% of MS, cHeBurn, HeHG, and HeGB", + default_value=0.01, + must_set_before_get=True, + ) + + handler.add_method_parameter( + "get_time_step_pts_3", + "set_time_step_pts_3", + "fractional_time_step_3", + "Determine timestep for HG, RGB, EAGB, and TPAGB", + default_value=0.02, + must_set_before_get=True, ) def define_particle_sets(self, handler): @@ -368,15 +427,25 @@ def define_particle_sets(self, handler): "particles", "get_initial_mass", names=("initial_mass",) ) + handler.add_method("particles", "evolve_one_step") + handler.add_method("particles", "evolve_for") + def evolve_model(self, end_time=None, keep_synchronous=True): + print("evolve_model", end_time, keep_synchronous) if not keep_synchronous: - self._evolve_particles(self.particles, self.particles.time_step + self.particles.age) + for particle in self.particles: + particle.evolve_one_step() return - if end_time is None: - end_time = self.model_time + min(self.particles.time_step) - self.evolve_stars(self.particles, end_time - self.model_time + self.particles.age) - self.model_time = end_time + delta_time = ( + end_time-self.model_time + if end_time + else 
0.99*min(self.particles.time_step) + ) + print(f"{delta_time=}") + for particle in self.particles: + particle.evolve_for(particle.age + delta_time) + self.model_time += delta_time class MetisseParticles(Particles): From c0042f6eed341881b0469a71158bb7fa01c6ddb2 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 4 Mar 2025 14:18:43 +0100 Subject: [PATCH 16/40] Prevent hanging when t_end > t_nuc --- src/amuse/community/metisse/interface.f90 | 46 ++++++++++++++++------- 1 file changed, 33 insertions(+), 13 deletions(-) diff --git a/src/amuse/community/metisse/interface.f90 b/src/amuse/community/metisse/interface.f90 index 6ffaa3a536..2856b3874c 100644 --- a/src/amuse/community/metisse/interface.f90 +++ b/src/amuse/community/metisse/interface.f90 @@ -61,15 +61,12 @@ function commit_parameters() real(c_double) :: zpars(20) commit_parameters=0 - write(*,*) "committing parameters" ! This will read the tracks - so need to have set the paths before call METISSE_zcnsts(initial_Z,zpars,'','', error) if (error/=0) return call assign_commons_main() - - write(*,*) "Number of tracks: ", number_of_tracks end function function commit_particles() @@ -375,21 +372,27 @@ function evolve_for(index_of_the_star, delta_t) implicit none integer :: index_of_the_star integer :: error - real(c_double) :: delta_t, time_step + real(c_double) :: delta_t, time_step, age, previous_time_step integer :: evolve_for evolve_for = 0 - write(*,*) 'evolving star ', index_of_the_star, ' for ', delta_t - do while (delta_t > 0) + !write(*,*) 'evolving star ', index_of_the_star, ' for ', delta_t + call star_system%get_time_step(index_of_the_star, previous_time_step, error) + ! When the previous time step is 0, the star can not be evolved forward anymore, so we stop. + ! Possibly, the tracks don't advance further than the current age of the star. + do while (delta_t > 0.0_c_double .and. 
previous_time_step > 0.0_c_double) call star_system%get_time_step(index_of_the_star, time_step, error) if (delta_t < time_step) then - write(*,*) 'setting time step of star ', index_of_the_star, ' to ', delta_t + !write(*,*) 'setting time step of star ', index_of_the_star, ' to ', delta_t call star_system%set_time_step(index_of_the_star, delta_t, error) time_step = delta_t end if evolve_for = evolve_one_step(index_of_the_star) if (evolve_for /= 0) return delta_t = delta_t - time_step + call star_system%get_age(index_of_the_star, age, error) + !write(*,*) "age, step: ", age, time_step + previous_time_step = time_step end do end function @@ -398,21 +401,39 @@ function evolve_one_step(index_of_the_star) integer :: index_of_the_star integer :: evolve_one_step integer :: error - real(c_double) :: time_step + real(c_double) :: time_step, nuclear_time_scale real(c_double) :: mass real(c_double) :: age type(track), pointer :: t + evolve_one_step = 0 call star_system%get_time_step(index_of_the_star, time_step, error) + if (time_step <= 0.0_c_double) then + evolve_one_step = 1 ! 1: cannot evolve forward anymore + return + end if call star_system%get_initial_mass(index_of_the_star, mass, error) call star_system%get_age(index_of_the_star, age, error) - call allocate_track(1, mass) - call evolv_metisse(mass, age + time_step, error, 1) + call allocate_track(1, mass) ! allocates tarr. mass is ignored... 
t => tarr(1) + nuclear_time_scale = t % nuc_time + if (age + time_step > nuclear_time_scale) then + time_step = nuclear_time_scale - age + call star_system%set_time_step(index_of_the_star, time_step, error) + write(*,*) "reaching end of the nuclear time scale, setting time step to: ", time_step + evolve_one_step = 1 + end if + call evolv_metisse(mass, age + time_step, error, 1) if (error == 0) then call star_system%set_mass(index_of_the_star, t % pars % mass, error) - call star_system%set_age(index_of_the_star, t % pars % age, error) - call star_system%set_time_step(index_of_the_star, t % pars % dt, error) + call star_system%set_age(& + index_of_the_star,& + t % pars % age,& ! METISSE uses Myr internally, we store years + error) + call star_system%set_time_step(& + index_of_the_star,& + t % pars % dt,& + error) call star_system%set_luminosity(index_of_the_star, t % pars % luminosity, error) call star_system%set_temperature(index_of_the_star, t % pars % Teff, error) call star_system%set_radius(index_of_the_star, t % pars % radius, error) @@ -424,7 +445,6 @@ function evolve_one_step(index_of_the_star) call star_system%set_spin(index_of_the_star, t % pars % bhspin, error) end if call dealloc_track() - evolve_one_step = 0 end function function evolve_model(t_end) From 8d7535841ed85b28323da2e0998c883d56cc1bf4 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 4 Mar 2025 14:18:56 +0100 Subject: [PATCH 17/40] remove python particleset --- src/amuse/community/metisse/interface.py | 96 +----------------------- 1 file changed, 3 insertions(+), 93 deletions(-) diff --git a/src/amuse/community/metisse/interface.py b/src/amuse/community/metisse/interface.py index 74cbd4136b..677f8dc52c 100644 --- a/src/amuse/community/metisse/interface.py +++ b/src/amuse/community/metisse/interface.py @@ -73,10 +73,6 @@ def get_CO_core_mass(index_of_the_star="i"): def get_main_sequence_lifetime(index_of_the_star="i"): returns (main_sequence_lifetime="d" | units.Myr) - 
@remote_function(can_handle_array=True) - def evolve_for(index_of_the_star="i", time_delta="d" | units.Myr): - returns () - # getters and setters for tracks # metallicity_dir (string) # metallicity_dir_he (string) @@ -431,7 +427,7 @@ def define_particle_sets(self, handler): handler.add_method("particles", "evolve_for") def evolve_model(self, end_time=None, keep_synchronous=True): - print("evolve_model", end_time, keep_synchronous) + print("evolve_model", end_time, keep_synchronous) if not keep_synchronous: for particle in self.particles: particle.evolve_one_step() @@ -443,93 +439,7 @@ def evolve_model(self, end_time=None, keep_synchronous=True): else 0.99*min(self.particles.time_step) ) print(f"{delta_time=}") - for particle in self.particles: + for i, particle in enumerate(self.particles): + print(f"{i} {particle.age} {particle.mass}") particle.evolve_for(particle.age + delta_time) self.model_time += delta_time - - -class MetisseParticles(Particles): - - def __init__(self, code_interface, storage=None): - Particles.__init__(self, storage=storage) - self._private.code_interface = code_interface - self.add_calculated_attribute( - "temperature", - self.calculate_effective_temperature, - ["luminosity", "radius"], - ) - self.add_function_attribute( - "evolve_one_step", self.particleset_evolve_one_step, self.evolve_one_step - ) - self.add_function_attribute( - "evolve_for", self.particleset_evolve_for, self.evolve_for - ) - - def calculate_effective_temperature(self, luminosity, radius): - return ( - (luminosity / (constants.four_pi_stefan_boltzmann * radius**2)) ** 0.25 - ).in_(units.K) - - def add_particles_to_store(self, keys, attributes=[], values=[]): - if len(keys) == 0: - return - - all_attributes = [] - all_attributes.extend(attributes) - all_values = [] - all_values.extend(values) - - mapping_from_attribute_to_default_value = { - "stellar_type": 1 | units.stellar_type, - "radius": 0 | units.RSun, - "luminosity": 0 | units.LSun, - "core_mass": 0 | units.MSun, - 
"CO_core_mass": 0 | units.MSun, - "core_radius": 0 | units.RSun, - "convective_envelope_mass": 0 | units.MSun, - "convective_envelope_radius": 0 | units.RSun, - "epoch": 0 | units.Myr, - "spin": 0 | units.yr**-1, - "main_sequence_lifetime": 0 | units.Myr, - "age": 0 | units.Myr, - } - - given_attributes = set(attributes) - - if "initial_mass" not in given_attributes: - index_of_mass_attibute = attributes.index("mass") - all_attributes.append("initial_mass") - all_values.append(values[index_of_mass_attibute] * 1.0) - - for attribute, default_value in mapping_from_attribute_to_default_value.items(): - if attribute not in given_attributes: - all_attributes.append(attribute) - all_values.append(default_value.as_vector_with_length(len(keys))) - - super().add_particles_to_store(keys, all_attributes, all_values) - - added_particles = ParticlesSubset(self, keys) - self._private.code_interface._evolve_particles(added_particles, 0 | units.yr) - - def evolve_one_step(self, particles, subset): - self._private.code_interface._evolve_particles( - subset.as_set(), subset.age + subset.time_step - ) - - def particleset_evolve_one_step(self, particles): - self._private.code_interface._evolve_particles( - particles, particles.age + particles.time_step - ) - - def evolve_for(self, particles, subset, delta_time): - self._private.code_interface._evolve_particles( - subset.as_set(), subset.age + delta_time - ) - - def particleset_evolve_for(self, particles, delta_time): - self._private.code_interface._evolve_particles( - particles, particles.age + delta_time - ) - - def get_defined_attribute_names(self): - return ["mass", "radius"] From 279e79d0b15c93653662af4c3841ae880cd29d05 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 4 Mar 2025 14:19:40 +0100 Subject: [PATCH 18/40] add units used, start with a small timestep --- src/amuse/community/metisse/storage.f90 | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/amuse/community/metisse/storage.f90 
b/src/amuse/community/metisse/storage.f90 index ff5f3e6d2a..6ec235a081 100644 --- a/src/amuse/community/metisse/storage.f90 +++ b/src/amuse/community/metisse/storage.f90 @@ -1,7 +1,11 @@ ! Storage module for stars in a stellar evolution code. ! Only used for storing and retrieving, has no checks/calculations. -! -! +! Units used: +! - time: julian years +! - mass: solar masses +! - radius: solar radii +! - luminosity: solar luminosities +! - temperature: Kelvin ! ! - Steven Rieder @@ -124,7 +128,7 @@ function new_star(self, initial_mass) result(new_id) self%star_array(i)%radius = 0.0_c_double self%star_array(i)%spin = 0.0_c_double self%star_array(i)%stellar_type = 0_c_int - self%star_array(i)%time_step = 0.0_c_double + self%star_array(i)%time_step = 1.0_c_double self%star_array(i)%temperature = 0.0_c_double self%next_star_id = new_id + 1 From a57a35f82212db54859305b0229775c86f2838b7 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 8 Apr 2025 15:40:15 +0200 Subject: [PATCH 19/40] fix allocation bug --- src/amuse/community/metisse/storage.f90 | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/amuse/community/metisse/storage.f90 b/src/amuse/community/metisse/storage.f90 index 6ec235a081..22b3fc8a71 100644 --- a/src/amuse/community/metisse/storage.f90 +++ b/src/amuse/community/metisse/storage.f90 @@ -165,8 +165,12 @@ subroutine resize(self, required_size) if (allocated(self%star_array)) deallocate(self%star_array) return end if - - current_size = size(self%star_array) + + if (allocated(self%star_array)) then + current_size = size(self%star_array) + else + current_size = 0 + end if if (required_size .lt. current_size) return new_capacity = current_size @@ -174,7 +178,7 @@ subroutine resize(self, required_size) new_capacity = max(100, int(new_capacity * 1.1)) end do - if (.not. allocated(self%star_array) .or. new_capacity > size(self%star_array)) then + if (.not. allocated(self%star_array) .or. 
new_capacity > current_size) then if (allocated(self%star_array)) then allocate(temp(current_size)) temp = self%star_array From 3fab0cc81fb2f33dfad8fef4204818d60a3402a7 Mon Sep 17 00:00:00 2001 From: Lourens Veen Date: Tue, 3 Jun 2025 10:26:37 +0200 Subject: [PATCH 20/40] Fix Ubuntu MPI hang --- .github/workflows/test-framework.yml | 59 ++++++++++++++-------------- support/setup/testing.sh | 1 - 2 files changed, 30 insertions(+), 30 deletions(-) diff --git a/.github/workflows/test-framework.yml b/.github/workflows/test-framework.yml index acb03c2217..dbf8b0be1c 100644 --- a/.github/workflows/test-framework.yml +++ b/.github/workflows/test-framework.yml @@ -2,11 +2,9 @@ name: Test AMUSE framework on: push: - branches: - - main pull_request: branches: - - main + - main workflow_dispatch: jobs: @@ -39,47 +37,50 @@ jobs: - name: Install dependencies run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake openmpi gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - # - name: Configure OpenMPI - # run: | - # mkdir -p "$HOME/.openmpi" - # echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - # echo "mpi_yield_when_idle = true" >>"$HOME/.openmpi/mca-params.conf" - # echo "btl_tcp_if_include = lo,eth0" >>"$HOME/.openmpi/mca-params.conf" - # mkdir -p "$HOME/.prte" - # echo "rmaps_default_mapping_policy = :oversubscribe" >>"$HOME/.prte/mca-params.conf" - # echo "prte_if_include = lo,eth0" >>"$HOME/.prte/mca-params.conf" + - name: Configure OpenMPI + if: ${{ matrix.os == 
'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + # These help with debugging, but produce lots of output + # echo "mpi_yield_when_idle = true" >>"$HOME/.openmpi/mca-params.conf" + # echo "mca_base_verbose = stdout,level:9" >> "$HOME/.openmpi/mca-params.conf" + # echo "mpi_comm_verbose = 100" >> "$HOME/.openmpi/mca-params.conf" + # echo "btl_base_verbose = 100" >> "$HOME/.openmpi/mca-params.conf" + # echo "hwloc_base_verbose = 100" >> "$HOME/.openmpi/mca-params.conf" + # echo "if_base_verbose = 100" >> "$HOME/.openmpi/mca-params.conf" + # echo "pmix_base_verbose = 100" >> "$HOME/.openmpi/mca-params.conf" + # + # These are for OpenMPI 5, which I can't get to work + # mkdir -p "$HOME/.prte" + # echo "rmaps_default_mapping_policy = :oversubscribe" >>"$HOME/.prte/mca-params.conf" + # echo "prte_if_include = lo,eth0" >>"$HOME/.prte/mca-params.conf" - name: Checkout uses: actions/checkout@v4 with: - fetch-depth: 100 + fetch-depth: 0 fetch-tags: true - name: Build framework run: | ./setup install amuse-framework - - name: Test framework - # env: - # OMPI_MCA_rmaps_base_oversubscribe: 1 - # PRTE_MCA_rmaps_base_oversubscribe: 1 - # PRTE_MCA_rmaps_default_mapping_policy: ":oversubscribe" - # OMPI_MCA_mpi_yield_when_idle: 1 - # OMPI_MCA_btl_tcp_if_include: lo,eth0 - # PRTE_MCA_btl_tcp_if_include: lo,eth0 - # PRTE_MCA_if_include: lo,eth0 - # OMPI_MCA_pmix_server_max_wait: 10 + - name: Ensure we test only the installed package run: | - ./setup test amuse-framework + ./setup distclean - - name: Save build logs + - name: Test framework run: | - tar czf logs-${{ matrix.os }}.tar.gz support/logs + ./setup test amuse-framework - name: Archive build logs + if: always() uses: actions/upload-artifact@v4 with: - name: logs-${{ matrix.os }}.tar.gz - path: logs-${{ matrix.os }}.tar.gz + name: logs-${{ matrix.os }} + path: ${{ 
github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/support/setup/testing.sh b/support/setup/testing.sh index c948145ea7..a04b6165f3 100644 --- a/support/setup/testing.sh +++ b/support/setup/testing.sh @@ -50,7 +50,6 @@ test_framework() { cd src/tests && pytest --import-mode=append core_tests compile_tests --ignore compile_tests/java_implementation -k 'not TestCDistributedImplementationInterface and not TestAsyncDistributed' ${PYTEST_OPTS} else cd src/tests && pytest --import-mode=append core_tests compile_tests --ignore compile_tests/java_implementation -k 'not TestCDistributedImplementationInterface and not TestAsyncDistributed and not noci' ${PYTEST_OPTS} - fi echo $? >"../../${ec_file}" From d3f86bfb8c1051430449af37151ce1c862a799bc Mon Sep 17 00:00:00 2001 From: Lourens Veen Date: Tue, 10 Jun 2025 17:20:20 +0200 Subject: [PATCH 21/40] Add ext tests --- .github/workflows/test-ext.yml | 92 +++++++++++++++++++ src/amuse/ext/concurrent.py | 92 +++++++++---------- .../ext_tests/test_distributed_particles.py | 10 +- support/setup/testing.sh | 2 +- 4 files changed, 144 insertions(+), 52 deletions(-) create mode 100644 .github/workflows/test-ext.yml diff --git a/.github/workflows/test-ext.yml b/.github/workflows/test-ext.yml new file mode 100644 index 0000000000..8c022dcda3 --- /dev/null +++ b/.github/workflows/test-ext.yml @@ -0,0 +1,92 @@ +name: Test AMUSE ic and ext + +on: + push: + paths: + - .github/workflows/test-ext.yml + - src/amuse/couple + - src/amuse/ext + - src/amuse/ic + - src/amuse/plot + pull_request: + branches: + - main + paths: + - .github/workflows/test-ext.yml + - src/amuse/couple + - src/amuse/ext + - src/amuse/ic + - src/amuse/plot + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: 
true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' scipy pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_default_mapping_policy = :oversubscribe" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build framework + run: | + ./setup install amuse-framework + + - name: Install required codes + run: | + ./setup install amuse-bhtree amuse-bse amuse-evtwin amuse-fi amuse-fractalcluster amuse-gadget2 amuse-galactics amuse-halogen amuse-hermite amuse-hop amuse-kepler amuse-mesa-r2208 amuse-ph4 amuse-phigrape amuse-seba amuse-sse + + - name: Ensure we test only the installed packages + run: | + ./setup distclean + + - name: Test ext + env: + PYTEST_OPTS: "-s -v -x" + run: | + ./setup test amuse-ext + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/src/amuse/ext/concurrent.py b/src/amuse/ext/concurrent.py index db003ddcf8..95017e5346 100644 --- a/src/amuse/ext/concurrent.py +++ b/src/amuse/ext/concurrent.py @@ -12,46 +12,46 @@ from mpi4py import MPI except ImportError: MPI = None - + class ConcurrentProcesses(object): pass - + class MPIConcurrentProcesses(object): - + ROOT = 0 @classmethod def is_available(self): 
return not MPI is None - + def init(self): MpiChannel.ensure_mpi_initialized() self.mpi_comm = MPI.COMM_WORLD self.shared_particles_ids = set([]) - + def share(self, particles = None): sendbuffer = numpy.zeros(1, dtype='int64') if self.mpi_comm == self.ROOT: new_id = random.getrandbits(64) sendbuffer[0] = new_id - + self.mpi_comm.Bcast([sendbuffer, MPI.INTEGER8], root=self.ROOT) shared_id = sendbuffer[0] - + self.shared_particles_ids.add(shared_id) - + return MPISharedParticlesProxy(particles, shared_id, self) - + def is_on_root(self): return self.mpi_comm.rank == self.ROOT - + def on_root(self, callable): if self.mpi_comm.rank == self.ROOT: callable() - + def not_on_root(self, callable): if self.mpi_comm.rank != self.ROOT: callable() - + def call(self, on_root, not_on_root): if self.mpi_comm.rank == self.ROOT: on_root() @@ -63,51 +63,51 @@ def __init__(self, particles, shared_id, concurrent_processes): self.shared_id = shared_id self.particles = particles self.concurrent_processes = concurrent_processes - + def __getattr__(self, name): return self.particles.__getattr__(name) - + def distribute(self): self.concurrent_processes.call( self.distribute_on_root, self.distribute_not_on_root ) - + def distribute_on_root(self): attribute_names = self.particles.get_attribute_names_defined_in_store() - + values = self.particles.get_values_in_store( - self.particles.get_all_indices_in_store(), + self.particles.get_all_indices_in_store(), attribute_names ) units = [x.unit for x in values] units_dump = pickle.dumps(units) attributes_dump = pickle.dumps(attribute_names) - - units_dump = numpy.fromstring(units_dump,dtype='uint8') - attributes_dump = numpy.fromstring(attributes_dump,dtype='uint8') - + + units_dump = numpy.frombuffer(units_dump, dtype='uint8') + attributes_dump = numpy.frombuffer(attributes_dump, dtype='uint8') + sendbuffer = numpy.zeros(4, dtype='int64') sendbuffer[0] = self.shared_id sendbuffer[1] = len(self.particles) sendbuffer[2] = len(units_dump) 
sendbuffer[3] = len(attributes_dump) - + self.concurrent_processes.mpi_comm.Bcast([sendbuffer, MPI.INTEGER8], root=self.concurrent_processes.ROOT) - + sendbuffer = self.particles.key self.concurrent_processes.mpi_comm.Bcast([sendbuffer, MPI.INTEGER8], root=self.concurrent_processes.ROOT) - + attribute_names = self.particles.get_attribute_names_defined_in_store() - + self.concurrent_processes.mpi_comm.Bcast([units_dump, MPI.CHARACTER], root=self.concurrent_processes.ROOT) self.concurrent_processes.mpi_comm.Bcast([attributes_dump, MPI.CHARACTER], root=self.concurrent_processes.ROOT) - + for x, unit in zip(values, units): value = x.value_in(unit) self.concurrent_processes.mpi_comm.Bcast([value, MPI.DOUBLE], root=self.concurrent_processes.ROOT) - - + + def distribute_not_on_root(self): sendbuffer = numpy.zeros(4, dtype='int64') self.concurrent_processes.mpi_comm.Bcast([sendbuffer, MPI.INTEGER8], root=self.concurrent_processes.ROOT) @@ -115,16 +115,16 @@ def distribute_not_on_root(self): number_of_particles = sendbuffer[1] units_dump_len = sendbuffer[2] attributes_dump_len = sendbuffer[3] - + sendbuffer = numpy.zeros(number_of_particles, dtype='int64') self.concurrent_processes.mpi_comm.Bcast([sendbuffer, MPI.INTEGER8], root=self.concurrent_processes.ROOT) - + units_dump = numpy.zeros(units_dump_len, dtype='uint8') self.concurrent_processes.mpi_comm.Bcast([units_dump, MPI.CHARACTER], root=self.concurrent_processes.ROOT) - + attributes_dump = numpy.zeros(attributes_dump_len, dtype='uint8') self.concurrent_processes.mpi_comm.Bcast([attributes_dump, MPI.CHARACTER], root=self.concurrent_processes.ROOT) - + units = pickle.loads(units_dump.tobytes()) attributes = pickle.loads(attributes_dump.tobytes()) values = [] @@ -132,26 +132,26 @@ def distribute_not_on_root(self): value = numpy.zeros(number_of_particles, dtype='float64') self.concurrent_processes.mpi_comm.Bcast([value, MPI.DOUBLE], root=self.concurrent_processes.ROOT) values.append(x.new_quantity(value)) - + 
self.particles = Particles(keys = sendbuffer) self.particles.set_values_in_store(self.particles.get_all_indices_in_store(), attributes, values) - + def potential_energy(self, smoothing_length_squared = zero, G = constants.G): mpi_comm = self.concurrent_processes.mpi_comm - + mass = self.mass x_vector = self.x y_vector = self.y z_vector = self.z sum_of_energies = zero - + number_of_particles = len(self) block_size = (number_of_particles - 1) // mpi_comm.size start = mpi_comm.rank * block_size if mpi_comm.rank == (mpi_comm.size - 1): block_size = (number_of_particles - 1) - start - + for i in range(start, start + block_size): x = x_vector[i] y = y_vector[i] @@ -165,24 +165,24 @@ def potential_energy(self, smoothing_length_squared = zero, G = constants.G): energy_of_this_particle = (m_m / dr).sum() sum_of_energies -= energy_of_this_particle - + value = sum_of_energies.value_in(sum_of_energies.unit) - # for not assume unit is the same accross processes, + # for not assume unit is the same accross processes, # so units are not distributed! 
- + input = numpy.zeros(1, dtype='float64') output = numpy.zeros(1, dtype='float64') - + input[0] = value - + mpi_comm.Reduce( - [input, MPI.DOUBLE], + [input, MPI.DOUBLE], [output, MPI.DOUBLE], - op=MPI.SUM, + op=MPI.SUM, root=0 ) - + return G * sum_of_energies.unit.new_quantity(output[0]) - + def __len__(self): return len(self.particles) diff --git a/src/tests/ext_tests/test_distributed_particles.py b/src/tests/ext_tests/test_distributed_particles.py index 4cdf976dc5..bc047b7172 100644 --- a/src/tests/ext_tests/test_distributed_particles.py +++ b/src/tests/ext_tests/test_distributed_particles.py @@ -8,7 +8,7 @@ from amuse.units import nbody_system from amuse.units import units -from amuse.community import * +from amuse.units.quantities import is_quantity from amuse.support.interface import InCodeComponentImplementation from amuse.io import read_set_from_file, write_set_to_file @@ -630,7 +630,7 @@ def test9(self): self.assertEqual(x[2:6].mass, [3, 10, 11, 12]) - def test10(self): + def test10_noci(self): x = DistributedParticles( size=40, number_of_workers=4 @@ -760,7 +760,7 @@ def test16(self): self.assertEqual(y.index, x.index) self.assertEqual(y.mass, x.mass) - def test17(self): + def test17_noci(self): test_results_path = self.get_path_to_results() filebase = os.path.join(test_results_path, "test_distributed_sets") for i in [0, 1]: @@ -789,7 +789,7 @@ def test17(self): # number of workers > number of files # still problematic, because of non-existing attributes if nothing read - def test18(self): + def test18_noci(self): test_results_path = self.get_path_to_results() filebase = os.path.join(test_results_path, "test_distributed_sets") for i in [0, 1]: @@ -816,7 +816,7 @@ def test18(self): self.assertEqual(x.index, z.index) self.assertEqual(x.mass, z.mass) - def test19(self): + def test19_noci(self): from .test_distributed_particles import distributed_king_generator from amuse.ic.kingmodel import MakeKingModel diff --git a/support/setup/testing.sh 
b/support/setup/testing.sh index a04b6165f3..892fcaba8e 100644 --- a/support/setup/testing.sh +++ b/support/setup/testing.sh @@ -80,7 +80,7 @@ test_amuse_ext() { log_file="$(log_file test amuse-ext)" ( - cd src/tests && pytest ext_tests --import-mode=append ticket_tests ${PYTEST_OPTS} -k "${bad_ext_tests}" + cd src/tests && pytest ext_tests ticket_tests --import-mode=append ${PYTEST_OPTS} -s -v -x -k "not noci and ${bad_ext_tests}" echo $? >"../../${ec_file}" ) 2>&1 | tee "${log_file}" From a438d1876a0d5ebbb59906134b3471f11f6b8f80 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 19 Jun 2025 14:10:21 +0200 Subject: [PATCH 22/40] move files to new structure --- src/amuse/community/metisse/interface.f90 | 643 ---------------- .../metisse => amuse_metisse}/Makefile | 6 +- src/amuse_metisse/interface.f90 | 686 ++++++++++++++++++ .../metisse => amuse_metisse}/interface.py | 25 +- .../metisse => amuse_metisse}/storage.f90 | 507 ++++++------- 5 files changed, 958 insertions(+), 909 deletions(-) delete mode 100644 src/amuse/community/metisse/interface.f90 rename src/{amuse/community/metisse => amuse_metisse}/Makefile (94%) create mode 100644 src/amuse_metisse/interface.f90 rename src/{amuse/community/metisse => amuse_metisse}/interface.py (94%) rename src/{amuse/community/metisse => amuse_metisse}/storage.f90 (58%) diff --git a/src/amuse/community/metisse/interface.f90 b/src/amuse/community/metisse/interface.f90 deleted file mode 100644 index 2856b3874c..0000000000 --- a/src/amuse/community/metisse/interface.f90 +++ /dev/null @@ -1,643 +0,0 @@ -module metisseInterface - use iso_c_binding - use store_stars, only: stars - use track_support - use z_support - implicit none - type(stars) :: star_system - real(c_double), allocatable :: mass_array(:) - - contains - - ! standard AMUSE interface functions: - ! initialize - ! commit_parameters - ! commit_particles - ! 
cleanup_code - - subroutine assign_commons_amuse() - - end subroutine - - function initialize_code() - implicit none - integer :: initialize_code - - ! Need to define this front end for METISSE - call initialize_front_end("AMUSE") - initial_Z = -1.0_c_double - write_output_to_file = .false. - - initialize_code=0 - end function - - function recommit_parameters() - implicit none - integer :: recommit_parameters - recommit_parameters=0 - end function - - function recommit_particles() - implicit none - integer :: number_of_particles - integer :: error - integer :: recommit_particles - deallocate(mass_array) - error = get_number_of_particles(number_of_particles) - allocate(mass_array(number_of_particles)) - recommit_particles=0 - end function - - function cleanup_code() - implicit none - integer :: cleanup_code - cleanup_code=0 - end function - - function commit_parameters() - implicit none - integer :: commit_parameters - integer :: error - real(c_double) :: zpars(20) - - commit_parameters=0 - - ! This will read the tracks - so need to have set the paths before - call METISSE_zcnsts(initial_Z,zpars,'','', error) - if (error/=0) return - - call assign_commons_main() - end function - - function commit_particles() - implicit none - integer :: commit_particles - integer :: number_of_particles - integer :: i - integer :: error - real(c_double) :: mass - - error = get_number_of_particles(number_of_particles) - - allocate(mass_array(number_of_particles)) - - commit_particles=0 - end function - - ! setters / getters for tracks - ! metallicity_dir (string) - ! metallicity_dir_he (string) - ! z_accuracy_limit (real) - ! 
mass_accuracy_limit (real) - - function set_metallicity_dir(metallicity_dir_in) - implicit none - character(len=256) :: metallicity_dir_in - integer :: set_metallicity_dir - METALLICITY_DIR = metallicity_dir_in - set_metallicity_dir = 0 - end function - - function get_metallicity_dir(metallicity_dir_out) - implicit none - character(len=256) :: metallicity_dir_out - integer :: get_metallicity_dir - metallicity_dir_out = METALLICITY_DIR - get_metallicity_dir = 0 - end function - - function set_metallicity_dir_he(metallicity_dir_he_in) - implicit none - character(len=256) :: metallicity_dir_he_in - integer :: set_metallicity_dir_he - METALLICITY_DIR_HE = metallicity_dir_he_in - set_metallicity_dir_he = 0 - end function - - function get_metallicity_dir_he(metallicity_dir_he_out) - implicit none - character(len=256) :: metallicity_dir_he_out - integer :: get_metallicity_dir_he - metallicity_dir_he_out = METALLICITY_DIR_HE - get_metallicity_dir_he = 0 - end function - - function set_z_accuracy_limit(z_accuracy_limit_in) - implicit none - real(c_double) :: z_accuracy_limit_in - integer :: set_z_accuracy_limit - z_accuracy_limit = z_accuracy_limit_in - set_z_accuracy_limit = 0 - end function - - function get_z_accuracy_limit(z_accuracy_limit_out) - implicit none - real(c_double) :: z_accuracy_limit_out - integer :: get_z_accuracy_limit - z_accuracy_limit_out = z_accuracy_limit - get_z_accuracy_limit = 0 - end function - - function set_mass_accuracy_limit(mass_accuracy_limit_in) - implicit none - real(c_double) :: mass_accuracy_limit_in - integer :: set_mass_accuracy_limit - mass_accuracy_limit = mass_accuracy_limit_in - set_mass_accuracy_limit = 0 - end function - - function get_mass_accuracy_limit(mass_accuracy_limit_out) - implicit none - real(c_double) :: mass_accuracy_limit_out - integer :: get_mass_accuracy_limit - mass_accuracy_limit_out = mass_accuracy_limit - get_mass_accuracy_limit = 0 - end function - - ! setters / getters for misc controls - ! verbose (bool) - ! 
construct_postagb_track (bool) - - function set_verbose(verbose_in) - implicit none - logical :: verbose_in - integer :: set_verbose - verbose = verbose_in - set_verbose = 0 - end function - - function get_verbose(verbose_out) - implicit none - logical :: verbose_out - integer :: get_verbose - verbose_out = verbose - get_verbose = 0 - end function - - function set_construct_postagb_track(construct_postagb_track_in) - implicit none - logical :: construct_postagb_track_in - integer :: set_construct_postagb_track - construct_postagb_track = construct_postagb_track_in - set_construct_postagb_track = 0 - end function - - function get_construct_postagb_track(construct_postagb_track_out) - implicit none - logical :: construct_postagb_track_out - integer :: get_construct_postagb_track - construct_postagb_track_out = construct_postagb_track - get_construct_postagb_track = 0 - end function - - ! setters/getters for parameters - ! initial_metallicity(real) - ! wd_mass_scheme (string, 256) - ! use_initial_final_mass_relation(bool) - ! bhns_mass_scheme (string, 256) - ! max_ns_mass (real) - ! 
allow_electron_capture (bool) - - function set_initial_metallicity(initial_metallicity_in) - implicit none - real(c_double) :: initial_metallicity_in - integer :: set_initial_metallicity - initial_Z = initial_metallicity_in - set_initial_metallicity = 0 - end function - - function get_initial_metallicity(initial_metallicity_out) - implicit none - real(c_double) :: initial_metallicity_out - integer :: get_initial_metallicity - initial_metallicity_out = initial_Z - get_initial_metallicity = 0 - end function - - function set_wd_mass_scheme(wd_mass_scheme_in) - implicit none - character(len=256) :: wd_mass_scheme_in - integer :: set_wd_mass_scheme - WD_mass_scheme = wd_mass_scheme_in - set_wd_mass_scheme = 0 - end function - - function get_wd_mass_scheme(wd_mass_scheme_out) - implicit none - character(len=256) :: wd_mass_scheme_out - integer :: get_wd_mass_scheme - wd_mass_scheme_out = WD_mass_scheme - get_wd_mass_scheme = 0 - end function - - function set_use_initial_final_mass_relation(use_initial_final_mass_relation_in) - implicit none - logical :: use_initial_final_mass_relation_in - integer :: set_use_initial_final_mass_relation - use_initial_final_mass_relation = use_initial_final_mass_relation_in - set_use_initial_final_mass_relation = 0 - end function - - function get_use_initial_final_mass_relation(use_initial_final_mass_relation_out) - implicit none - logical :: use_initial_final_mass_relation_out - integer :: get_use_initial_final_mass_relation - use_initial_final_mass_relation_out = use_initial_final_mass_relation - get_use_initial_final_mass_relation = 0 - end function - - function set_bhns_mass_scheme(bhns_mass_scheme_in) - implicit none - character(len=256) :: bhns_mass_scheme_in - integer :: set_bhns_mass_scheme - BHNS_mass_scheme = bhns_mass_scheme_in - set_bhns_mass_scheme = 0 - end function - - function get_bhns_mass_scheme(bhns_mass_scheme_out) - implicit none - character(len=256) :: bhns_mass_scheme_out - integer :: get_bhns_mass_scheme - 
bhns_mass_scheme_out = BHNS_mass_scheme - get_bhns_mass_scheme = 0 - end function - - function set_max_ns_mass(max_ns_mass_in) - implicit none - real(c_double) :: max_ns_mass_in - integer :: set_max_ns_mass - max_NS_mass = max_ns_mass_in - set_max_ns_mass = 0 - end function - - function get_max_ns_mass(max_ns_mass_out) - implicit none - real(c_double) :: max_ns_mass_out - integer :: get_max_ns_mass - max_ns_mass_out = max_NS_mass - get_max_ns_mass = 0 - end function - - function set_allow_electron_capture(allow_electron_capture_in) - implicit none - logical :: allow_electron_capture_in - integer :: set_allow_electron_capture - allow_electron_capture = allow_electron_capture_in - set_allow_electron_capture = 0 - end function - - function get_allow_electron_capture(allow_electron_capture_out) - implicit none - logical :: allow_electron_capture_out - integer :: get_allow_electron_capture - allow_electron_capture_out = allow_electron_capture - get_allow_electron_capture = 0 - end function - - ! setters/getters for timestep control - ! 
pts_1 to pts_3 (real) - - function set_time_step_pts_1(pts_1_in) - implicit none - real(c_double) :: pts_1_in - integer :: set_time_step_pts_1 - pts_1 = pts_1_in - set_time_step_pts_1 = 0 - end function - - function get_time_step_pts_1(pts_1_out) - implicit none - real(c_double) :: pts_1_out - integer :: get_time_step_pts_1 - pts_1_out = pts_1 - get_time_step_pts_1 = 0 - end function - - function set_time_step_pts_2(pts_2_in) - implicit none - real(c_double) :: pts_2_in - integer :: set_time_step_pts_2 - pts_2 = pts_2_in - set_time_step_pts_2 = 0 - end function - - function get_time_step_pts_2(pts_2_out) - implicit none - real(c_double) :: pts_2_out - integer :: get_time_step_pts_2 - pts_2_out = pts_2 - get_time_step_pts_2 = 0 - end function - - function set_time_step_pts_3(pts_3_in) - implicit none - real(c_double) :: pts_3_in - integer :: set_time_step_pts_3 - pts_3 = pts_3_in - set_time_step_pts_3 = 0 - end function - - function get_time_step_pts_3(pts_3_out) - implicit none - real(c_double) :: pts_3_out - integer :: get_time_step_pts_3 - pts_3_out = pts_3 - get_time_step_pts_3 = 0 - end function - - ! particle management: - ! new_particle, delete_particle - - function new_particle(index_of_the_particle, mass) - implicit none - integer, intent(inout) :: index_of_the_particle - real(c_double), intent(inout) :: mass - integer :: new_particle - index_of_the_particle = star_system%new_star(mass) - end function - - function delete_star(index_of_the_star) - implicit none - integer :: index_of_the_star - integer :: delete_star - call star_system%remove_star(index_of_the_star) - delete_star = 0 - end function - - ! evolving stars: - ! evolve_for, evolve_one_step - ! 
evolve_model, evolve_stars - - function evolve_for(index_of_the_star, delta_t) - implicit none - integer :: index_of_the_star - integer :: error - real(c_double) :: delta_t, time_step, age, previous_time_step - integer :: evolve_for - evolve_for = 0 - - !write(*,*) 'evolving star ', index_of_the_star, ' for ', delta_t - call star_system%get_time_step(index_of_the_star, previous_time_step, error) - ! When the previous time step is 0, the star can not be evolved forward anymore, so we stop. - ! Possibly, the tracks don't advance further than the current age of the star. - do while (delta_t > 0.0_c_double .and. previous_time_step > 0.0_c_double) - call star_system%get_time_step(index_of_the_star, time_step, error) - if (delta_t < time_step) then - !write(*,*) 'setting time step of star ', index_of_the_star, ' to ', delta_t - call star_system%set_time_step(index_of_the_star, delta_t, error) - time_step = delta_t - end if - evolve_for = evolve_one_step(index_of_the_star) - if (evolve_for /= 0) return - delta_t = delta_t - time_step - call star_system%get_age(index_of_the_star, age, error) - !write(*,*) "age, step: ", age, time_step - previous_time_step = time_step - end do - end function - - function evolve_one_step(index_of_the_star) - implicit none - integer :: index_of_the_star - integer :: evolve_one_step - integer :: error - real(c_double) :: time_step, nuclear_time_scale - real(c_double) :: mass - real(c_double) :: age - type(track), pointer :: t - evolve_one_step = 0 - - call star_system%get_time_step(index_of_the_star, time_step, error) - if (time_step <= 0.0_c_double) then - evolve_one_step = 1 ! 1: cannot evolve forward anymore - return - end if - call star_system%get_initial_mass(index_of_the_star, mass, error) - call star_system%get_age(index_of_the_star, age, error) - call allocate_track(1, mass) ! allocates tarr. mass is ignored... 
- t => tarr(1) - nuclear_time_scale = t % nuc_time - if (age + time_step > nuclear_time_scale) then - time_step = nuclear_time_scale - age - call star_system%set_time_step(index_of_the_star, time_step, error) - write(*,*) "reaching end of the nuclear time scale, setting time step to: ", time_step - evolve_one_step = 1 - end if - call evolv_metisse(mass, age + time_step, error, 1) - if (error == 0) then - call star_system%set_mass(index_of_the_star, t % pars % mass, error) - call star_system%set_age(& - index_of_the_star,& - t % pars % age,& ! METISSE uses Myr internally, we store years - error) - call star_system%set_time_step(& - index_of_the_star,& - t % pars % dt,& - error) - call star_system%set_luminosity(index_of_the_star, t % pars % luminosity, error) - call star_system%set_temperature(index_of_the_star, t % pars % Teff, error) - call star_system%set_radius(index_of_the_star, t % pars % radius, error) - call star_system%set_epoch(index_of_the_star, t % pars % epoch, error) - call star_system%set_core_mass(index_of_the_star, t % pars % core_mass, error) - call star_system%set_core_radius(index_of_the_star, t % pars % core_radius, error) - call star_system%set_stellar_type(index_of_the_star, t % pars % phase, error) - call star_system%set_co_core_mass(index_of_the_star, t % pars % McCO, error) - call star_system%set_spin(index_of_the_star, t % pars % bhspin, error) - end if - call dealloc_track() - end function - - function evolve_model(t_end) - implicit none - real(c_double) :: t_end - integer :: evolve_model - integer :: number_of_particles - integer :: error - integer :: i - real(c_double) :: mass - - call star_system%get_number_of_stars(number_of_particles) - do i = 1, number_of_particles - call star_system%get_initial_mass(i, mass, error) - call allocate_track(1,mass) - call evolv_metisse(mass, t_end, error, 1) - if (error /= 0) then - call star_system%set_age(i, t_end, error) - call star_system%set_mass(i, mass, error) - end if - call dealloc_track() - 
end do - - evolve_model = 0 - end function - - function evolve_stars(indices_of_stars, delta_time, error, n) - implicit none - integer, intent(in) :: n - integer :: error - integer, intent(in), dimension(n) :: indices_of_stars - real(c_double) :: delta_time - integer :: evolve_stars - integer :: i - integer :: number_of_stars_to_evolve - - number_of_stars_to_evolve = size(indices_of_stars) - - do i = 1, number_of_stars_to_evolve - error = evolve_for(indices_of_stars(i), delta_time) - if (error /= 0) then - evolve_stars = error - return - end if - end do - - evolve_stars = 0 - end function - - function get_age(index_of_the_star, age) - implicit none - integer :: index_of_the_star - real(c_double) :: age - integer :: get_age - call star_system%get_age(index_of_the_star, age, get_age) - age = age * 1.0d+06 - end function - - function get_luminosity(index_of_the_star, luminosity) - implicit none - integer :: index_of_the_star - real(c_double) :: luminosity - integer :: get_luminosity - call star_system%get_luminosity(index_of_the_star, luminosity, get_luminosity) - end function - - function get_mass(index_of_the_star, mass) - implicit none - integer :: index_of_the_star - real(c_double) :: mass - integer :: get_mass - call star_system%get_mass(index_of_the_star, mass, get_mass) - end function - - function get_metallicity(metallicity) - implicit none - real(c_double) :: metallicity - integer :: get_metallicity - ! 
what to do here depends on whether metallicity can be set for individual stars or only globally - get_metallicity=0 - end function - - function get_epoch(index_of_the_star, epoch) - implicit none - integer :: index_of_the_star - real(c_double) :: epoch - integer :: get_epoch - call star_system%get_epoch(index_of_the_star, epoch, get_epoch) - end function - - function get_core_mass(index_of_the_star, core_mass) - implicit none - integer :: index_of_the_star - real(c_double) :: core_mass - integer :: get_core_mass - call star_system%get_core_mass(index_of_the_star, core_mass, get_core_mass) - end function - - function get_core_radius(index_of_the_star, core_radius) - implicit none - integer :: index_of_the_star - real(c_double) :: core_radius - integer :: get_core_radius - call star_system%get_core_radius(index_of_the_star, core_radius, get_core_radius) - end function - - function get_convective_envelope_mass(index_of_the_star, convective_envelope_mass) - implicit none - integer :: index_of_the_star - real(c_double) :: convective_envelope_mass - integer :: get_convective_envelope_mass - call star_system%get_convective_envelope_mass(index_of_the_star, convective_envelope_mass, get_convective_envelope_mass) - end function - - function get_convective_envelope_radius(index_of_the_star, convective_envelope_radius) - implicit none - integer :: index_of_the_star - real(c_double) :: convective_envelope_radius - integer :: get_convective_envelope_radius - call star_system%get_convective_envelope_radius(index_of_the_star, convective_envelope_radius, get_convective_envelope_radius) - end function - - function get_CO_core_mass(index_of_the_star, CO_core_mass) - implicit none - integer :: index_of_the_star - real(c_double) :: CO_core_mass - integer :: get_CO_core_mass - call star_system%get_CO_core_mass(index_of_the_star, CO_core_mass, get_CO_core_mass) - end function - - function get_main_sequence_lifetime(index_of_the_star, main_sequence_lifetime) - implicit none - integer :: 
index_of_the_star - real(c_double) :: main_sequence_lifetime - integer :: get_main_sequence_lifetime - call star_system%get_main_sequence_lifetime(index_of_the_star, main_sequence_lifetime, get_main_sequence_lifetime) - end function - - function get_number_of_particles(number_of_particles) - implicit none - integer :: number_of_particles - integer :: get_number_of_particles - call star_system%get_number_of_stars(number_of_particles) - get_number_of_particles = 0 - end function - - function get_radius(index_of_the_star, radius) - implicit none - integer :: index_of_the_star - real(c_double) :: radius - integer :: get_radius - call star_system%get_radius(index_of_the_star, radius, get_radius) - end function - - function get_stellar_type(index_of_the_star, stellar_type) - implicit none - integer :: index_of_the_star, stellar_type - integer :: get_stellar_type - call star_system%get_stellar_type(index_of_the_star, stellar_type, get_stellar_type) - end function - - function get_temperature(index_of_the_star, temperature) - implicit none - integer :: index_of_the_star - real(c_double) :: temperature - integer :: get_temperature - call star_system%get_temperature(index_of_the_star, temperature, get_temperature) - end function - - function get_time_step(index_of_the_star, time_step) - implicit none - integer :: index_of_the_star - real(c_double) :: time_step - integer :: get_time_step - call star_system%get_time_step(index_of_the_star, time_step, get_time_step) - time_step = time_step * 1.0d+06 - end function - - function get_initial_mass(index_of_the_star, mass) - implicit none - integer :: index_of_the_star - double precision :: mass - integer :: get_initial_mass - call star_system%get_initial_mass(index_of_the_star, mass, get_initial_mass) - end function - - function set_metallicity(metallicity) - implicit none - real(c_double) :: metallicity - integer :: set_metallicity - set_metallicity=0 - end function - - -end module - diff --git 
a/src/amuse/community/metisse/Makefile b/src/amuse_metisse/Makefile similarity index 94% rename from src/amuse/community/metisse/Makefile rename to src/amuse_metisse/Makefile index 6dddff4634..61ca4cffa6 100644 --- a/src/amuse/community/metisse/Makefile +++ b/src/amuse_metisse/Makefile @@ -29,11 +29,10 @@ src/metisse-$(VERSION): metisse.tar.gz # Building the code into a static library -##### Remove anything not needed ##### DEPFLAGS += $(STOPCOND_CFLAGS) $(STOPCONDMPI_CFLAGS) $(AMUSE_MPI_CFLAGS) DEPFLAGS += $(FORSOCKETS_CFLAGS) $(SIMPLE_HASH_CFLAGS) $(G6LIB_CFLAGS) DEPFLAGS += $(OPENMP_FFLAGS) -FCFLAGS += $(DEPFLAGS) +FCFLAGS += $(DEPFLAGS) -ffpe-summary=all -fbacktrace -fcheck=all -g -Wall LDLIBS += -lm $(STOPCOND_LIBS) $(STOPCONDMPI_LIBS) $(AMUSE_MPI_LIBS) LDLIBS += $(FORSOCKETS_LIBS) @@ -47,7 +46,6 @@ src/METISSE/$(CODELIB): | src/METISSE/make # Building the workers -##### Keep if Fortran worker ##### metisse_worker.f90: interface.py amusifier --type=f90 interface.py MetisseInterface -o $@ @@ -65,10 +63,8 @@ metisse_worker: interface.o metisse_worker.o storage.o src/METISSE/$(CODELIB) # Which packages contain which workers? -##### Add and/or remove as needed ##### amuse-metisse_contains: metisse_worker - # Building and installing packages develop-%: %_contains support/shared/uninstall.sh $* diff --git a/src/amuse_metisse/interface.f90 b/src/amuse_metisse/interface.f90 new file mode 100644 index 0000000000..8c4e177f2d --- /dev/null +++ b/src/amuse_metisse/interface.f90 @@ -0,0 +1,686 @@ +module metisseInterface + use iso_c_binding + use store_stars, only: stars + use track_support + use z_support + implicit none + type(stars):: star_system + real(c_double), allocatable:: mass_array(:) + +contains + + ! standard AMUSE interface functions: + ! initialize + ! commit_parameters + ! commit_particles + ! 
cleanup_code + + subroutine assign_commons_amuse() + + end subroutine + + function initialize_code() + implicit none + integer:: initialize_code + integer:: error + + ! Need to define this front end for METISSE + call initialize_front_end("AMUSE") + error = set_defaults() + + initialize_code = 0 + end function + + function set_defaults() + implicit none + integer:: set_defaults + write_output_to_file = .false. + pts_1 = 0.05_c_double + pts_2 = 0.01_c_double + pts_3 = 0.02_c_double + WD_mass_scheme = "Modified_mestel" + BHNS_mass_scheme = "Belczynski2008" + max_age = 1.2e4_c_double + initial_Z = 0.02_c_double + !verbose = .true. + + set_defaults = 0 + end function + + function recommit_parameters() + implicit none + integer:: recommit_parameters + recommit_parameters = 0 + end function + + function recommit_particles() + implicit none + integer:: number_of_particles + integer:: error + integer:: recommit_particles + deallocate (mass_array) + error = get_number_of_particles(number_of_particles) + allocate (mass_array(number_of_particles)) + recommit_particles = 0 + end function + + function cleanup_code() + implicit none + integer:: cleanup_code + cleanup_code = 0 + end function + + function commit_parameters() + implicit none + integer:: commit_parameters + integer:: error + real(c_double):: zpars(20) + + commit_parameters = 0 + + write(*,*) "Calling commit_parameters" + + ! This will read the tracks-so need to have set the paths before + call METISSE_zcnsts(initial_Z, zpars, '', '', error) + if (error /= 0) return + + call assign_commons_main() + end function + + function commit_particles() + implicit none + integer:: commit_particles + integer:: number_of_particles + integer:: i + integer:: error + real(c_double):: mass + + error = get_number_of_particles(number_of_particles) + + allocate (mass_array(number_of_particles)) + + commit_particles = 0 + end function + + ! setters/getters for tracks + ! metallicity_dir (string) + ! metallicity_dir_he (string) + ! 
z_accuracy_limit (real) + ! mass_accuracy_limit (real) + + function set_metallicity_dir(metallicity_dir_in) + implicit none + character(len = 256):: metallicity_dir_in + integer:: set_metallicity_dir + METALLICITY_DIR = metallicity_dir_in + set_metallicity_dir = 0 + end function + + function get_metallicity_dir(metallicity_dir_out) + implicit none + character(len = 256):: metallicity_dir_out + integer:: get_metallicity_dir + metallicity_dir_out = METALLICITY_DIR + get_metallicity_dir = 0 + end function + + function set_metallicity_dir_he(metallicity_dir_he_in) + implicit none + character(len = 256):: metallicity_dir_he_in + integer:: set_metallicity_dir_he + METALLICITY_DIR_HE = metallicity_dir_he_in + set_metallicity_dir_he = 0 + end function + + function get_metallicity_dir_he(metallicity_dir_he_out) + implicit none + character(len = 256):: metallicity_dir_he_out + integer:: get_metallicity_dir_he + metallicity_dir_he_out = METALLICITY_DIR_HE + get_metallicity_dir_he = 0 + end function + + function set_z_accuracy_limit(z_accuracy_limit_in) + implicit none + real(c_double):: z_accuracy_limit_in + integer:: set_z_accuracy_limit + z_accuracy_limit = z_accuracy_limit_in + set_z_accuracy_limit = 0 + end function + + function get_z_accuracy_limit(z_accuracy_limit_out) + implicit none + real(c_double):: z_accuracy_limit_out + integer:: get_z_accuracy_limit + z_accuracy_limit_out = z_accuracy_limit + get_z_accuracy_limit = 0 + end function + + function set_mass_accuracy_limit(mass_accuracy_limit_in) + implicit none + real(c_double):: mass_accuracy_limit_in + integer:: set_mass_accuracy_limit + mass_accuracy_limit = mass_accuracy_limit_in + set_mass_accuracy_limit = 0 + end function + + function get_mass_accuracy_limit(mass_accuracy_limit_out) + implicit none + real(c_double):: mass_accuracy_limit_out + integer:: get_mass_accuracy_limit + mass_accuracy_limit_out = mass_accuracy_limit + get_mass_accuracy_limit = 0 + end function + + ! setters/getters for misc controls + ! 
verbose (bool) + ! construct_postagb_track (bool) + + function set_verbose(verbose_in) + implicit none + logical:: verbose_in + integer:: set_verbose + verbose = verbose_in + set_verbose = 0 + end function + + function get_verbose(verbose_out) + implicit none + logical:: verbose_out + integer:: get_verbose + verbose_out = verbose + get_verbose = 0 + end function + + function set_construct_postagb_track(construct_postagb_track_in) + implicit none + logical:: construct_postagb_track_in + integer:: set_construct_postagb_track + construct_postagb_track = construct_postagb_track_in + set_construct_postagb_track = 0 + end function + + function get_construct_postagb_track(construct_postagb_track_out) + implicit none + logical:: construct_postagb_track_out + integer:: get_construct_postagb_track + construct_postagb_track_out = construct_postagb_track + get_construct_postagb_track = 0 + end function + + ! setters/getters for parameters + ! initial_metallicity(real) + ! wd_mass_scheme (string, 256) + ! use_initial_final_mass_relation(bool) + ! bhns_mass_scheme (string, 256) + ! max_ns_mass (real) + ! 
allow_electron_capture (bool) + + function set_initial_metallicity(initial_metallicity_in) + implicit none + real(c_double):: initial_metallicity_in + integer:: set_initial_metallicity + initial_Z = initial_metallicity_in + set_initial_metallicity = 0 + end function + + function get_initial_metallicity(initial_metallicity_out) + implicit none + real(c_double):: initial_metallicity_out + integer:: get_initial_metallicity + initial_metallicity_out = initial_Z + get_initial_metallicity = 0 + end function + + function set_wd_mass_scheme(wd_mass_scheme_in) + implicit none + character(len = 256):: wd_mass_scheme_in + integer:: set_wd_mass_scheme + WD_mass_scheme = wd_mass_scheme_in + set_wd_mass_scheme = 0 + end function + + function get_wd_mass_scheme(wd_mass_scheme_out) + implicit none + character(len = 256):: wd_mass_scheme_out + integer:: get_wd_mass_scheme + wd_mass_scheme_out = WD_mass_scheme + get_wd_mass_scheme = 0 + end function + + function set_use_initial_final_mass_relation(use_initial_final_mass_relation_in) + implicit none + logical:: use_initial_final_mass_relation_in + integer:: set_use_initial_final_mass_relation + use_initial_final_mass_relation = use_initial_final_mass_relation_in + set_use_initial_final_mass_relation = 0 + end function + + function get_use_initial_final_mass_relation(use_initial_final_mass_relation_out) + implicit none + logical:: use_initial_final_mass_relation_out + integer:: get_use_initial_final_mass_relation + use_initial_final_mass_relation_out = use_initial_final_mass_relation + get_use_initial_final_mass_relation = 0 + end function + + function set_bhns_mass_scheme(bhns_mass_scheme_in) + implicit none + character(len = 256):: bhns_mass_scheme_in + integer:: set_bhns_mass_scheme + BHNS_mass_scheme = bhns_mass_scheme_in + set_bhns_mass_scheme = 0 + end function + + function get_bhns_mass_scheme(bhns_mass_scheme_out) + implicit none + character(len = 256):: bhns_mass_scheme_out + integer:: get_bhns_mass_scheme + 
bhns_mass_scheme_out = BHNS_mass_scheme + get_bhns_mass_scheme = 0 + end function + + function set_max_ns_mass(max_ns_mass_in) + implicit none + real(c_double):: max_ns_mass_in + integer:: set_max_ns_mass + max_NS_mass = max_ns_mass_in + set_max_ns_mass = 0 + end function + + function get_max_ns_mass(max_ns_mass_out) + implicit none + real(c_double):: max_ns_mass_out + integer:: get_max_ns_mass + max_ns_mass_out = max_NS_mass + get_max_ns_mass = 0 + end function + + function set_allow_electron_capture(allow_electron_capture_in) + implicit none + logical:: allow_electron_capture_in + integer:: set_allow_electron_capture + allow_electron_capture = allow_electron_capture_in + set_allow_electron_capture = 0 + end function + + function get_allow_electron_capture(allow_electron_capture_out) + implicit none + logical:: allow_electron_capture_out + integer:: get_allow_electron_capture + allow_electron_capture_out = allow_electron_capture + get_allow_electron_capture = 0 + end function + + ! setters/getters for timestep control + ! 
pts_1 to pts_3 (real) + + function set_time_step_pts_1(pts_1_in) + implicit none + real(c_double):: pts_1_in + integer:: set_time_step_pts_1 + pts_1 = pts_1_in + set_time_step_pts_1 = 0 + end function + + function get_time_step_pts_1(pts_1_out) + implicit none + real(c_double):: pts_1_out + integer:: get_time_step_pts_1 + pts_1_out = pts_1 + get_time_step_pts_1 = 0 + end function + + function set_time_step_pts_2(pts_2_in) + implicit none + real(c_double):: pts_2_in + integer:: set_time_step_pts_2 + pts_2 = pts_2_in + set_time_step_pts_2 = 0 + end function + + function get_time_step_pts_2(pts_2_out) + implicit none + real(c_double):: pts_2_out + integer:: get_time_step_pts_2 + pts_2_out = pts_2 + get_time_step_pts_2 = 0 + end function + + function set_time_step_pts_3(pts_3_in) + implicit none + real(c_double):: pts_3_in + integer:: set_time_step_pts_3 + pts_3 = pts_3_in + set_time_step_pts_3 = 0 + end function + + function get_time_step_pts_3(pts_3_out) + implicit none + real(c_double):: pts_3_out + integer:: get_time_step_pts_3 + pts_3_out = pts_3 + get_time_step_pts_3 = 0 + end function + + ! particle management: + ! new_particle, delete_particle + + function new_particle(index_of_the_particle, mass) + implicit none + integer, intent(inout):: index_of_the_particle + real(c_double), intent(inout):: mass + integer:: new_particle + index_of_the_particle = star_system%new_star(mass) + end function + + function delete_star(index_of_the_star) + implicit none + integer:: index_of_the_star + integer:: delete_star + call star_system%remove_star(index_of_the_star) + delete_star = 0 + end function + + ! evolving stars: + ! evolve_for, evolve_one_step + ! 
evolve_model, evolve_stars + + function evolve_for(index_of_the_star, delta_t) + implicit none + integer:: index_of_the_star + integer:: error + real(c_double):: delta_t, time_step, age, previous_time_step, mass + integer:: evolve_for + type(track), pointer:: t + evolve_for = 0 + + write(*,*) 'evolving star ', index_of_the_star, ' for ', delta_t + call star_system%get_initial_mass(index_of_the_star, mass, error) + call star_system%get_age(index_of_the_star, age, error) + + call allocate_track(1, mass) + call evolv_metisse(mass, age+delta_t, error, 1) + call dealloc_track() + if (error /= 0) then + write(*,*) 'METISSE error: ', error + return + end if + call star_system%set_mass(index_of_the_star, t%pars%mass, error) + call star_system%set_age( & + index_of_the_star, & + t%pars%age, & ! METISSE uses Myr internally, we prefer to store years + error) + call star_system%set_time_step( & + index_of_the_star, & + t%pars%dt, & + error) + write(*,*) "Lum: ", t%pars%luminosity + call star_system%set_luminosity(index_of_the_star, t%pars%luminosity, error) + call star_system%set_temperature(index_of_the_star, t%pars%Teff, error) + call star_system%set_radius(index_of_the_star, t%pars%radius, error) + call star_system%set_epoch(index_of_the_star, t%pars%epoch, error) + call star_system%set_core_mass(index_of_the_star, t%pars%core_mass, error) + call star_system%set_core_radius(index_of_the_star, t%pars%core_radius, error) + call star_system%set_stellar_type(index_of_the_star, t%pars%phase, error) + call star_system%set_co_core_mass(index_of_the_star, t%pars%McCO, error) + call star_system%set_spin(index_of_the_star, t%pars%bhspin, error) + end function + + !function pevolve_for(index_of_the_star, delta_t) + ! implicit none + ! integer:: index_of_the_star + ! integer:: error + ! real(c_double):: delta_t, time_step, age, previous_time_step + ! integer:: evolve_for + ! evolve_for = 0 + ! max_age = 1.2e4_c_double + + ! 
write(*,*) 'evolving star ', index_of_the_star, ' for ', delta_t + ! call star_system%get_time_step(index_of_the_star, previous_time_step, error) + ! ! When the previous time step is 0, the star can not be evolved forward anymore, so we stop. + ! ! Possibly, the tracks don't advance further than the current age of the star. + ! do while (delta_t > 0.0_c_double .and. previous_time_step > 0.0_c_double) + ! call star_system%get_time_step(index_of_the_star, time_step, error) + ! call star_system%get_age(index_of_the_star, age, error) + ! if (delta_t < time_step) then + ! !write(*,*) 'setting time step of star ', index_of_the_star, ' to ', delta_t + ! call star_system%set_time_step(index_of_the_star, delta_t, error) + ! time_step = delta_t + ! end if + ! evolve_for = evolve_one_step(index_of_the_star) + ! write(*,*) "evolve_for: ", evolve_for + ! if (evolve_for == 2) return + ! delta_t = delta_t-time_step + ! write(*,*) "age, step: ", age, time_step + ! previous_time_step = time_step + ! end do + !end function + + function evolve_one_step(index_of_the_star) + implicit none + integer:: index_of_the_star + integer:: evolve_one_step + integer:: error + real(c_double):: time_step, nuclear_time_scale + real(c_double):: mass + real(c_double):: age + type(track), pointer:: t + evolve_one_step = 0 + + call star_system%get_time_step(index_of_the_star, time_step, error) + if (time_step <= 0.0_c_double) then + write(*,*) "cannot evolve forward anymore" + write(*,*) "time step is: ", time_step + evolve_one_step = 1 ! 1: cannot evolve forward anymore + return + end if + call star_system%get_initial_mass(index_of_the_star, mass, error) + call star_system%get_age(index_of_the_star, age, error) + call allocate_track(1, mass) ! allocates tarr. mass is ignored... 
+ t => tarr(1) + nuclear_time_scale = t%nuc_time + if (age+time_step > nuclear_time_scale) then + time_step = nuclear_time_scale-age + call star_system%set_time_step(index_of_the_star, time_step, error) + write(*,*) "nuclear time scale: ", nuclear_time_scale + write(*,*) "age+time step: ", age, time_step, age+time_step + write(*,*) "reaching end of the nuclear time scale, setting time step to: ", time_step + evolve_one_step = 2 ! 2: reached end of the nuclear time scale + end if + write(*,*) "evolving star ", index_of_the_star, " for ", time_step + call evolv_metisse(mass, age+time_step, error, 1) + write(*,*) "evolved star ", index_of_the_star + if (error == 0) then + call star_system%set_mass(index_of_the_star, t%pars%mass, error) + call star_system%set_age( & + index_of_the_star, & + t%pars%age, & ! METISSE uses Myr internally, we store years + error) + call star_system%set_time_step( & + index_of_the_star, & + t%pars%dt, & + error) + write(*,*) "Lum: ", t%pars%luminosity + call star_system%set_luminosity(index_of_the_star, t%pars%luminosity, error) + call star_system%set_temperature(index_of_the_star, t%pars%Teff, error) + call star_system%set_radius(index_of_the_star, t%pars%radius, error) + call star_system%set_epoch(index_of_the_star, t%pars%epoch, error) + call star_system%set_core_mass(index_of_the_star, t%pars%core_mass, error) + call star_system%set_core_radius(index_of_the_star, t%pars%core_radius, error) + call star_system%set_stellar_type(index_of_the_star, t%pars%phase, error) + call star_system%set_co_core_mass(index_of_the_star, t%pars%McCO, error) + call star_system%set_spin(index_of_the_star, t%pars%bhspin, error) + end if + call dealloc_track() + end function + + function evolve_stars(indices_of_stars, delta_time, error, n) + implicit none + integer, intent(in):: n + integer:: error + integer, intent(in), dimension(n):: indices_of_stars + real(c_double):: delta_time + integer:: evolve_stars + integer:: i + integer:: number_of_stars_to_evolve + + 
number_of_stars_to_evolve = size(indices_of_stars) + + do i = 1, number_of_stars_to_evolve + error = evolve_for(indices_of_stars(i), delta_time) + if (error /= 0) then + evolve_stars = error + return + end if + end do + + evolve_stars = 0 + end function + + function get_age(index_of_the_star, age) + implicit none + integer:: index_of_the_star + real(c_double):: age + integer:: get_age + call star_system%get_age(index_of_the_star, age, get_age) + age = age*1.0d+06 + end function + + function get_luminosity(index_of_the_star, luminosity) + implicit none + integer:: index_of_the_star + real(c_double):: luminosity + integer:: get_luminosity + call star_system%get_luminosity(index_of_the_star, luminosity, get_luminosity) + end function + + function get_mass(index_of_the_star, mass) + implicit none + integer:: index_of_the_star + real(c_double):: mass + integer:: get_mass + call star_system%get_mass(index_of_the_star, mass, get_mass) + end function + + function get_metallicity(metallicity) + implicit none + real(c_double):: metallicity + integer:: get_metallicity + ! 
what to do here depends on whether metallicity can be set for individual stars or only globally + get_metallicity = 0 + end function + + function get_epoch(index_of_the_star, epoch) + implicit none + integer:: index_of_the_star + real(c_double):: epoch + integer:: get_epoch + call star_system%get_epoch(index_of_the_star, epoch, get_epoch) + end function + + function get_core_mass(index_of_the_star, core_mass) + implicit none + integer:: index_of_the_star + real(c_double):: core_mass + integer:: get_core_mass + call star_system%get_core_mass(index_of_the_star, core_mass, get_core_mass) + end function + + function get_core_radius(index_of_the_star, core_radius) + implicit none + integer:: index_of_the_star + real(c_double):: core_radius + integer:: get_core_radius + call star_system%get_core_radius(index_of_the_star, core_radius, get_core_radius) + end function + + function get_convective_envelope_mass(index_of_the_star, convective_envelope_mass) + implicit none + integer:: index_of_the_star + real(c_double):: convective_envelope_mass + integer:: get_convective_envelope_mass + call star_system%get_convective_envelope_mass(index_of_the_star, convective_envelope_mass, get_convective_envelope_mass) + end function + + function get_convective_envelope_radius(index_of_the_star, convective_envelope_radius) + implicit none + integer:: index_of_the_star + real(c_double):: convective_envelope_radius + integer:: get_convective_envelope_radius + call star_system%get_convective_envelope_radius(index_of_the_star, convective_envelope_radius, get_convective_envelope_radius) + end function + + function get_CO_core_mass(index_of_the_star, CO_core_mass) + implicit none + integer:: index_of_the_star + real(c_double):: CO_core_mass + integer:: get_CO_core_mass + call star_system%get_CO_core_mass(index_of_the_star, CO_core_mass, get_CO_core_mass) + end function + + function get_main_sequence_lifetime(index_of_the_star, main_sequence_lifetime) + implicit none + integer:: index_of_the_star 
+ real(c_double):: main_sequence_lifetime + integer:: get_main_sequence_lifetime + call star_system%get_main_sequence_lifetime(index_of_the_star, main_sequence_lifetime, get_main_sequence_lifetime) + end function + + function get_number_of_particles(number_of_particles) + implicit none + integer:: number_of_particles + integer:: get_number_of_particles + call star_system%get_number_of_stars(number_of_particles) + get_number_of_particles = 0 + end function + + function get_radius(index_of_the_star, radius) + implicit none + integer:: index_of_the_star + real(c_double):: radius + integer:: get_radius + call star_system%get_radius(index_of_the_star, radius, get_radius) + end function + + function get_stellar_type(index_of_the_star, stellar_type) + implicit none + integer:: index_of_the_star, stellar_type + integer:: get_stellar_type + call star_system%get_stellar_type(index_of_the_star, stellar_type, get_stellar_type) + end function + + function get_temperature(index_of_the_star, temperature) + implicit none + integer:: index_of_the_star + real(c_double):: temperature + integer:: get_temperature + call star_system%get_temperature(index_of_the_star, temperature, get_temperature) + end function + + function get_time_step(index_of_the_star, time_step) + implicit none + integer:: index_of_the_star + real(c_double):: time_step + integer:: get_time_step + call star_system%get_time_step(index_of_the_star, time_step, get_time_step) + time_step = time_step*1.0d+06 + end function + + function get_initial_mass(index_of_the_star, mass) + implicit none + integer:: index_of_the_star + double precision:: mass + integer:: get_initial_mass + call star_system%get_initial_mass(index_of_the_star, mass, get_initial_mass) + end function + + function set_metallicity(metallicity) + implicit none + real(c_double):: metallicity + integer:: set_metallicity + set_metallicity = 0 + end function + +end module + diff --git a/src/amuse/community/metisse/interface.py b/src/amuse_metisse/interface.py 
similarity index 94% rename from src/amuse/community/metisse/interface.py rename to src/amuse_metisse/interface.py index 677f8dc52c..7dea3807b6 100644 --- a/src/amuse/community/metisse/interface.py +++ b/src/amuse_metisse/interface.py @@ -10,8 +10,8 @@ remote_function, ) from amuse.community.interface import se -from amuse.datamodel import Particles, ParticlesSubset -from amuse.units import units, constants + +from amuse.units import units # low level interface class @@ -35,7 +35,7 @@ def __init__(self, **keyword_arguments): self, name_of_the_worker="metisse_worker", **keyword_arguments ) LiteratureReferencesMixIn.__init__(self) - self.model_time = 0.0 | units.julianyr + self.model_time = 0.0 | units.mega(units.julianyr) # Remote functions - getters and setters # Note that we should maybe use SI units rather than derived (MSun etc), at @@ -47,7 +47,7 @@ def get_initial_mass(index_of_the_star="i"): @remote_function(can_handle_array=True) def get_epoch(index_of_the_star="i"): - returns (epoch="d" | units.julianyr) + returns (epoch="d" | units.mega(units.julianyr)) @remote_function(can_handle_array=True) def get_core_mass(index_of_the_star="i"): @@ -71,7 +71,7 @@ def get_CO_core_mass(index_of_the_star="i"): @remote_function(can_handle_array=True) def get_main_sequence_lifetime(index_of_the_star="i"): - returns (main_sequence_lifetime="d" | units.Myr) + returns (main_sequence_lifetime="d" | units.mega(units.julianyr)) # getters and setters for tracks # metallicity_dir (string) @@ -223,17 +223,12 @@ def __init__(self, **options): # self.stopping_conditions = StoppingConditions(self) # self.stopping_conditions.supernova_detection = code.StoppingCondition('supernova_detection') se.StellarEvolution.__init__(self, MetisseInterface(**options), **options) + self.model_time = 0.0 | units.mega(units.julianyr) # the definition of the state model of the code def define_state(self, handler): - # for example: - # handler.set_initial_state("UNINITIALIZED") - # 
handler.add_transition("!UNINITIALIZED!STOPPED", "END", "cleanup_code") - # handler.add_transition("END", "STOPPED", "stop", False) - # handler.add_transition( - # "UNINITIALIZED", "INITIALIZED", "initialize_code") - # handler.add_method("STOPPED", "stop") - pass + se.StellarEvolution.define_state(self, handler) + handler.add_method('RUN', 'evolve_model') # the definition of any properties def define_properties(self, handler): @@ -427,7 +422,7 @@ def define_particle_sets(self, handler): handler.add_method("particles", "evolve_for") def evolve_model(self, end_time=None, keep_synchronous=True): - print("evolve_model", end_time, keep_synchronous) + print(f"evolve_model {end_time=} {keep_synchronous=}") if not keep_synchronous: for particle in self.particles: particle.evolve_one_step() @@ -440,6 +435,6 @@ def evolve_model(self, end_time=None, keep_synchronous=True): ) print(f"{delta_time=}") for i, particle in enumerate(self.particles): - print(f"{i} {particle.age} {particle.mass}") + print(f"particle {i=} {particle.age=} {particle.mass=}") particle.evolve_for(particle.age + delta_time) self.model_time += delta_time diff --git a/src/amuse/community/metisse/storage.f90 b/src/amuse_metisse/storage.f90 similarity index 58% rename from src/amuse/community/metisse/storage.f90 rename to src/amuse_metisse/storage.f90 index 22b3fc8a71..a4dbe2cf68 100644 --- a/src/amuse/community/metisse/storage.f90 +++ b/src/amuse_metisse/storage.f90 @@ -13,100 +13,110 @@ module store_stars use iso_c_binding implicit none - type, public :: star + type, public:: star private - integer :: id - real(c_double) :: age - real(c_double) :: CO_core_mass - real(c_double) :: core_mass - real(c_double) :: core_radius - real(c_double) :: convective_envelope_mass - real(c_double) :: convective_envelope_radius - real(c_double) :: epoch - real(c_double) :: initial_mass - real(c_double) :: luminosity - real(c_double) :: main_sequence_lifetime - real(c_double) :: mass - real(c_double) :: metallicity - 
real(c_double) :: radius - real(c_double) :: spin - real(c_double) :: temperature - real(c_double) :: time_step - integer :: stellar_type + integer:: id ! never-changing identifier + integer:: track_id ! track of the star in metisse-may change if a star is removed/a track is deallocated?? + real(c_double):: age + real(c_double):: CO_core_mass + real(c_double):: core_mass + real(c_double):: core_radius + real(c_double):: convective_envelope_mass + real(c_double):: convective_envelope_radius + real(c_double):: epoch + real(c_double):: initial_mass + real(c_double):: luminosity + real(c_double):: main_sequence_lifetime + real(c_double):: mass + real(c_double):: metallicity + real(c_double):: radius + real(c_double):: spin + real(c_double):: temperature + real(c_double):: time_step + integer:: stellar_type end type star - type, public :: stars + type, public:: stars private - type(star), allocatable :: star_array(:) - integer :: num_stars = 0 ! number of stars in the system - integer :: next_star_id = 1 ! the id of the next star, should only ever increase + type(star), allocatable:: star_array(:) + integer:: num_stars = 0 ! number of stars in the system + integer:: next_star_id = 1 ! the id of the next star, should only ever increase contains - procedure, public :: new_star - procedure, public :: remove_star - procedure, private :: resize - procedure, private :: lookup_star_id - - procedure, private :: get_property_double - procedure, private :: get_property_int - procedure, private :: set_property_double - procedure, private :: set_property_int + procedure, public:: new_star + procedure, public:: remove_star + procedure, private:: resize + procedure, private:: lookup_star_id + + procedure, private:: get_property_double + procedure, private:: get_property_int + procedure, private:: set_property_double + procedure, private:: set_property_int - procedure, public :: get_number_of_stars + procedure, public:: get_number_of_stars ! 
Every property has a public getter and a setter, listed alphabetically here. ! 'id' is only used internally, so it is not exposed. - procedure, public :: get_age - procedure, public :: get_CO_core_mass - procedure, public :: get_core_mass - procedure, public :: get_core_radius - procedure, public :: get_convective_envelope_mass - procedure, public :: get_convective_envelope_radius - procedure, public :: get_epoch - procedure, public :: get_initial_mass - procedure, public :: get_luminosity - procedure, public :: get_main_sequence_lifetime - procedure, public :: get_mass - procedure, public :: get_metallicity - procedure, public :: get_radius - procedure, public :: get_spin - procedure, public :: get_stellar_type - procedure, public :: get_temperature - procedure, public :: get_time_step - - procedure, public :: set_age - procedure, public :: set_CO_core_mass - procedure, public :: set_core_mass - procedure, public :: set_core_radius - procedure, public :: set_convective_envelope_mass - procedure, public :: set_convective_envelope_radius - procedure, public :: set_epoch - procedure, public :: set_initial_mass - procedure, public :: set_luminosity - procedure, public :: set_main_sequence_lifetime - procedure, public :: set_mass - procedure, public :: set_metallicity - procedure, public :: set_radius - procedure, public :: set_spin - procedure, public :: set_stellar_type - procedure, public :: set_temperature - procedure, public :: set_time_step + procedure, public:: get_age + procedure, public:: get_CO_core_mass + procedure, public:: get_core_mass + procedure, public:: get_core_radius + procedure, public:: get_convective_envelope_mass + procedure, public:: get_convective_envelope_radius + procedure, public:: get_epoch + procedure, public:: get_initial_mass + procedure, public:: get_luminosity + procedure, public:: get_main_sequence_lifetime + procedure, public:: get_mass + procedure, public:: get_metallicity + procedure, public:: get_radius + procedure, public:: 
get_spin + procedure, public:: get_stellar_type + procedure, public:: get_temperature + procedure, public:: get_time_step + + procedure, public:: set_age + procedure, public:: set_CO_core_mass + procedure, public:: set_core_mass + procedure, public:: set_core_radius + procedure, public:: set_convective_envelope_mass + procedure, public:: set_convective_envelope_radius + procedure, public:: set_epoch + procedure, public:: set_initial_mass + procedure, public:: set_luminosity + procedure, public:: set_main_sequence_lifetime + procedure, public:: set_mass + procedure, public:: set_metallicity + procedure, public:: set_radius + procedure, public:: set_spin + procedure, public:: set_stellar_type + procedure, public:: set_temperature + procedure, public:: set_time_step end type stars contains + subroutine initialize(self) + class(stars), intent(inout):: self + allocate(self%star_array(0)) + self%num_stars = 0 + self%next_star_id = 1 + end subroutine + subroutine get_number_of_stars(self, number_of_stars) - class(stars), intent(in) :: self - integer, intent(out) :: number_of_stars + class(stars), intent(in):: self + integer, intent(out):: number_of_stars number_of_stars = self%num_stars end subroutine function new_star(self, initial_mass) result(new_id) - class(stars), intent(inout) :: self - real(c_double), intent(in) :: initial_mass - integer :: new_id - integer :: i - - self%num_stars = self%num_stars + 1 + class(stars), intent(inout):: self + real(c_double), intent(in):: initial_mass + integer:: new_id + integer:: i + + self%num_stars = self%num_stars+1 + write(*,*) "adding new star ! 
so resizing to ", self%num_stars + call flush(6) call self%resize(self%num_stars) i = self%num_stars new_id = self%next_star_id @@ -131,22 +141,24 @@ function new_star(self, initial_mass) result(new_id) self%star_array(i)%time_step = 1.0_c_double self%star_array(i)%temperature = 0.0_c_double - self%next_star_id = new_id + 1 + self%next_star_id = new_id+1 end function new_star subroutine remove_star(self, id) - class(stars), intent(inout) :: self - integer, intent(in) :: id + class(stars), intent(inout):: self + integer, intent(in):: id - integer :: i + integer:: i do i = 1, self%num_stars if (self%star_array(i)%id == id) then if (i /= self%num_stars) then self%star_array(i:self%num_stars-1) = self%star_array(i+1:self%num_stars) end if - self%num_stars = self%num_stars - 1 + self%num_stars = self%num_stars-1 + write(*,*) "resizing to ", self%num_stars + call flush(6) call self%resize(self%num_stars) exit end if @@ -155,10 +167,10 @@ subroutine remove_star(self, id) end subroutine remove_star subroutine resize(self, required_size) - class(stars), intent(inout) :: self - integer, intent(in) :: required_size - type(star), allocatable :: temp(:) - integer :: current_size, new_capacity + class(stars), intent(inout):: self + integer, intent(in):: required_size + type(star), allocatable:: temp(:) + integer:: current_size, new_capacity if (required_size <= 0) then self%num_stars = 0 @@ -175,7 +187,7 @@ subroutine resize(self, required_size) new_capacity = current_size do while (required_size .gt. new_capacity) - new_capacity = max(100, int(new_capacity * 1.1)) + new_capacity = max(100, int(new_capacity*1.1)) end do if (.not. allocated(self%star_array) .or. 
new_capacity > current_size) then @@ -194,10 +206,10 @@ subroutine resize(self, required_size) end subroutine resize function lookup_star_id(self, id) result(index_of_the_star) - class(stars), intent(in) :: self - integer, intent(in) :: id - integer :: index_of_the_star - integer :: i + class(stars), intent(in):: self + integer, intent(in):: id + integer:: index_of_the_star + integer:: i do i = 1, self%num_stars if (self%star_array(i)%id == id) then @@ -211,11 +223,11 @@ function lookup_star_id(self, id) result(index_of_the_star) ! Getters for all the stellar properties subroutine get_property_double(self, id, property_name, value, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - character(len=*), intent(in) :: property_name - real(c_double), intent(out) :: value - integer :: i, error + class(stars), intent(in):: self + integer, intent(in):: id + character(len=*), intent(in):: property_name + real(c_double), intent(out):: value + integer:: i, error i = lookup_star_id(self, id) if (i == 0) then @@ -265,11 +277,11 @@ subroutine get_property_double(self, id, property_name, value, error) end subroutine get_property_double subroutine get_property_int(self, id, property_name, value, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - character(len=*), intent(in) :: property_name - integer, intent(out) :: value - integer :: i, error + class(stars), intent(in):: self + integer, intent(in):: id + character(len=*), intent(in):: property_name + integer, intent(out):: value + integer:: i, error i = lookup_star_id(self, id) if (i == 0) then @@ -291,11 +303,11 @@ end subroutine get_property_int ! 
Setters for all the stellar properties subroutine set_property_double(self, id, property_name, value, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - character(len=*), intent(in) :: property_name - real(c_double), intent(in) :: value - integer :: i, error + class(stars), intent(inout):: self + integer, intent(in):: id + character(len=*), intent(in):: property_name + real(c_double), intent(in):: value + integer:: i, error i = lookup_star_id(self, id) if (i == 0) then @@ -320,6 +332,8 @@ subroutine set_property_double(self, id, property_name, value, error) self%star_array(i)%epoch = value case ('initial_mass') self%star_array(i)%initial_mass = value + case ('luminosity') + self%star_array(i)%luminosity = value case ('main_sequence_lifetime') self%star_array(i)%main_sequence_lifetime = value case ('mass') @@ -342,11 +356,11 @@ subroutine set_property_double(self, id, property_name, value, error) end subroutine subroutine set_property_int(self, id, property_name, value, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - character(len=*), intent(in) :: property_name - integer, intent(in) :: value - integer :: i, error + class(stars), intent(inout):: self + integer, intent(in):: id + character(len=*), intent(in):: property_name + integer, intent(in):: value + integer:: i, error i = lookup_star_id(self, id) if (i == 0) then @@ -367,138 +381,138 @@ subroutine set_property_int(self, id, property_name, value, error) ! 
getters for all the stellar properties subroutine get_age(self, id, age, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: age - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: age + integer:: error call get_property_double(self, id, 'age', age, error) end subroutine subroutine get_CO_core_mass(self, id, CO_core_mass, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: CO_core_mass - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: CO_core_mass + integer:: error call get_property_double(self, id, 'CO_core_mass', CO_core_mass, error) end subroutine subroutine get_core_mass(self, id, core_mass, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: core_mass - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: core_mass + integer:: error call get_property_double(self, id, 'core_mass', core_mass, error) end subroutine subroutine get_core_radius(self, id, core_radius, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: core_radius - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: core_radius + integer:: error call get_property_double(self, id, 'core_radius', core_radius, error) end subroutine subroutine get_convective_envelope_mass(self, id, convective_envelope_mass, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: convective_envelope_mass - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: convective_envelope_mass + integer:: error call get_property_double(self, id, 'convective_envelope_mass', 
convective_envelope_mass, error) end subroutine subroutine get_convective_envelope_radius(self, id, convective_envelope_radius, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: convective_envelope_radius - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: convective_envelope_radius + integer:: error call get_property_double(self, id, 'convective_envelope_radius', convective_envelope_radius, error) end subroutine subroutine get_epoch(self, id, epoch, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: epoch - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: epoch + integer:: error call get_property_double(self, id, 'epoch', epoch, error) end subroutine subroutine get_initial_mass(self, id, initial_mass, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: initial_mass - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: initial_mass + integer:: error call get_property_double(self, id, 'initial_mass', initial_mass, error) end subroutine subroutine get_luminosity(self, id, luminosity, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: luminosity - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: luminosity + integer:: error call get_property_double(self, id, 'luminosity', luminosity, error) end subroutine subroutine get_main_sequence_lifetime(self, id, main_sequence_lifetime, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: main_sequence_lifetime - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: 
main_sequence_lifetime + integer:: error call get_property_double(self, id, 'main_sequence_lifetime', main_sequence_lifetime, error) end subroutine subroutine get_mass(self, id, mass, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: mass - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: mass + integer:: error call get_property_double(self, id, 'mass', mass, error) end subroutine subroutine get_metallicity(self, id, metallicity, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: metallicity - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: metallicity + integer:: error call get_property_double(self, id, 'metallicity', metallicity, error) end subroutine subroutine get_radius(self, id, radius, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: radius - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: radius + integer:: error call get_property_double(self, id, 'radius', radius, error) end subroutine subroutine get_spin(self, id, spin, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: spin - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: spin + integer:: error call get_property_double(self, id, 'spin', spin, error) end subroutine subroutine get_stellar_type(self, id, stellar_type, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - integer, intent(out) :: stellar_type - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + integer, intent(out):: stellar_type + integer:: error call get_property_int(self, id, 'stellar_type', stellar_type, error) end subroutine subroutine 
get_temperature(self, id, temperature, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: temperature - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: temperature + integer:: error call get_property_double(self, id, 'temperature', temperature, error) end subroutine subroutine get_time_step(self, id, time_step, error) - class(stars), intent(in) :: self - integer, intent(in) :: id - real(c_double), intent(out) :: time_step - integer :: error + class(stars), intent(in):: self + integer, intent(in):: id + real(c_double), intent(out):: time_step + integer:: error call get_property_double(self, id, 'time_step', time_step, error) end subroutine @@ -506,138 +520,139 @@ subroutine get_time_step(self, id, time_step, error) ! setters for all the stellar properties (in the same order as the getters) subroutine set_age(self, id, age, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: age - integer :: error + class(stars), intent(inout):: self + integer, intent(in):: id + real(c_double), intent(in):: age + integer:: error call set_property_double(self, id, 'age', age, error) end subroutine subroutine set_CO_core_mass(self, id, CO_core_mass, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: CO_core_mass - integer :: error + class(stars), intent(inout):: self + integer, intent(in):: id + real(c_double), intent(in):: CO_core_mass + integer:: error call set_property_double(self, id, 'CO_core_mass', CO_core_mass, error) end subroutine subroutine set_core_mass(self, id, core_mass, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: core_mass - integer :: error + class(stars), intent(inout):: self + integer, intent(in):: id + real(c_double), intent(in):: core_mass + integer:: error call set_property_double(self, 
id, 'core_mass', core_mass, error) end subroutine subroutine set_core_radius(self, id, core_radius, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: core_radius - integer :: error + class(stars), intent(inout):: self + integer, intent(in):: id + real(c_double), intent(in):: core_radius + integer:: error call set_property_double(self, id, 'core_radius', core_radius, error) end subroutine subroutine set_convective_envelope_mass(self, id, convective_envelope_mass, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: convective_envelope_mass - integer :: error + class(stars), intent(inout):: self + integer, intent(in):: id + real(c_double), intent(in):: convective_envelope_mass + integer:: error call set_property_double(self, id, 'convective_envelope_mass', convective_envelope_mass, error) end subroutine subroutine set_convective_envelope_radius(self, id, convective_envelope_radius, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: convective_envelope_radius - integer :: error + class(stars), intent(inout):: self + integer, intent(in):: id + real(c_double), intent(in):: convective_envelope_radius + integer:: error call set_property_double(self, id, 'convective_envelope_radius', convective_envelope_radius, error) end subroutine subroutine set_epoch(self, id, epoch, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: epoch - integer :: error + class(stars), intent(inout):: self + integer, intent(in):: id + real(c_double), intent(in):: epoch + integer:: error call set_property_double(self, id, 'epoch', epoch, error) end subroutine subroutine set_initial_mass(self, id, initial_mass, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: initial_mass - integer :: error + class(stars), intent(inout):: self + 
integer, intent(in):: id + real(c_double), intent(in):: initial_mass + integer:: error call set_property_double(self, id, 'initial_mass', initial_mass, error) end subroutine subroutine set_luminosity(self, id, luminosity, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: luminosity - integer :: error + class(stars), intent(inout):: self + integer, intent(in):: id + real(c_double), intent(in):: luminosity + integer:: error call set_property_double(self, id, 'luminosity', luminosity, error) + write(*,*) 'luminosity', luminosity end subroutine subroutine set_main_sequence_lifetime(self, id, main_sequence_lifetime, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: main_sequence_lifetime - integer :: error + class(stars), intent(inout):: self + integer, intent(in):: id + real(c_double), intent(in):: main_sequence_lifetime + integer:: error call set_property_double(self, id, 'main_sequence_lifetime', main_sequence_lifetime, error) end subroutine subroutine set_mass(self, id, mass, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: mass - integer :: error + class(stars), intent(inout):: self + integer, intent(in):: id + real(c_double), intent(in):: mass + integer:: error call set_property_double(self, id, 'mass', mass, error) end subroutine subroutine set_metallicity(self, id, metallicity, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: metallicity - integer :: error + class(stars), intent(inout):: self + integer, intent(in):: id + real(c_double), intent(in):: metallicity + integer:: error call set_property_double(self, id, 'metallicity', metallicity, error) end subroutine subroutine set_radius(self, id, radius, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: radius - integer :: error + 
class(stars), intent(inout):: self + integer, intent(in):: id + real(c_double), intent(in):: radius + integer:: error call set_property_double(self, id, 'radius', radius, error) end subroutine subroutine set_spin(self, id, spin, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: spin - integer :: error + class(stars), intent(inout):: self + integer, intent(in):: id + real(c_double), intent(in):: spin + integer:: error call set_property_double(self, id, 'spin', spin, error) end subroutine subroutine set_stellar_type(self, id, stellar_type, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - integer, intent(in) :: stellar_type - integer :: error + class(stars), intent(inout):: self + integer, intent(in):: id + integer, intent(in):: stellar_type + integer:: error call set_property_int(self, id, 'stellar_type', stellar_type, error) end subroutine subroutine set_temperature(self, id, temperature, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: temperature - integer :: error + class(stars), intent(inout):: self + integer, intent(in):: id + real(c_double), intent(in):: temperature + integer:: error call set_property_double(self, id, 'temperature', temperature, error) end subroutine subroutine set_time_step(self, id, time_step, error) - class(stars), intent(inout) :: self - integer, intent(in) :: id - real(c_double), intent(in) :: time_step - integer :: error + class(stars), intent(inout):: self + integer, intent(in):: id + real(c_double), intent(in):: time_step + integer:: error call set_property_double(self, id, 'time_step', time_step, error) end subroutine From 359245b8dec461a35c10baf3ca6eaa5405bd6e8a Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 19 Jun 2025 14:13:31 +0200 Subject: [PATCH 23/40] update to new dir structure (2) --- src/amuse/community/metisse/support/shared | 1 - .../community/metisse => 
amuse_metisse}/support/aclocal.m4 | 0 .../community/metisse => amuse_metisse}/support/config.mk.in | 0 src/{amuse/community/metisse => amuse_metisse}/support/configure | 0 .../community/metisse => amuse_metisse}/support/configure.ac | 0 src/amuse_metisse/support/shared | 1 + 6 files changed, 1 insertion(+), 1 deletion(-) delete mode 120000 src/amuse/community/metisse/support/shared rename src/{amuse/community/metisse => amuse_metisse}/support/aclocal.m4 (100%) rename src/{amuse/community/metisse => amuse_metisse}/support/config.mk.in (100%) rename src/{amuse/community/metisse => amuse_metisse}/support/configure (100%) rename src/{amuse/community/metisse => amuse_metisse}/support/configure.ac (100%) create mode 120000 src/amuse_metisse/support/shared diff --git a/src/amuse/community/metisse/support/shared b/src/amuse/community/metisse/support/shared deleted file mode 120000 index 77c5e6f8ed..0000000000 --- a/src/amuse/community/metisse/support/shared +++ /dev/null @@ -1 +0,0 @@ -../../../../../support/shared \ No newline at end of file diff --git a/src/amuse/community/metisse/support/aclocal.m4 b/src/amuse_metisse/support/aclocal.m4 similarity index 100% rename from src/amuse/community/metisse/support/aclocal.m4 rename to src/amuse_metisse/support/aclocal.m4 diff --git a/src/amuse/community/metisse/support/config.mk.in b/src/amuse_metisse/support/config.mk.in similarity index 100% rename from src/amuse/community/metisse/support/config.mk.in rename to src/amuse_metisse/support/config.mk.in diff --git a/src/amuse/community/metisse/support/configure b/src/amuse_metisse/support/configure similarity index 100% rename from src/amuse/community/metisse/support/configure rename to src/amuse_metisse/support/configure diff --git a/src/amuse/community/metisse/support/configure.ac b/src/amuse_metisse/support/configure.ac similarity index 100% rename from src/amuse/community/metisse/support/configure.ac rename to src/amuse_metisse/support/configure.ac diff --git 
a/src/amuse_metisse/support/shared b/src/amuse_metisse/support/shared new file mode 120000 index 0000000000..00817b0193 --- /dev/null +++ b/src/amuse_metisse/support/shared @@ -0,0 +1 @@ +../../../support/shared \ No newline at end of file From 542abffa3ca45a4ffafa96c145e93b22b38d37fb Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 19 Jun 2025 14:15:30 +0200 Subject: [PATCH 24/40] move to new dir structure (3) --- src/amuse_metisse/README | 13 ++++++ .../metisse => amuse_metisse}/__init__.py | 0 .../packages/amuse-metisse.amuse_deps | 1 + .../packages/amuse-metisse/amuse_metisse | 1 + .../packages/amuse-metisse/pyproject.toml | 46 +++++++++++++++++++ 5 files changed, 61 insertions(+) create mode 100644 src/amuse_metisse/README rename src/{amuse/community/metisse => amuse_metisse}/__init__.py (100%) create mode 100644 src/amuse_metisse/packages/amuse-metisse.amuse_deps create mode 120000 src/amuse_metisse/packages/amuse-metisse/amuse_metisse create mode 100644 src/amuse_metisse/packages/amuse-metisse/pyproject.toml diff --git a/src/amuse_metisse/README b/src/amuse_metisse/README new file mode 100644 index 0000000000..d42f040428 --- /dev/null +++ b/src/amuse_metisse/README @@ -0,0 +1,13 @@ + +To update/replace the current version of SSE, just unpack a new +tarball in the sse directory. The MUSE module uses the SSE +distribution unchanged, and relies only on the calling sequences to +zcnsts(), evolv1(), star(), and deltat(). + +Scientific papers written using this module should reference the paper + +"Comprehensive analytic formulae for stellar evolution as a function + of mass and metallicity" + Hurley J.R., Pols O.R., Tout C.A., 2000, MNRAS, 315, 543 + +(See sse/README_SSE for more details.) 
diff --git a/src/amuse/community/metisse/__init__.py b/src/amuse_metisse/__init__.py similarity index 100% rename from src/amuse/community/metisse/__init__.py rename to src/amuse_metisse/__init__.py diff --git a/src/amuse_metisse/packages/amuse-metisse.amuse_deps b/src/amuse_metisse/packages/amuse-metisse.amuse_deps new file mode 100644 index 0000000000..235aec9dc0 --- /dev/null +++ b/src/amuse_metisse/packages/amuse-metisse.amuse_deps @@ -0,0 +1 @@ +fortran mpi \ No newline at end of file diff --git a/src/amuse_metisse/packages/amuse-metisse/amuse_metisse b/src/amuse_metisse/packages/amuse-metisse/amuse_metisse new file mode 120000 index 0000000000..c25bddb6dd --- /dev/null +++ b/src/amuse_metisse/packages/amuse-metisse/amuse_metisse @@ -0,0 +1 @@ +../.. \ No newline at end of file diff --git a/src/amuse_metisse/packages/amuse-metisse/pyproject.toml b/src/amuse_metisse/packages/amuse-metisse/pyproject.toml new file mode 100644 index 0000000000..7f30849303 --- /dev/null +++ b/src/amuse_metisse/packages/amuse-metisse/pyproject.toml @@ -0,0 +1,46 @@ +[project] +name = "amuse-metisse" +dynamic = ["version"] +requires-python = ">=3.7" +dependencies = [ + "amuse-framework" +] + +[build-system] +requires = ["hatchling", "versioningit"] +build-backend = "hatchling.build" + +[tool.hatch.version] +source = "versioningit" + +[tool.versioningit.next-version] +method = "smallest" + +[tool.versioningit.format] +distance = "{next_version}.dev{distance}+{vcs}{rev}" +dirty = "{base_version}+d{build_date:%Y%m%d}" +distance-dirty = "{next_version}.dev{distance}+{vcs}{rev}.d{build_date:%Y%m%d}" + +[tool.versioningit.vcs] +method = { module = "version_helper", value = "get_amuse_version", module-dir = "amuse_metisse/support/shared" } + +[tool.hatch.build] +skip-excluded-dirs = true + +[tool.hatch.build.targets.wheel] +include = ["amuse_metisse/**/*.py"] +exclude = [ + "amuse_metisse/packages", + "amuse_metisse/support", + "amuse_metisse/src", + "amuse_metisse/tests" + ] +artifacts = 
["amuse_metisse/metisse_worker"] + +[tool.pytest.ini_options] +pythonpath = ["amuse_metisse/tests/"] + +testpaths = ["amuse_metisse/tests"] + +addopts = "--import-mode=append" # test the installed package + From fd6bfde33a4bd43242b235f718a8daa265430bd3 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 23 Jun 2025 09:12:40 +0200 Subject: [PATCH 25/40] small updates for newer METISSE --- src/amuse_metisse/interface.f90 | 6 ++++-- src/amuse_metisse/interface.py | 2 +- src/amuse_metisse/storage.f90 | 1 + 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/amuse_metisse/interface.f90 b/src/amuse_metisse/interface.f90 index 8c4e177f2d..29ca84edd8 100644 --- a/src/amuse_metisse/interface.f90 +++ b/src/amuse_metisse/interface.f90 @@ -27,6 +27,7 @@ function initialize_code() ! Need to define this front end for METISSE call initialize_front_end("AMUSE") error = set_defaults() + call star_system%initialize() initialize_code = 0 end function @@ -81,7 +82,7 @@ function commit_parameters() write(*,*) "Calling commit_parameters" ! 
This will read the tracks-so need to have set the paths before - call METISSE_zcnsts(initial_Z, zpars, '', '', error) + call METISSE_zcnsts(initial_Z, zpars, error) if (error /= 0) return call assign_commons_main() @@ -400,8 +401,8 @@ function evolve_for(index_of_the_star, delta_t) call star_system%get_age(index_of_the_star, age, error) call allocate_track(1, mass) + t => tarr(1) call evolv_metisse(mass, age+delta_t, error, 1) - call dealloc_track() if (error /= 0) then write(*,*) 'METISSE error: ', error return @@ -425,6 +426,7 @@ function evolve_for(index_of_the_star, delta_t) call star_system%set_stellar_type(index_of_the_star, t%pars%phase, error) call star_system%set_co_core_mass(index_of_the_star, t%pars%McCO, error) call star_system%set_spin(index_of_the_star, t%pars%bhspin, error) + call dealloc_track() end function !function pevolve_for(index_of_the_star, delta_t) diff --git a/src/amuse_metisse/interface.py b/src/amuse_metisse/interface.py index 7dea3807b6..396c5e4aca 100644 --- a/src/amuse_metisse/interface.py +++ b/src/amuse_metisse/interface.py @@ -433,7 +433,7 @@ def evolve_model(self, end_time=None, keep_synchronous=True): if end_time else 0.99*min(self.particles.time_step) ) - print(f"{delta_time=}") + print(f"delta_time = {delta_time}") for i, particle in enumerate(self.particles): print(f"particle {i=} {particle.age=} {particle.mass=}") particle.evolve_for(particle.age + delta_time) diff --git a/src/amuse_metisse/storage.f90 b/src/amuse_metisse/storage.f90 index a4dbe2cf68..65f7ba7c8d 100644 --- a/src/amuse_metisse/storage.f90 +++ b/src/amuse_metisse/storage.f90 @@ -42,6 +42,7 @@ module store_stars integer:: num_stars = 0 ! number of stars in the system integer:: next_star_id = 1 ! 
the id of the next star, should only ever increase contains + procedure, public:: initialize procedure, public:: new_star procedure, public:: remove_star procedure, private:: resize From 0e8f6c639c89b9757f9a861e1503fbd2f0fb9054 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 23 Jun 2025 16:14:17 +0200 Subject: [PATCH 26/40] updates for memory management --- src/amuse_metisse/interface.f90 | 2 +- src/amuse_metisse/storage.f90 | 63 +++++++++++++++++++++++++++------ 2 files changed, 54 insertions(+), 11 deletions(-) diff --git a/src/amuse_metisse/interface.f90 b/src/amuse_metisse/interface.f90 index 29ca84edd8..332c7f5812 100644 --- a/src/amuse_metisse/interface.f90 +++ b/src/amuse_metisse/interface.f90 @@ -403,6 +403,7 @@ function evolve_for(index_of_the_star, delta_t) call allocate_track(1, mass) t => tarr(1) call evolv_metisse(mass, age+delta_t, error, 1) + call dealloc_track() if (error /= 0) then write(*,*) 'METISSE error: ', error return @@ -426,7 +427,6 @@ function evolve_for(index_of_the_star, delta_t) call star_system%set_stellar_type(index_of_the_star, t%pars%phase, error) call star_system%set_co_core_mass(index_of_the_star, t%pars%McCO, error) call star_system%set_spin(index_of_the_star, t%pars%bhspin, error) - call dealloc_track() end function !function pevolve_for(index_of_the_star, delta_t) diff --git a/src/amuse_metisse/storage.f90 b/src/amuse_metisse/storage.f90 index 65f7ba7c8d..ca373af91d 100644 --- a/src/amuse_metisse/storage.f90 +++ b/src/amuse_metisse/storage.f90 @@ -190,19 +190,62 @@ subroutine resize(self, required_size) do while (required_size .gt. new_capacity) new_capacity = max(100, int(new_capacity*1.1)) end do - - if (.not. allocated(self%star_array) .or. new_capacity > current_size) then + + if (required_size .lt. current_size) then + ! 
Reduce the size of the array if (allocated(self%star_array)) then - allocate(temp(current_size)) - temp = self%star_array deallocate(self%star_array) - end if - allocate(self%star_array(new_capacity)) - if (allocated(temp)) then - self%star_array(1:current_size) = temp + endif + allocate(self%star_array(required_size)) + else + ! Increase the size of the array + if (.not. allocated(self%star_array)) then + allocate(self%star_array(required_size)) + else + ! Allocate a temporary array to hold the new data + !type(star), allocatable:: temp(:) + allocate(temp(required_size)) + ! Copy the old data to the temporary array + temp(1:current_size) = self%star_array + ! Deallocate the old memory + deallocate(self%star_array) + ! Allocate new memory for the array + allocate(self%star_array(required_size)) + ! Copy the data from the temporary array to the new array + self%star_array = temp + ! Deallocate the temporary array deallocate(temp) - end if - end if + endif + endif + + !if (.not. allocated(self%star_array) .or. new_capacity > current_size) then + ! write(*,*) "not allocated OR resizing needed" + ! call flush(6) + ! if (allocated(self%star_array)) then + ! write(*,*) "allocated but resizing needed" + ! call flush(6) + ! allocate(temp(current_size)) + ! temp = self%star_array + ! deallocate(self%star_array) + ! end if + ! write(*,*) "allocating array of needed size (", new_capacity, ")" + ! call flush(6) + ! write(*,*) "allocated? ", allocated(self%star_array) + ! call flush(6) + ! if (.not. allocated(self%star_array)) then + ! allocate(self%star_array(new_capacity)) + ! else + ! print *, "Error: Memory already allocated" + ! stop + ! endif + ! !allocate(self%star_array(new_capacity)) + ! if (allocated(temp)) then + ! write(*,*) "copying data from temp" + ! call flush(6) + ! self%star_array(1:current_size) = temp + ! deallocate(temp) + ! 
end if + !end if end subroutine resize From 9f1f9da53791c9dc1616dc791865be5699c47e1d Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 23 Jun 2025 16:14:28 +0200 Subject: [PATCH 27/40] add test script for metisse --- .../test_metisse_standaloneimitate.py | 81 +++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 src/amuse_metisse/test_metisse_standaloneimitate.py diff --git a/src/amuse_metisse/test_metisse_standaloneimitate.py b/src/amuse_metisse/test_metisse_standaloneimitate.py new file mode 100644 index 0000000000..06f4e4d935 --- /dev/null +++ b/src/amuse_metisse/test_metisse_standaloneimitate.py @@ -0,0 +1,81 @@ +import logging + +import numpy as np + +from amuse.units import units +from amuse.datamodel import Particles + +# from amuse.community.metisse import Metisse +from amuse_metisse import Metisse + +logger = logging.getLogger("amuse") +# logger.setLevel(logging.DEBUG) +# logging.basicConfig(level=logging.DEBUG) + + +def setup_metisse(): + # instance = Metisse(redirection="file") + instance = Metisse(redirection="none") + # instance.initialize_code() + + instance.parameters.metallicity_dir = "/Users/rieder/Code/UvA/Toonen/tres3.0/amuse/src/amuse_metisse/data/hydrogen" + instance.parameters.metallicity_dir_he = "/Users/rieder/Code/UvA/Toonen/tres3.0/amuse/src/amuse_metisse/data/helium" + + instance.parameters.wd_mass_scheme = "Modified_mestel" + instance.parameters.bhns_mass_scheme = "Belczynski2008" + instance.parameters.initial_metallicity = 0.02 + + # instance.commit_parameters() + return instance + + +def test_metisse_sun(): + instance = setup_metisse() + star = Particles(1) + star.mass = 4.7893208794726441 | units.MSun + + stars_in_metisse = instance.particles.add_particles(star) + assert stars_in_metisse[0].mass == 1.0 | units.MSun + print(stars_in_metisse[0]) + print("Evolving...") + instance.evolve_model(10000.0 | units.yr) + print(stars_in_metisse[0]) + print("Done") + instance.stop() + + +def test_metisse_twostars(): + 
instance = setup_metisse() + star = Particles(2) + star.mass = [0.3, 2.5] | units.MSun + + stars_in_metisse = instance.particles.add_particles(star) + print(stars_in_metisse[0]) + print("Evolving...") + instance.evolve_model(1000.0 | units.yr) + print(stars_in_metisse.stellar_type) + print("Done") + instance.stop() + +def test_metisse_kroupa(): + from amuse.ic.kroupa import new_kroupa_mass_distribution + np.random.seed(127) + instance = setup_metisse() + number_of_stars = 1000 + star = Particles(number_of_stars) + star.mass = new_kroupa_mass_distribution(number_of_stars, mass_min=0.3 | units.MSun, mass_max=10.0 | units.MSun) + + stars_in_metisse = instance.particles.add_particles(star) + print(stars_in_metisse[0]) + print("Evolving...") + instance.evolve_model(1000.0 | units.yr) + print(stars_in_metisse) + print("Done") + instance.stop() + + +# test_metisse_sun() + +test_metisse_twostars() + +# test_metisse_kroupa() From 79e2bed8d8930ab68b09d8df9bdd7e42e0820a92 Mon Sep 17 00:00:00 2001 From: Lourens Veen Date: Sat, 21 Jun 2025 12:19:03 +0200 Subject: [PATCH 28/40] Add code tests --- .github/workflows/test-aarsethzare.yml | 79 +++++++++++++++++++ .github/workflows/test-adaptb.yml | 79 +++++++++++++++++++ .github/workflows/test-athena.yml | 79 +++++++++++++++++++ .github/workflows/test-bhtree.yml | 79 +++++++++++++++++++ .github/workflows/test-brutus.yml | 79 +++++++++++++++++++ .github/workflows/test-bse.yml | 79 +++++++++++++++++++ .github/workflows/test-capreole.yml | 79 +++++++++++++++++++ .github/workflows/test-evtwin.yml | 79 +++++++++++++++++++ .github/workflows/test-fastkick.yml | 79 +++++++++++++++++++ .github/workflows/test-fi.yml | 79 +++++++++++++++++++ .github/workflows/test-fractalcluster.yml | 79 +++++++++++++++++++ .github/workflows/test-gadget2.yml | 79 +++++++++++++++++++ .github/workflows/test-galactics.yml | 79 +++++++++++++++++++ .github/workflows/test-galaxia.yml | 79 +++++++++++++++++++ .github/workflows/test-halogen.yml | 79 
+++++++++++++++++++ .github/workflows/test-hermite-grx.yml | 79 +++++++++++++++++++ .github/workflows/test-hermite.yml | 79 +++++++++++++++++++ .github/workflows/test-hop.yml | 79 +++++++++++++++++++ .github/workflows/test-huayno.yml | 79 +++++++++++++++++++ .github/workflows/test-kepler-orbiters.yml | 79 +++++++++++++++++++ .github/workflows/test-kepler.yml | 79 +++++++++++++++++++ .github/workflows/test-krome.yml | 79 +++++++++++++++++++ .github/workflows/test-mameclot.yml | 79 +++++++++++++++++++ .github/workflows/test-mercury.yml | 79 +++++++++++++++++++ .github/workflows/test-mesa-r15140.yml | 75 ++++++++++++++++++ .github/workflows/test-mesa-r2208.yml | 79 +++++++++++++++++++ .github/workflows/test-mi6.yml | 79 +++++++++++++++++++ .github/workflows/test-mikkola.yml | 79 +++++++++++++++++++ .github/workflows/test-mmams.yml | 79 +++++++++++++++++++ .github/workflows/test-mobse.yml | 79 +++++++++++++++++++ .github/workflows/test-mocassin.yml | 79 +++++++++++++++++++ .github/workflows/test-mosse.yml | 79 +++++++++++++++++++ .github/workflows/test-mpiamrvac.yml | 79 +++++++++++++++++++ .github/workflows/test-nbody6xx.yml | 79 +++++++++++++++++++ .github/workflows/test-petar.yml | 79 +++++++++++++++++++ .github/workflows/test-ph4.yml | 79 +++++++++++++++++++ .github/workflows/test-phantom.yml | 79 +++++++++++++++++++ .github/workflows/test-phigrape.yml | 79 +++++++++++++++++++ .github/workflows/test-rebound.yml | 79 +++++++++++++++++++ .github/workflows/test-sakura.yml | 79 +++++++++++++++++++ .github/workflows/test-seba.yml | 57 ++++++------- .github/workflows/test-secularmultiple.yml | 79 +++++++++++++++++++ .github/workflows/test-sei.yml | 79 +++++++++++++++++++ .github/workflows/test-simplex.yml | 79 +++++++++++++++++++ .github/workflows/test-smalln.yml | 79 +++++++++++++++++++ .github/workflows/test-sphray.yml | 79 +++++++++++++++++++ .github/workflows/test-sse.yml | 79 +++++++++++++++++++ .github/workflows/test-symple.yml | 79 +++++++++++++++++++ 
.github/workflows/test-tupan.yml | 79 +++++++++++++++++++ .github/workflows/test-twobody.yml | 79 +++++++++++++++++++ .github/workflows/test-vader.yml | 79 +++++++++++++++++++ src/amuse_adaptb/src/makefile | 7 +- src/amuse_gadget2/Makefile | 4 + src/amuse_gadget2/tests/test_gadget2.py | 2 +- src/amuse_hermite/tests/test_hermite.py | 2 +- src/amuse_hermite_grx/Makefile | 3 +- .../patches/{series_mesa => series} | 0 src/amuse_mi6/src/Vector3.h | 2 +- src/amuse_petar/Makefile | 11 ++- src/amuse_petar/tests/test_petar.py | 2 +- support/setup/installing.sh | 6 ++ 61 files changed, 3997 insertions(+), 45 deletions(-) create mode 100644 .github/workflows/test-aarsethzare.yml create mode 100644 .github/workflows/test-adaptb.yml create mode 100644 .github/workflows/test-athena.yml create mode 100644 .github/workflows/test-bhtree.yml create mode 100644 .github/workflows/test-brutus.yml create mode 100644 .github/workflows/test-bse.yml create mode 100644 .github/workflows/test-capreole.yml create mode 100644 .github/workflows/test-evtwin.yml create mode 100644 .github/workflows/test-fastkick.yml create mode 100644 .github/workflows/test-fi.yml create mode 100644 .github/workflows/test-fractalcluster.yml create mode 100644 .github/workflows/test-gadget2.yml create mode 100644 .github/workflows/test-galactics.yml create mode 100644 .github/workflows/test-galaxia.yml create mode 100644 .github/workflows/test-halogen.yml create mode 100644 .github/workflows/test-hermite-grx.yml create mode 100644 .github/workflows/test-hermite.yml create mode 100644 .github/workflows/test-hop.yml create mode 100644 .github/workflows/test-huayno.yml create mode 100644 .github/workflows/test-kepler-orbiters.yml create mode 100644 .github/workflows/test-kepler.yml create mode 100644 .github/workflows/test-krome.yml create mode 100644 .github/workflows/test-mameclot.yml create mode 100644 .github/workflows/test-mercury.yml create mode 100644 .github/workflows/test-mesa-r15140.yml create mode 100644 
.github/workflows/test-mesa-r2208.yml create mode 100644 .github/workflows/test-mi6.yml create mode 100644 .github/workflows/test-mikkola.yml create mode 100644 .github/workflows/test-mmams.yml create mode 100644 .github/workflows/test-mobse.yml create mode 100644 .github/workflows/test-mocassin.yml create mode 100644 .github/workflows/test-mosse.yml create mode 100644 .github/workflows/test-mpiamrvac.yml create mode 100644 .github/workflows/test-nbody6xx.yml create mode 100644 .github/workflows/test-petar.yml create mode 100644 .github/workflows/test-ph4.yml create mode 100644 .github/workflows/test-phantom.yml create mode 100644 .github/workflows/test-phigrape.yml create mode 100644 .github/workflows/test-rebound.yml create mode 100644 .github/workflows/test-sakura.yml create mode 100644 .github/workflows/test-secularmultiple.yml create mode 100644 .github/workflows/test-sei.yml create mode 100644 .github/workflows/test-simplex.yml create mode 100644 .github/workflows/test-smalln.yml create mode 100644 .github/workflows/test-sphray.yml create mode 100644 .github/workflows/test-sse.yml create mode 100644 .github/workflows/test-symple.yml create mode 100644 .github/workflows/test-tupan.yml create mode 100644 .github/workflows/test-twobody.yml create mode 100644 .github/workflows/test-vader.yml rename src/amuse_mesa_r15140/patches/{series_mesa => series} (100%) diff --git a/.github/workflows/test-aarsethzare.yml b/.github/workflows/test-aarsethzare.yml new file mode 100644 index 0000000000..a0c0259b20 --- /dev/null +++ b/.github/workflows/test-aarsethzare.yml @@ -0,0 +1,79 @@ +name: Test AMUSE aarsethzare + +on: + push: + paths: + - .github/workflows/test-aarsethzare.yml + - 'src/amuse_aarsethzare/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-aarsethzare.yml + - 'src/amuse_aarsethzare/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest 
+ + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build aarsethzare + run: | + ./setup install amuse-aarsethzare + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test aarsethzare + run: | + ./setup test amuse-aarsethzare + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-adaptb.yml b/.github/workflows/test-adaptb.yml new file mode 100644 index 0000000000..31dadc0b3a --- /dev/null +++ b/.github/workflows/test-adaptb.yml @@ -0,0 +1,79 @@ +name: Test AMUSE adaptb + +on: + push: + paths: + - .github/workflows/test-adaptb.yml + - 'src/amuse_adaptb/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-adaptb.yml + - 'src/amuse_adaptb/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} 
+ + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build adaptb + run: | + ./setup install amuse-adaptb + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test adaptb + run: | + ./setup test amuse-adaptb + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-athena.yml b/.github/workflows/test-athena.yml new file mode 100644 index 0000000000..754fb4d6aa --- /dev/null +++ b/.github/workflows/test-athena.yml @@ -0,0 +1,79 @@ +name: Test AMUSE athena + +on: + push: + paths: + - .github/workflows/test-athena.yml + - 'src/amuse_athena/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-athena.yml + - 'src/amuse_athena/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: 
conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build athena + run: | + ./setup install amuse-athena + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test athena + run: | + ./setup test amuse-athena + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-bhtree.yml b/.github/workflows/test-bhtree.yml new file mode 100644 index 0000000000..65009088e6 --- /dev/null +++ b/.github/workflows/test-bhtree.yml @@ -0,0 +1,79 @@ +name: Test AMUSE bhtree + +on: + push: + paths: + - .github/workflows/test-bhtree.yml + - 'src/amuse_bhtree/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-bhtree.yml + - 'src/amuse_bhtree/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: 
true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build bhtree + run: | + ./setup install amuse-bhtree + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test bhtree + run: | + ./setup test amuse-bhtree + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-brutus.yml b/.github/workflows/test-brutus.yml new file mode 100644 index 0000000000..94cd7d3ac2 --- /dev/null +++ b/.github/workflows/test-brutus.yml @@ -0,0 +1,79 @@ +name: Test AMUSE brutus + +on: + push: + paths: + - .github/workflows/test-brutus.yml + - 'src/amuse_brutus/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-brutus.yml + - 'src/amuse_brutus/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - 
name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build brutus + run: | + ./setup install amuse-brutus + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test brutus + run: | + ./setup test amuse-brutus + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-bse.yml b/.github/workflows/test-bse.yml new file mode 100644 index 0000000000..188d588b40 --- /dev/null +++ b/.github/workflows/test-bse.yml @@ -0,0 +1,79 @@ +name: Test AMUSE bse + +on: + push: + paths: + - .github/workflows/test-bse.yml + - 'src/amuse_bse/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-bse.yml + - 'src/amuse_bse/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install 
dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build bse + run: | + ./setup install amuse-bse + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test bse + run: | + ./setup test amuse-bse + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-capreole.yml b/.github/workflows/test-capreole.yml new file mode 100644 index 0000000000..391cd9125e --- /dev/null +++ b/.github/workflows/test-capreole.yml @@ -0,0 +1,79 @@ +name: Test AMUSE capreole + +on: + push: + paths: + - .github/workflows/test-capreole.yml + - 'src/amuse_capreole/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-capreole.yml + - 'src/amuse_capreole/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler 
cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build capreole + run: | + ./setup install amuse-capreole + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test capreole + run: | + ./setup test amuse-capreole + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-evtwin.yml b/.github/workflows/test-evtwin.yml new file mode 100644 index 0000000000..526418939c --- /dev/null +++ b/.github/workflows/test-evtwin.yml @@ -0,0 +1,79 @@ +name: Test AMUSE evtwin + +on: + push: + paths: + - .github/workflows/test-evtwin.yml + - 'src/amuse_evtwin/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-evtwin.yml + - 'src/amuse_evtwin/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python 
pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build evtwin + run: | + ./setup install amuse-evtwin + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test evtwin + run: | + ./setup test amuse-evtwin + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-fastkick.yml b/.github/workflows/test-fastkick.yml new file mode 100644 index 0000000000..feb67ba3fa --- /dev/null +++ b/.github/workflows/test-fastkick.yml @@ -0,0 +1,79 @@ +name: Test AMUSE fastkick + +on: + push: + paths: + - .github/workflows/test-fastkick.yml + - 'src/amuse_fastkick/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-fastkick.yml + - 'src/amuse_fastkick/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip 
bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build fastkick + run: | + ./setup install amuse-fastkick + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test fastkick + run: | + ./setup test amuse-fastkick + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-fi.yml b/.github/workflows/test-fi.yml new file mode 100644 index 0000000000..c15e18ab7a --- /dev/null +++ b/.github/workflows/test-fi.yml @@ -0,0 +1,79 @@ +name: Test AMUSE fi + +on: + push: + paths: + - .github/workflows/test-fi.yml + - 'src/amuse_fi/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-fi.yml + - 'src/amuse_fi/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas 
liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build fi + run: | + ./setup install amuse-fi + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test fi + run: | + ./setup test amuse-fi + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-fractalcluster.yml b/.github/workflows/test-fractalcluster.yml new file mode 100644 index 0000000000..964a92f100 --- /dev/null +++ b/.github/workflows/test-fractalcluster.yml @@ -0,0 +1,79 @@ +name: Test AMUSE fractalcluster + +on: + push: + paths: + - .github/workflows/test-fractalcluster.yml + - 'src/amuse_fractalcluster/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-fractalcluster.yml + - 'src/amuse_fractalcluster/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib 
pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build fractalcluster + run: | + ./setup install amuse-fractalcluster + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test fractalcluster + run: | + ./setup test amuse-fractalcluster + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-gadget2.yml b/.github/workflows/test-gadget2.yml new file mode 100644 index 0000000000..2c0205d983 --- /dev/null +++ b/.github/workflows/test-gadget2.yml @@ -0,0 +1,79 @@ +name: Test AMUSE gadget2 + +on: + push: + paths: + - .github/workflows/test-gadget2.yml + - 'src/amuse_gadget2/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-gadget2.yml + - 'src/amuse_gadget2/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 
'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build gadget2 + run: | + ./setup install amuse-gadget2 + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test gadget2 + run: | + ./setup test amuse-gadget2 + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-galactics.yml b/.github/workflows/test-galactics.yml new file mode 100644 index 0000000000..784783645a --- /dev/null +++ b/.github/workflows/test-galactics.yml @@ -0,0 +1,79 @@ +name: Test AMUSE galactics + +on: + push: + paths: + - .github/workflows/test-galactics.yml + - 'src/amuse_galactics/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-galactics.yml + - 'src/amuse_galactics/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 
'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build galactics + run: | + ./setup install amuse-galactics + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test galactics + run: | + ./setup test amuse-galactics + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-galaxia.yml b/.github/workflows/test-galaxia.yml new file mode 100644 index 0000000000..07d1a58f93 --- /dev/null +++ b/.github/workflows/test-galaxia.yml @@ -0,0 +1,79 @@ +name: Test AMUSE galaxia + +on: + push: + paths: + - .github/workflows/test-galaxia.yml + - 'src/amuse_galaxia/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-galaxia.yml + - 'src/amuse_galaxia/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - 
name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build galaxia + run: | + ./setup install amuse-galaxia + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test galaxia + run: | + ./setup test amuse-galaxia + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-halogen.yml b/.github/workflows/test-halogen.yml new file mode 100644 index 0000000000..f793390b9d --- /dev/null +++ b/.github/workflows/test-halogen.yml @@ -0,0 +1,79 @@ +name: Test AMUSE halogen + +on: + push: + paths: + - .github/workflows/test-halogen.yml + - 'src/amuse_halogen/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-halogen.yml + - 'src/amuse_halogen/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 
'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build halogen + run: | + ./setup install amuse-halogen + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test halogen + run: | + ./setup test amuse-halogen + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-hermite-grx.yml b/.github/workflows/test-hermite-grx.yml new file mode 100644 index 0000000000..16acbd8b1c --- /dev/null +++ b/.github/workflows/test-hermite-grx.yml @@ -0,0 +1,79 @@ +name: Test AMUSE hermite-grx + +on: + push: + paths: + - .github/workflows/test-hermite-grx.yml + - 'src/amuse_hermite_grx/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-hermite-grx.yml + - 'src/amuse_hermite_grx/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} 
+ run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build hermite-grx + run: | + ./setup install amuse-hermite-grx + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test hermite-grx + run: | + ./setup test amuse-hermite-grx + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-hermite.yml b/.github/workflows/test-hermite.yml new file mode 100644 index 0000000000..27641d7c0f --- /dev/null +++ b/.github/workflows/test-hermite.yml @@ -0,0 +1,79 @@ +name: Test AMUSE hermite + +on: + push: + paths: + - .github/workflows/test-hermite.yml + - 'src/amuse_hermite/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-hermite.yml + - 'src/amuse_hermite/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p 
"$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build hermite + run: | + ./setup install amuse-hermite + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test hermite + run: | + ./setup test amuse-hermite + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-hop.yml b/.github/workflows/test-hop.yml new file mode 100644 index 0000000000..53b9b11872 --- /dev/null +++ b/.github/workflows/test-hop.yml @@ -0,0 +1,79 @@ +name: Test AMUSE hop + +on: + push: + paths: + - .github/workflows/test-hop.yml + - 'src/amuse_hop/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-hop.yml + - 'src/amuse_hop/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" 
>>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build hop + run: | + ./setup install amuse-hop + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test hop + run: | + ./setup test amuse-hop + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-huayno.yml b/.github/workflows/test-huayno.yml new file mode 100644 index 0000000000..b9b91f2cfa --- /dev/null +++ b/.github/workflows/test-huayno.yml @@ -0,0 +1,79 @@ +name: Test AMUSE huayno + +on: + push: + paths: + - .github/workflows/test-huayno.yml + - 'src/amuse_huayno/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-huayno.yml + - 'src/amuse_huayno/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" 
>>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build huayno + run: | + ./setup install amuse-huayno + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test huayno + run: | + ./setup test amuse-huayno + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-kepler-orbiters.yml b/.github/workflows/test-kepler-orbiters.yml new file mode 100644 index 0000000000..5622027bd8 --- /dev/null +++ b/.github/workflows/test-kepler-orbiters.yml @@ -0,0 +1,79 @@ +name: Test AMUSE kepler-orbiters + +on: + push: + paths: + - .github/workflows/test-kepler-orbiters.yml + - 'src/amuse_kepler_orbiters/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-kepler-orbiters.yml + - 'src/amuse_kepler_orbiters/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe 
= true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build kepler-orbiters + run: | + ./setup install amuse-kepler-orbiters + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test kepler-orbiters + run: | + ./setup test amuse-kepler-orbiters + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-kepler.yml b/.github/workflows/test-kepler.yml new file mode 100644 index 0000000000..153e2be371 --- /dev/null +++ b/.github/workflows/test-kepler.yml @@ -0,0 +1,79 @@ +name: Test AMUSE kepler + +on: + push: + paths: + - .github/workflows/test-kepler.yml + - 'src/amuse_kepler/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-kepler.yml + - 'src/amuse_kepler/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" 
>>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build kepler + run: | + ./setup install amuse-kepler + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test kepler + run: | + ./setup test amuse-kepler + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-krome.yml b/.github/workflows/test-krome.yml new file mode 100644 index 0000000000..28fb71a5bf --- /dev/null +++ b/.github/workflows/test-krome.yml @@ -0,0 +1,79 @@ +name: Test AMUSE krome + +on: + push: + paths: + - .github/workflows/test-krome.yml + - 'src/amuse_krome/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-krome.yml + - 'src/amuse_krome/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: 
actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build krome + run: | + ./setup install amuse-krome + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test krome + run: | + ./setup test amuse-krome + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-mameclot.yml b/.github/workflows/test-mameclot.yml new file mode 100644 index 0000000000..b010337ad8 --- /dev/null +++ b/.github/workflows/test-mameclot.yml @@ -0,0 +1,79 @@ +name: Test AMUSE mameclot + +on: + push: + paths: + - .github/workflows/test-mameclot.yml + - 'src/amuse_mameclot/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-mameclot.yml + - 'src/amuse_mameclot/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + 
fetch-tags: true + + - name: Build mameclot + run: | + ./setup install amuse-mameclot + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test mameclot + run: | + ./setup test amuse-mameclot + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-mercury.yml b/.github/workflows/test-mercury.yml new file mode 100644 index 0000000000..a6ca570104 --- /dev/null +++ b/.github/workflows/test-mercury.yml @@ -0,0 +1,79 @@ +name: Test AMUSE mercury + +on: + push: + paths: + - .github/workflows/test-mercury.yml + - 'src/amuse_mercury/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-mercury.yml + - 'src/amuse_mercury/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build mercury + 
run: | + ./setup install amuse-mercury + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test mercury + run: | + ./setup test amuse-mercury + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-mesa-r15140.yml b/.github/workflows/test-mesa-r15140.yml new file mode 100644 index 0000000000..6c86fe5e87 --- /dev/null +++ b/.github/workflows/test-mesa-r15140.yml @@ -0,0 +1,75 @@ +name: Test AMUSE mesa-r15140 + +on: + push: + paths: + - .github/workflows/test-mesa-r15140.yml + - 'src/amuse_mesa_r15140/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-mesa-r15140.yml + - 'src/amuse_mesa_r15140/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build mesa-r15140 + run: | + 
./setup develop amuse-mesa-r15140 + + - name: Test mesa-r15140 + run: | + ./setup test amuse-mesa-r15140 + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-mesa-r2208.yml b/.github/workflows/test-mesa-r2208.yml new file mode 100644 index 0000000000..fef37cf543 --- /dev/null +++ b/.github/workflows/test-mesa-r2208.yml @@ -0,0 +1,79 @@ +name: Test AMUSE mesa-r2208 + +on: + push: + paths: + - .github/workflows/test-mesa-r2208.yml + - 'src/amuse_mesa_r2208/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-mesa-r2208.yml + - 'src/amuse_mesa_r2208/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build mesa-r2208 + run: | + ./setup install amuse-mesa-r2208 + + - name: Ensure we test only the installed package + run: | 
+ ./setup distclean + + - name: Test mesa-r2208 + run: | + ./setup test amuse-mesa-r2208 + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-mi6.yml b/.github/workflows/test-mi6.yml new file mode 100644 index 0000000000..fc54632ad4 --- /dev/null +++ b/.github/workflows/test-mi6.yml @@ -0,0 +1,79 @@ +name: Test AMUSE mi6 + +on: + push: + paths: + - .github/workflows/test-mi6.yml + - 'src/amuse_mi6/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-mi6.yml + - 'src/amuse_mi6/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build mi6 + run: | + ./setup install amuse-mi6 + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test mi6 + run: | + ./setup test amuse-mi6 + + - 
name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-mikkola.yml b/.github/workflows/test-mikkola.yml new file mode 100644 index 0000000000..a90f8430a3 --- /dev/null +++ b/.github/workflows/test-mikkola.yml @@ -0,0 +1,79 @@ +name: Test AMUSE mikkola + +on: + push: + paths: + - .github/workflows/test-mikkola.yml + - 'src/amuse_mikkola/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-mikkola.yml + - 'src/amuse_mikkola/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build mikkola + run: | + ./setup install amuse-mikkola + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test mikkola + run: | + ./setup test amuse-mikkola + + - name: Archive build logs + if: always() + uses: 
actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-mmams.yml b/.github/workflows/test-mmams.yml new file mode 100644 index 0000000000..9b42a42600 --- /dev/null +++ b/.github/workflows/test-mmams.yml @@ -0,0 +1,79 @@ +name: Test AMUSE mmams + +on: + push: + paths: + - .github/workflows/test-mmams.yml + - 'src/amuse_mmams/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-mmams.yml + - 'src/amuse_mmams/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build mmams + run: | + ./setup install amuse-mmams + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test mmams + run: | + ./setup test amuse-mmams + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: 
${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-mobse.yml b/.github/workflows/test-mobse.yml new file mode 100644 index 0000000000..777cdf33e1 --- /dev/null +++ b/.github/workflows/test-mobse.yml @@ -0,0 +1,79 @@ +name: Test AMUSE mobse + +on: + push: + paths: + - .github/workflows/test-mobse.yml + - 'src/amuse_mobse/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-mobse.yml + - 'src/amuse_mobse/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build mobse + run: | + ./setup install amuse-mobse + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test mobse + run: | + ./setup test amuse-mobse + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git 
a/.github/workflows/test-mocassin.yml b/.github/workflows/test-mocassin.yml new file mode 100644 index 0000000000..b31a72d250 --- /dev/null +++ b/.github/workflows/test-mocassin.yml @@ -0,0 +1,79 @@ +name: Test AMUSE mocassin + +on: + push: + paths: + - .github/workflows/test-mocassin.yml + - 'src/amuse_mocassin/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-mocassin.yml + - 'src/amuse_mocassin/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build mocassin + run: | + ./setup install amuse-mocassin + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test mocassin + run: | + ./setup test amuse-mocassin + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-mosse.yml 
b/.github/workflows/test-mosse.yml new file mode 100644 index 0000000000..3192aa63a0 --- /dev/null +++ b/.github/workflows/test-mosse.yml @@ -0,0 +1,79 @@ +name: Test AMUSE mosse + +on: + push: + paths: + - .github/workflows/test-mosse.yml + - 'src/amuse_mosse/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-mosse.yml + - 'src/amuse_mosse/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build mosse + run: | + ./setup install amuse-mosse + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test mosse + run: | + ./setup test amuse-mosse + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-mpiamrvac.yml b/.github/workflows/test-mpiamrvac.yml new file mode 100644 index 
0000000000..0bd7183025 --- /dev/null +++ b/.github/workflows/test-mpiamrvac.yml @@ -0,0 +1,79 @@ +name: Test AMUSE mpiamrvac + +on: + push: + paths: + - .github/workflows/test-mpiamrvac.yml + - 'src/amuse_mpiamrvac/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-mpiamrvac.yml + - 'src/amuse_mpiamrvac/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build mpiamrvac + run: | + ./setup install amuse-mpiamrvac + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test mpiamrvac + run: | + ./setup test amuse-mpiamrvac + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-nbody6xx.yml b/.github/workflows/test-nbody6xx.yml new file mode 100644 index 0000000000..818b2e5cc2 --- 
/dev/null +++ b/.github/workflows/test-nbody6xx.yml @@ -0,0 +1,79 @@ +name: Test AMUSE nbody6xx + +on: + push: + paths: + - .github/workflows/test-nbody6xx.yml + - 'src/amuse_nbody6xx/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-nbody6xx.yml + - 'src/amuse_nbody6xx/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build nbody6xx + run: | + ./setup install amuse-nbody6xx + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test nbody6xx + run: | + ./setup test amuse-nbody6xx + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-petar.yml b/.github/workflows/test-petar.yml new file mode 100644 index 0000000000..a282fca891 --- /dev/null +++ 
b/.github/workflows/test-petar.yml @@ -0,0 +1,79 @@ +name: Test AMUSE petar + +on: + push: + paths: + - .github/workflows/test-petar.yml + - 'src/amuse_petar/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-petar.yml + - 'src/amuse_petar/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build petar + run: | + ./setup install amuse-petar + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test petar + run: | + ./setup test amuse-petar + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-ph4.yml b/.github/workflows/test-ph4.yml new file mode 100644 index 0000000000..f2701085f0 --- /dev/null +++ b/.github/workflows/test-ph4.yml @@ -0,0 +1,79 @@ +name: Test AMUSE ph4 + +on: 
+ push: + paths: + - .github/workflows/test-ph4.yml + - 'src/amuse_ph4/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-ph4.yml + - 'src/amuse_ph4/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build ph4 + run: | + ./setup install amuse-ph4 + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test ph4 + run: | + ./setup test amuse-ph4 + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-phantom.yml b/.github/workflows/test-phantom.yml new file mode 100644 index 0000000000..0d579a73e3 --- /dev/null +++ b/.github/workflows/test-phantom.yml @@ -0,0 +1,79 @@ +name: Test AMUSE phantom + +on: + push: + paths: + - .github/workflows/test-phantom.yml + - 'src/amuse_phantom/**' 
+ pull_request: + branches: + - main + paths: + - .github/workflows/test-phantom.yml + - 'src/amuse_phantom/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build phantom + run: | + ./setup install amuse-phantom + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test phantom + run: | + ./setup test amuse-phantom + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-phigrape.yml b/.github/workflows/test-phigrape.yml new file mode 100644 index 0000000000..12564a5d7f --- /dev/null +++ b/.github/workflows/test-phigrape.yml @@ -0,0 +1,79 @@ +name: Test AMUSE phigrape + +on: + push: + paths: + - .github/workflows/test-phigrape.yml + - 'src/amuse_phigrape/**' + pull_request: + branches: + - main + paths: 
+ - .github/workflows/test-phigrape.yml + - 'src/amuse_phigrape/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build phigrape + run: | + ./setup install amuse-phigrape + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test phigrape + run: | + ./setup test amuse-phigrape + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-rebound.yml b/.github/workflows/test-rebound.yml new file mode 100644 index 0000000000..24e7d1e6b3 --- /dev/null +++ b/.github/workflows/test-rebound.yml @@ -0,0 +1,79 @@ +name: Test AMUSE rebound + +on: + push: + paths: + - .github/workflows/test-rebound.yml + - 'src/amuse_rebound/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-rebound.yml + - 
'src/amuse_rebound/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build rebound + run: | + ./setup install amuse-rebound + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test rebound + run: | + ./setup test amuse-rebound + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-sakura.yml b/.github/workflows/test-sakura.yml new file mode 100644 index 0000000000..5d86a39986 --- /dev/null +++ b/.github/workflows/test-sakura.yml @@ -0,0 +1,79 @@ +name: Test AMUSE sakura + +on: + push: + paths: + - .github/workflows/test-sakura.yml + - 'src/amuse_sakura/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-sakura.yml + - 'src/amuse_sakura/**' + workflow_dispatch: + +jobs: + 
test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build sakura + run: | + ./setup install amuse-sakura + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test sakura + run: | + ./setup test amuse-sakura + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-seba.yml b/.github/workflows/test-seba.yml index b99dee76bb..788edb9677 100644 --- a/.github/workflows/test-seba.yml +++ b/.github/workflows/test-seba.yml @@ -1,22 +1,24 @@ -# This workflow will install AMUSE and SeBa and run SeBa tests with a single version of Python - -name: Build and test SeBa +name: Test AMUSE seba on: push: paths: - - src/amuse_seba + - .github/workflows/test-seba.yml + - 'src/amuse_seba/**' pull_request: + branches: + - main paths: - - src/amuse_seba + - 
.github/workflows/test-seba.yml + - 'src/amuse_seba/**' + workflow_dispatch: jobs: test: - name: Test SeBa on ${{ matrix.os }} - runs-on: ${{ matrix.os }} strategy: + fail-fast: false matrix: os: - ubuntu-latest @@ -41,44 +43,37 @@ jobs: - name: Install dependencies run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake openmpi gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - # - name: Configure OpenMPI - # run: | - # mkdir -p "$HOME/.openmpi" - # echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - # echo "mpi_yield_when_idle = true" >>"$HOME/.openmpi/mca-params.conf" - # mkdir -p "$HOME/.prte" - # echo "rmaps_default_mapping_policy = :oversubscribe" >>"$HOME/.prte/mca-params.conf" - # echo "prte_if_include = lo" >>"$HOME/.prte/mca-params.conf" + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - name: Checkout uses: actions/checkout@v4 with: - fetch-depth: 100 + fetch-depth: 0 fetch-tags: true - - name: Build SeBa + - name: Build seba run: | ./setup install amuse-seba - - name: Test SeBa - # env: - # OMPI_MCA_rmaps_base_oversubscribe: 1 - # PRTE_MCA_rmaps_base_oversubscribe: 1 - # PRTE_MCA_rmaps_default_mapping_policy: ":oversubscribe" - # OMPI_MCA_btl_tcp_if_include: lo - # OMPI_MCA_mpi_yield_when_idle: 1 - # OMPI_MCA_pmix_server_max_wait: 10 + - name: 
Ensure we test only the installed package run: | - ./setup test amuse-seba + ./setup distclean - - name: Save build logs + - name: Test seba run: | - tar czf logs-${{ matrix.os }}.tar.gz support/logs + ./setup test amuse-seba - name: Archive build logs + if: always() uses: actions/upload-artifact@v4 with: - name: logs-${{ matrix.os }}.tar.gz - path: logs-${{ matrix.os }}.tar.gz + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-secularmultiple.yml b/.github/workflows/test-secularmultiple.yml new file mode 100644 index 0000000000..efbd44834d --- /dev/null +++ b/.github/workflows/test-secularmultiple.yml @@ -0,0 +1,79 @@ +name: Test AMUSE secularmultiple + +on: + push: + paths: + - .github/workflows/test-secularmultiple.yml + - 'src/amuse_secularmultiple/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-secularmultiple.yml + - 'src/amuse_secularmultiple/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + 
+ - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build secularmultiple + run: | + ./setup install amuse-secularmultiple + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test secularmultiple + run: | + ./setup test amuse-secularmultiple + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-sei.yml b/.github/workflows/test-sei.yml new file mode 100644 index 0000000000..d71d107291 --- /dev/null +++ b/.github/workflows/test-sei.yml @@ -0,0 +1,79 @@ +name: Test AMUSE sei + +on: + push: + paths: + - .github/workflows/test-sei.yml + - 'src/amuse_sei/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-sei.yml + - 'src/amuse_sei/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + 
with: + fetch-depth: 0 + fetch-tags: true + + - name: Build sei + run: | + ./setup install amuse-sei + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test sei + run: | + ./setup test amuse-sei + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-simplex.yml b/.github/workflows/test-simplex.yml new file mode 100644 index 0000000000..0266abe8e9 --- /dev/null +++ b/.github/workflows/test-simplex.yml @@ -0,0 +1,79 @@ +name: Test AMUSE simplex + +on: + push: + paths: + - .github/workflows/test-simplex.yml + - 'src/amuse_simplex/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-simplex.yml + - 'src/amuse_simplex/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib healpix_cxx qhull pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - 
name: Build simplex + run: | + ./setup install amuse-simplex + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test simplex + run: | + ./setup test amuse-simplex + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-smalln.yml b/.github/workflows/test-smalln.yml new file mode 100644 index 0000000000..a293713dcd --- /dev/null +++ b/.github/workflows/test-smalln.yml @@ -0,0 +1,79 @@ +name: Test AMUSE smalln + +on: + push: + paths: + - .github/workflows/test-smalln.yml + - 'src/amuse_smalln/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-smalln.yml + - 'src/amuse_smalln/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build smalln + run: | + ./setup install amuse-smalln 
+ + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test smalln + run: | + ./setup test amuse-smalln + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-sphray.yml b/.github/workflows/test-sphray.yml new file mode 100644 index 0000000000..3b04fcc294 --- /dev/null +++ b/.github/workflows/test-sphray.yml @@ -0,0 +1,79 @@ +name: Test AMUSE sphray + +on: + push: + paths: + - .github/workflows/test-sphray.yml + - 'src/amuse_sphray/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-sphray.yml + - 'src/amuse_sphray/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build sphray + run: | + ./setup install amuse-sphray + + - name: Ensure we test only the installed package + run: | 
+ ./setup distclean + + - name: Test sphray + run: | + ./setup test amuse-sphray + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-sse.yml b/.github/workflows/test-sse.yml new file mode 100644 index 0000000000..a68f805f32 --- /dev/null +++ b/.github/workflows/test-sse.yml @@ -0,0 +1,79 @@ +name: Test AMUSE sse + +on: + push: + paths: + - .github/workflows/test-sse.yml + - 'src/amuse_sse/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-sse.yml + - 'src/amuse_sse/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build sse + run: | + ./setup install amuse-sse + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test sse + run: | + ./setup test amuse-sse + + - name: 
Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-symple.yml b/.github/workflows/test-symple.yml new file mode 100644 index 0000000000..ac4e00c4ba --- /dev/null +++ b/.github/workflows/test-symple.yml @@ -0,0 +1,79 @@ +name: Test AMUSE symple + +on: + push: + paths: + - .github/workflows/test-symple.yml + - 'src/amuse_symple/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-symple.yml + - 'src/amuse_symple/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build symple + run: | + ./setup install amuse-symple + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test symple + run: | + ./setup test amuse-symple + + - name: Archive build logs + if: always() + uses: 
actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-tupan.yml b/.github/workflows/test-tupan.yml new file mode 100644 index 0000000000..61f87da3f0 --- /dev/null +++ b/.github/workflows/test-tupan.yml @@ -0,0 +1,79 @@ +name: Test AMUSE tupan + +on: + push: + paths: + - .github/workflows/test-tupan.yml + - 'src/amuse_tupan/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-tupan.yml + - 'src/amuse_tupan/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build tupan + run: | + ./setup install amuse-tupan + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test tupan + run: | + ./setup test amuse-tupan + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: 
${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/.github/workflows/test-twobody.yml b/.github/workflows/test-twobody.yml new file mode 100644 index 0000000000..7ad65b80a5 --- /dev/null +++ b/.github/workflows/test-twobody.yml @@ -0,0 +1,79 @@ +name: Test AMUSE twobody + +on: + push: + paths: + - .github/workflows/test-twobody.yml + - 'src/amuse_twobody/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-twobody.yml + - 'src/amuse_twobody/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build twobody + run: | + ./setup install amuse-twobody + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test twobody + run: | + ./setup test amuse-twobody + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + 
if-no-files-found: warn diff --git a/.github/workflows/test-vader.yml b/.github/workflows/test-vader.yml new file mode 100644 index 0000000000..e3a784d702 --- /dev/null +++ b/.github/workflows/test-vader.yml @@ -0,0 +1,79 @@ +name: Test AMUSE vader + +on: + push: + paths: + - .github/workflows/test-vader.yml + - 'src/amuse_vader/**' + pull_request: + branches: + - main + paths: + - .github/workflows/test-vader.yml + - 'src/amuse_vader/**' + workflow_dispatch: + +jobs: + test: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + - macos-latest + + defaults: + run: + shell: bash -el {0} + + steps: + - name: Set up conda + uses: conda-incubator/setup-miniconda@v3 + with: + auto-update-conda: true + channels: conda-forge + channel-priority: strict + + - name: Show conda info + run: | + conda info + conda list + + - name: Install dependencies + run: | + conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest + + - name: Configure OpenMPI + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + mkdir -p "$HOME/.openmpi" + echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" + echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true + + - name: Build vader + run: | + ./setup install amuse-vader + + - name: Ensure we test only the installed package + run: | + ./setup distclean + + - name: Test vader + run: | + ./setup test amuse-vader + + - name: Archive build logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: logs-${{ matrix.os }} + path: ${{ github.workspace }}/support/logs/ + if-no-files-found: warn diff --git a/src/amuse_adaptb/src/makefile 
b/src/amuse_adaptb/src/makefile index fc25d24090..9b768c69bb 100644 --- a/src/amuse_adaptb/src/makefile +++ b/src/amuse_adaptb/src/makefile @@ -1,8 +1,3 @@ -# standard amuse configuration include -# config.mk will be made after ./configure has run -AMUSE_DIR?=../../../../.. --include ${AMUSE_DIR}/config.mk - ################################################################### CXX ?= g++ @@ -26,7 +21,7 @@ all: $(OBJS) $(EXEC) $(CODELIB) ################################################################### integrator_MS.exe: $(OBJS) - $(CXX) $(CXXFLAGS) -o $(EXEC) $(OBJS) $(LIBS) + $(CXX) $(CXXFLAGS) $(LDFLAGS) -o $(EXEC) $(OBJS) $(LIBS) libadaptb.a: $(OBJS) rm -f $@ diff --git a/src/amuse_gadget2/Makefile b/src/amuse_gadget2/Makefile index 43aee04791..5339e8e060 100644 --- a/src/amuse_gadget2/Makefile +++ b/src/amuse_gadget2/Makefile @@ -81,7 +81,11 @@ package-%: %_contains python3 -m pip install -vv --no-cache-dir --no-deps --no-build-isolation --prefix ${PREFIX} packages/$* test-%: +ifneq ($(CI),) + cd packages/$* && pytest -k 'not noci' +else cd packages/$* && pytest +endif # Cleaning up diff --git a/src/amuse_gadget2/tests/test_gadget2.py b/src/amuse_gadget2/tests/test_gadget2.py index f5ef0d9341..8e5c678974 100644 --- a/src/amuse_gadget2/tests/test_gadget2.py +++ b/src/amuse_gadget2/tests/test_gadget2.py @@ -881,7 +881,7 @@ def test18(self): self.assertAlmostEqual(instance.dm_particles.z, [0., 0., 0.4] | units.kpc, places=6) instance.stop() - def test19(self): + def test19_noci(self): particles = new_plummer_model(31) instance = Gadget2(self.default_converter, number_of_workers=1) diff --git a/src/amuse_hermite/tests/test_hermite.py b/src/amuse_hermite/tests/test_hermite.py index 964c0819b8..04538f54ec 100644 --- a/src/amuse_hermite/tests/test_hermite.py +++ b/src/amuse_hermite/tests/test_hermite.py @@ -579,7 +579,7 @@ def test15(self): instance.stop() - def test16(self): + def test16_noci(self): particles = new_plummer_model(200) particles.scale_to_standard() 
instance = Hermite() diff --git a/src/amuse_hermite_grx/Makefile b/src/amuse_hermite_grx/Makefile index c72c752e81..802d504913 100644 --- a/src/amuse_hermite_grx/Makefile +++ b/src/amuse_hermite_grx/Makefile @@ -24,10 +24,11 @@ src/Hermite_GRX: | hermite_grx.tar.gz src # Building the code into a static library DEPFLAGS += $(STOPCOND_CFLAGS) -CFLAGS += $(DEPFLAGS) -pthread +CFLAGS += $(DEPFLAGS) -pthread -std=c++11 LDFLAGS += -pthread LDLIBS += $(STOPCOND_LIBS) +$(info libs: $(LDLIBS) sc: $(STOPCOND_LIBS)) CODELIB = src/Hermite_GRX/src/libhermite_grx.a diff --git a/src/amuse_mesa_r15140/patches/series_mesa b/src/amuse_mesa_r15140/patches/series similarity index 100% rename from src/amuse_mesa_r15140/patches/series_mesa rename to src/amuse_mesa_r15140/patches/series diff --git a/src/amuse_mi6/src/Vector3.h b/src/amuse_mi6/src/Vector3.h index f57753b4f6..c0335fae0b 100644 --- a/src/amuse_mi6/src/Vector3.h +++ b/src/amuse_mi6/src/Vector3.h @@ -114,7 +114,7 @@ class Vector3 } inline Vector3& operator /= (const double b){ - register double binv = 1.0/b; + const double binv = 1.0/b; v[0] *= binv; v[1] *= binv; v[2] *= binv; return *this; } diff --git a/src/amuse_petar/Makefile b/src/amuse_petar/Makefile index b4cebcc132..d050c1feb5 100644 --- a/src/amuse_petar/Makefile +++ b/src/amuse_petar/Makefile @@ -108,11 +108,16 @@ install-%: %_contains package-%: %_contains python3 -m pip install -vv --no-cache-dir --no-deps --no-build-isolation --prefix ${PREFIX} packages/$* + +# The tests use up to 5 workers, each of which will use all the cores via OpenMP, +# slowing things down enormously. By limiting the number of threads, the tests +# run in a reasonable time at least on my 8C/16T laptop. test-%: - # The tests use up to 5 workers, each of which will use all the cores via OpenMP, - # slowing things down enormously. By limiting the number of threads, the tests - # run in a reasonable time at least on my 8C/16T laptop. 
+ifneq ($(CI),) + cd packages/$* && OMP_NUM_THREADS=1 pytest -k 'not noci' +else cd packages/$* && OMP_NUM_THREADS=2 pytest +endif # Cleaning up diff --git a/src/amuse_petar/tests/test_petar.py b/src/amuse_petar/tests/test_petar.py index 0f56685e66..ee4327439e 100644 --- a/src/amuse_petar/tests/test_petar.py +++ b/src/amuse_petar/tests/test_petar.py @@ -31,7 +31,7 @@ def test_reversed_time_allowed(self): class TestPetar(TestWithMPI): - def test_small_plummer_model(self): + def test_small_plummer_model_noci(self): particles = plummer.new_plummer_model(31) instance = Petar(number_of_workers=1) # , debugger="xterm") diff --git a/support/setup/installing.sh b/support/setup/installing.sh index 102f6f05cf..554446c92d 100644 --- a/support/setup/installing.sh +++ b/support/setup/installing.sh @@ -187,16 +187,20 @@ install_package() { check_package "${package}" if ! is_subset "amuse-framework" "${INSTALLED_PACKAGES}" ; then + save_cmd="${cmd}" save_package="${package}" install_framework package="${save_package}" + cmd="${save_cmd}" fi if is_subset "${package}" "${NEEDS_SAPPORO_LIGHT}" ; then if ! is_subset "sapporo-light" "${INSTALLED_PACKAGES}" ; then + save_cmd="${cmd}" save_package="${package}" install_sapporo_light package="${save_package}" + cmd="${save_cmd}" fi fi @@ -207,9 +211,11 @@ install_package() { # If the code is e.g. CUDA-only, then there may not be a base package. if is_subset "${base_package}" "${EXTANT_PACKAGES}" ; then if ! 
is_subset "${base_package}" "${INSTALLED_PACKAGES}" ; then + save_cmd="${cmd}" save_package="${package}" install_package "${cmd}" "${base_package}" "${brief}" package="${save_package}" + cmd="${save_cmd}" fi fi fi From 0293c2e9a7db512ba960842ebfa4e6c389315544 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 24 Jun 2025 13:37:28 +0200 Subject: [PATCH 29/40] updated way of setting the metallicity/track dirs --- src/amuse_metisse/interface.f90 | 14 +++++++++++--- .../test_metisse_standaloneimitate.py | 12 ++++++++---- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/src/amuse_metisse/interface.f90 b/src/amuse_metisse/interface.f90 index 332c7f5812..ca3f420af5 100644 --- a/src/amuse_metisse/interface.f90 +++ b/src/amuse_metisse/interface.f90 @@ -83,7 +83,10 @@ function commit_parameters() ! This will read the tracks-so need to have set the paths before call METISSE_zcnsts(initial_Z, zpars, error) - if (error /= 0) return + if (error /= 0) then + commit_parameters = 1 + return + end if call assign_commons_main() end function @@ -113,7 +116,8 @@ function set_metallicity_dir(metallicity_dir_in) implicit none character(len = 256):: metallicity_dir_in integer:: set_metallicity_dir - METALLICITY_DIR = metallicity_dir_in + !METALLICITY_DIR = metallicity_dir_in + amuse_metallicity_dir = metallicity_dir_in set_metallicity_dir = 0 end function @@ -121,6 +125,8 @@ function get_metallicity_dir(metallicity_dir_out) implicit none character(len = 256):: metallicity_dir_out integer:: get_metallicity_dir + ! Note: this is only accurate*after*commit_parameters! + ! 
Maybe solve by returning amuse_metallicity_dir*only*if state is INITIALIZED metallicity_dir_out = METALLICITY_DIR get_metallicity_dir = 0 end function @@ -129,7 +135,8 @@ function set_metallicity_dir_he(metallicity_dir_he_in) implicit none character(len = 256):: metallicity_dir_he_in integer:: set_metallicity_dir_he - METALLICITY_DIR_HE = metallicity_dir_he_in + !METALLICITY_DIR_HE = metallicity_dir_he_in + amuse_metallicity_dir_he = metallicity_dir_he_in set_metallicity_dir_he = 0 end function @@ -137,6 +144,7 @@ function get_metallicity_dir_he(metallicity_dir_he_out) implicit none character(len = 256):: metallicity_dir_he_out integer:: get_metallicity_dir_he + ! Note: this is only accurate*after*commit_parameters! metallicity_dir_he_out = METALLICITY_DIR_HE get_metallicity_dir_he = 0 end function diff --git a/src/amuse_metisse/test_metisse_standaloneimitate.py b/src/amuse_metisse/test_metisse_standaloneimitate.py index 06f4e4d935..d4cd901872 100644 --- a/src/amuse_metisse/test_metisse_standaloneimitate.py +++ b/src/amuse_metisse/test_metisse_standaloneimitate.py @@ -33,12 +33,16 @@ def test_metisse_sun(): instance = setup_metisse() star = Particles(1) star.mass = 4.7893208794726441 | units.MSun + # star.mass = 1.0 | units.MSun stars_in_metisse = instance.particles.add_particles(star) - assert stars_in_metisse[0].mass == 1.0 | units.MSun + print(instance.parameters) + # assert stars_in_metisse[0].mass == 1.0 | units.MSun + assert stars_in_metisse[0].mass == 4.7893208794726441 | units.MSun print(stars_in_metisse[0]) print("Evolving...") - instance.evolve_model(10000.0 | units.yr) + # instance.evolve_model(1000.0 | units.yr) + instance.evolve_one_step(1) print(stars_in_metisse[0]) print("Done") instance.stop() @@ -74,8 +78,8 @@ def test_metisse_kroupa(): instance.stop() -# test_metisse_sun() +test_metisse_sun() -test_metisse_twostars() +# test_metisse_twostars() # test_metisse_kroupa() From 12c746a4486b4d98fd96da7b386079c45475fc56 Mon Sep 17 00:00:00 2001 From: 
Steven Rieder Date: Tue, 1 Jul 2025 14:21:56 +0200 Subject: [PATCH 30/40] Set correct unit for evolving! --- src/amuse_metisse/interface.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/amuse_metisse/interface.py b/src/amuse_metisse/interface.py index 396c5e4aca..edff20a458 100644 --- a/src/amuse_metisse/interface.py +++ b/src/amuse_metisse/interface.py @@ -438,3 +438,13 @@ def evolve_model(self, end_time=None, keep_synchronous=True): print(f"particle {i=} {particle.age=} {particle.mass=}") particle.evolve_for(particle.age + delta_time) self.model_time += delta_time + + def define_methods(self, handler): + se.StellarEvolution.define_methods(self, handler) + + # Metisse specific: uses Myr instead of yr + handler.add_method( + "evolve_for", + (handler.INDEX, units.mega(units.yr)), + (handler.ERROR_CODE,) + ) From 1261d47343a5d625c7e1381c38d9eeee54e981dd Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 1 Jul 2025 14:22:28 +0200 Subject: [PATCH 31/40] remove debug print --- src/amuse_metisse/interface.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/amuse_metisse/interface.py b/src/amuse_metisse/interface.py index edff20a458..41882ffec0 100644 --- a/src/amuse_metisse/interface.py +++ b/src/amuse_metisse/interface.py @@ -435,7 +435,6 @@ def evolve_model(self, end_time=None, keep_synchronous=True): ) print(f"delta_time = {delta_time}") for i, particle in enumerate(self.particles): - print(f"particle {i=} {particle.age=} {particle.mass=}") particle.evolve_for(particle.age + delta_time) self.model_time += delta_time From 170c7096a560aa6024533d8d9f13020c2e8261e3 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 1 Jul 2025 14:22:56 +0200 Subject: [PATCH 32/40] ignore data files --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index ab9fc0910f..273ec0359d 100644 --- a/.gitignore +++ b/.gitignore @@ -455,3 +455,5 @@ packages/dist # standard directory for a virtualenv env/ +src/amuse_metisse/data 
+src/amuse_metisse/src/METISSE From bf275089b4a13fa1d415e05a70e04ee18772949c Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 14 Jul 2025 13:21:21 +0200 Subject: [PATCH 33/40] remove duplicates --- src/amuse/community/metisse/test_storage.f90 | 43 ------------------- .../community/metisse/tests/test_metisse.py | 19 -------- 2 files changed, 62 deletions(-) delete mode 100644 src/amuse/community/metisse/test_storage.f90 delete mode 100644 src/amuse/community/metisse/tests/test_metisse.py diff --git a/src/amuse/community/metisse/test_storage.f90 b/src/amuse/community/metisse/test_storage.f90 deleted file mode 100644 index 4dd8a0c45b..0000000000 --- a/src/amuse/community/metisse/test_storage.f90 +++ /dev/null @@ -1,43 +0,0 @@ -program test_store_stars - use iso_c_binding - use store_stars - implicit none - - type(stars) :: star_system - integer :: new_ids(2) - integer :: ids_to_remove(2) - integer :: i, number_of_stars - - ! Test new_star - new_ids(1) = star_system%new_star(1.0_c_double) - new_ids(2) = star_system%new_star(2.0_c_double) - if (new_ids(1) /= 1 .or. new_ids(2) /= 2) then - error stop "new_star failed" - end if - - ! Test remove_star - ids_to_remove = [1, 2] - call star_system%remove_star(ids_to_remove(1)) - call star_system%remove_star(ids_to_remove(2)) - call star_system%get_number_of_stars(number_of_stars) - if (number_of_stars /= 0) then - write (*, *) "Number of stars: ", number_of_stars - error stop "remove_star failed" - end if - - ! 
Test edge cases - new_ids = star_system%new_star(1.0_c_double) - if (new_ids(1) /= 1) then - error stop "new_star failed with single star" - end if - - ids_to_remove(1) = 1 - call star_system%remove_star(ids_to_remove(1)) - call star_system%get_number_of_stars(number_of_stars) - if (number_of_stars /= 0) then - error stop "remove_star failed with single star" - end if - - write (*, *) "All tests passed" - -end program test_store_stars diff --git a/src/amuse/community/metisse/tests/test_metisse.py b/src/amuse/community/metisse/tests/test_metisse.py deleted file mode 100644 index f7045f1ae0..0000000000 --- a/src/amuse/community/metisse/tests/test_metisse.py +++ /dev/null @@ -1,19 +0,0 @@ -from amuse.test.amusetest import TestWithMPI - -from .interface import MetisseInterface -from .interface import Metisse - -class MetisseInterfaceTests(TestWithMPI): - def test_initialize(self): - instance = MetisseInterface() - error = instance.initialize() - self.assertEqual(error, 0) - instance.stop() - - -class MetisseTests(TestWithMPI): - def test_initialize(self): - instance = Metisse() - error = instance.initialize() - self.assertEqual(error, 0) - instance.stop() From a57acfeba2368580e3ce390476bfa10943924e99 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 14 Jul 2025 13:22:29 +0200 Subject: [PATCH 34/40] update test --- .../test_metisse_standaloneimitate.py | 107 ++++++++++++++++-- 1 file changed, 95 insertions(+), 12 deletions(-) diff --git a/src/amuse_metisse/test_metisse_standaloneimitate.py b/src/amuse_metisse/test_metisse_standaloneimitate.py index d4cd901872..d2fd12948a 100644 --- a/src/amuse_metisse/test_metisse_standaloneimitate.py +++ b/src/amuse_metisse/test_metisse_standaloneimitate.py @@ -2,12 +2,20 @@ import numpy as np -from amuse.units import units +from amuse.units import units, nbody_system from amuse.datamodel import Particles # from amuse.community.metisse import Metisse from amuse_metisse import Metisse +from amuse.community.sse import Sse +from 
amuse.ic.kroupa import new_kroupa_mass_distribution +from amuse.ic.plummer import new_plummer_model +from amuse.community.ph4 import Ph4 + +from amuse.io import write_set_to_file + +np.random.seed(127) logger = logging.getLogger("amuse") # logger.setLevel(logging.DEBUG) # logging.basicConfig(level=logging.DEBUG) @@ -18,8 +26,8 @@ def setup_metisse(): instance = Metisse(redirection="none") # instance.initialize_code() - instance.parameters.metallicity_dir = "/Users/rieder/Code/UvA/Toonen/tres3.0/amuse/src/amuse_metisse/data/hydrogen" - instance.parameters.metallicity_dir_he = "/Users/rieder/Code/UvA/Toonen/tres3.0/amuse/src/amuse_metisse/data/helium" + instance.parameters.metallicity_dir = "/Users/rieder/Code/UvA/Toonen/tres3.0/amuse/src/amuse_metisse/data/Hydrogen" + instance.parameters.metallicity_dir_he = "/Users/rieder/Code/UvA/Toonen/tres3.0/amuse/src/amuse_metisse/data/Helium" instance.parameters.wd_mass_scheme = "Modified_mestel" instance.parameters.bhns_mass_scheme = "Belczynski2008" @@ -32,17 +40,17 @@ def setup_metisse(): def test_metisse_sun(): instance = setup_metisse() star = Particles(1) - star.mass = 4.7893208794726441 | units.MSun - # star.mass = 1.0 | units.MSun + # star.mass = 4.7893208794726441 | units.MSun + star.mass = 1.0 | units.MSun stars_in_metisse = instance.particles.add_particles(star) print(instance.parameters) - # assert stars_in_metisse[0].mass == 1.0 | units.MSun - assert stars_in_metisse[0].mass == 4.7893208794726441 | units.MSun + assert stars_in_metisse[0].mass == 1.0 | units.MSun + # assert stars_in_metisse[0].mass == 4.7893208794726441 | units.MSun print(stars_in_metisse[0]) print("Evolving...") - # instance.evolve_model(1000.0 | units.yr) - instance.evolve_one_step(1) + instance.evolve_model(1000.0 | units.yr) + # instance.evolve_one_step(1) print(stars_in_metisse[0]) print("Done") instance.stop() @@ -62,8 +70,6 @@ def test_metisse_twostars(): instance.stop() def test_metisse_kroupa(): - from amuse.ic.kroupa import 
new_kroupa_mass_distribution - np.random.seed(127) instance = setup_metisse() number_of_stars = 1000 star = Particles(number_of_stars) @@ -78,8 +84,85 @@ def test_metisse_kroupa(): instance.stop() -test_metisse_sun() +def evolve_stars_with_metisse(stars, age): + instance = setup_metisse() + stars_in_metisse = instance.particles.add_particles(stars) + instance.evolve_model(age) + stars_after_evolution = stars_in_metisse.copy() + instance.stop() + return stars_after_evolution + + +def star_cluster_with_metisse(number_of_stars, time_end, time_step, start=0): + mass = new_kroupa_mass_distribution( + number_of_stars, + mass_min=0.75 | units.MSun, + mass_max=100.0 | units.MSun, + ) + mass = np.logspace(np.log10(1.0), np.log10(100.0), number_of_stars) | units.MSun + converter = nbody_system.nbody_to_si(mass.sum(), 3 | units.parsec) + stars = new_plummer_model(number_of_stars, converter) + stars.mass = mass + + gravity = Ph4(converter) + gravity.parameters.epsilon_squared = 0.01 | units.parsec**2 + stars_in_gravity = gravity.particles.add_particles(stars) + + time = 0.0 | units.yr + i = 0 + time += start * time_step + i += start + while time < time_end: + print(f"Evolving to time: {time}") + stars_evo = evolve_stars_with_metisse(stars, time) + # gravity.evolve_model(time) + # evo_to_gravity = stars_evo.new_channel_to(stars_in_gravity) + # evo_to_gravity.copy_attributes(["mass"]) + evo_to_model = stars_evo.new_channel_to(stars) + evo_to_model.copy_attributes(["mass", "luminosity", "stellar_type", "temperature"]) + # grav_to_model = gravity.particles.new_channel_to(stars) + # grav_to_model.copy_attributes(["x", "y", "z", "vx", "vy", "vz"]) + write_set_to_file( + stars, f"star_cluster2_metisse_{i:04d}.amuse" + ) + time += time_step + i += 1 + +def evolve_stars_with_sse(stars, age): + instance = Sse() + stars_in_sse = instance.particles.add_particles(stars) + instance.evolve_model(age) + stars_after_evolution = stars_in_sse.copy() + instance.stop() + return 
stars_after_evolution + + +def star_cluster_with_metisse_and_sse(number_of_stars, time_end, time_step, start=0): + mass = np.logspace(np.log10(1.0), np.log10(100.0), number_of_stars) | units.MSun + stars = Particles(number_of_stars) + stars.mass = mass + + time = 0.0 | units.yr + i = 0 + time += start * time_step + i += start + while time < time_end: + print(f"Evolving to time: {time}") + stars_metisse = evolve_stars_with_metisse(stars, time) + write_set_to_file( + stars_metisse, f"stars2_metisse_{i:04d}.amuse" + ) + # stars_sse = evolve_stars_with_sse(stars, time) + # write_set_to_file( + # stars_sse, f"stars_sse_{i:04d}.amuse" + # ) + time += time_step + i += 1 + +# test_metisse_sun() # test_metisse_twostars() # test_metisse_kroupa() +# star_cluster_with_metisse(1000, 100 | units.Myr, 100 | units.kyr, start=0) +star_cluster_with_metisse_and_sse(10000, 100 | units.Myr, 20 | units.kyr, start=0) From 20f2c5f7fb8cf50a5ce91fdffeb5417f47c0ccfe Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 14 Jul 2025 13:52:54 +0200 Subject: [PATCH 35/40] add plotting script --- src/amuse_metisse/plot_hr.py | 253 +++++++++++++++++++++++++++++++++++ 1 file changed, 253 insertions(+) create mode 100644 src/amuse_metisse/plot_hr.py diff --git a/src/amuse_metisse/plot_hr.py b/src/amuse_metisse/plot_hr.py new file mode 100644 index 0000000000..a3f4337a1c --- /dev/null +++ b/src/amuse_metisse/plot_hr.py @@ -0,0 +1,253 @@ +""" +Plot a Hertzsprung-Russell diagram of a star cluster, for a number of +snapshots, and make a movie. + +Uses data from +https://astronomy.stackexchange.com/questions/39994/what-is-the-rgb-curve-for-blackbodies +to convert temperature to RGB. + +Shows a progress bar. 
+""" + +import sys +import os.path +import argparse +import numpy as np +import matplotlib.pyplot as plt +from matplotlib import animation +from amuse.io import read_set_from_file +from amuse.units import units, constants + +# package for progress bar +from tqdm import tqdm + + +def lumrad_to_temp(luminosity, radius): + temperature = (( + luminosity + / (constants.four_pi_stefan_boltzmann * radius**2) + )**0.25).in_(units.K) + return temperature + + +def lumtemp_to_rad(luminosity, temperature): + radius = ((luminosity / (constants.four_pi_stefan_boltzmann * temperature**4))**0.5).in_(units.RSun) + return radius + + +def templum_to_xyz(temperature, luminosity): + log_temperature = np.nan_to_num(np.log10(temperature.value_in(units.K))) + log_luminosity = np.nan_to_num(np.log10(luminosity.value_in(units.LSun))) + color = temp_to_rgb(temperature) + return log_temperature, log_luminosity, color + + +def temp_to_rgb(temperature): + temp = temperature.value_in(units.K) + logT = np.log(temp) + logT1000 = np.log(temp - 1000.0) + rgb = np.zeros((len(temp), 3)) + rgb[:, 0] = 1.0 + rgb[:, 1] = 0.390081972 * logT - 2.427925631 + rgb[:, 2] = 0.543206396 * logT1000 - 3.698136688 + + t6600 = temp > 6600 + rgb[t6600, 0] = 2.4054 * (temp[t6600]-6000)**(-0.1332047592) + rgb[t6600, 1] = 1.6 * (temp[t6600]-6000)**(-0.0755148492) + rgb[t6600, 2] = 1.0 + + rgb = np.clip(rgb, 0, 1) + return rgb + + + +class StarHRPlotter: + def __init__(self, name, extension="amuse"): + self.fig = plt.figure(figsize=(10, 10)) + self.ax = self.fig.add_subplot(111) + self.ax.set_xlabel("log(Teff)") + self.ax.set_ylabel("log(L)") + self.temperature_range = [5.5, 3] + self.luminosity_range = [-5, 9] + self.ax.set_xlim(self.temperature_range) + self.ax.set_ylim(self.luminosity_range) + self.ax.set_facecolor("k") + self.scatter = None + self.scatter2 = None + self.name = name + self.extension = extension + self.ndigit = 4 + + + def make_movie(self, start, end): + "Find all snapshots, and make a movie" + def 
update(frame): + """ + update frame and update progress bar. The progress bar doesn't work + yet so printing dots too. + """ + print(".", end="", flush=True) + i = start + frame + + filename = f"{self.name}{i:0{self.ndigit}d}.{self.extension}" + stars = read_set_from_file(filename) + size = 4 * stars.radius.value_in(units.RSun)**0.5 + x, y, color = templum_to_xyz(stars.temperature, stars.luminosity) + self.ax.set_title(f"Snapshot {i}") + self.scatter.set_offsets(np.array([x, y,]).T) + self.scatter.set_sizes(size) + self.scatter.set_facecolors(color) + + + i = start + filename = f"{self.name}{i:0{self.ndigit}d}.{self.extension}" + stars = read_set_from_file(filename) + x, y, color = templum_to_xyz(stars.temperature, stars.luminosity) + # radius_is_zero = stars.radius == 0 | units.RSun + # print(radius_is_zero) + # stars[radius_is_zero] = lumtemp_to_rad( + # stars[radius_is_zero].luminosity, + # stars[radius_is_zero].temperature, + # ) + size = 4 * stars.radius.value_in(units.RSun)**0.5 + self.scatter = self.ax.scatter(x, y, s=size, c=color, edgecolor="none") + anim = animation.FuncAnimation( + self.fig, + update, + frames=tqdm(range(end - start), position=0, file=sys.stdout), + interval=30, + repeat=False, + ) + anim.save(f"{self.name}.mp4", dpi=150, writer=animation.FFMpegWriter(fps=25)) + + + def templum_to_xy(self): + log_temperature = np.nan_to_num(np.log10(stars.temperature.value_in(units.K))) + log_luminosity = np.nan_to_num(np.log10(stars.luminosity.value_in(units.LSun))) + + + def plot_hr(self, stars, stars2=None): + log_temperature = np.nan_to_num(np.log10(stars.temperature.value_in(units.K))) + log_luminosity = np.nan_to_num(np.log10(stars.luminosity.value_in(units.LSun))) + col = temp_to_rgb(stars.temperature) + if not self.scatter: + self.scatter = self.ax.scatter( + log_temperature, + log_luminosity, + s=42, + c=temp_to_rgb(stars.temperature), + edgecolor="none", + ) + else: + self.scatter.set_offsets( + np.array( + [ + log_temperature, + 
log_luminosity, + ] + ).T + ) + self.scatter.set_facecolors(col) + if stars2: + + log_temperature2 = np.nan_to_num(np.log10(stars2.temperature.value_in(units.K))) + log_luminosity2 = np.nan_to_num(np.log10(stars2.luminosity.value_in(units.LSun))) + if not self.scatter2: + self.scatter2 = self.ax.scatter( + log_temperature2, + log_luminosity2, + s=2, + ) + else: + self.scatter2.set_offsets( + np.array( + [ + log_temperature2, + log_luminosity2, + ] + ).T + ) + + + def savefig(self, filename): + self.fig.savefig(filename) + + + +def new_argument_parser(): + "Parse command line arguments, show defaults" + parser = argparse.ArgumentParser( + formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + parser.add_argument( + "-i", + "--infile", + type=str, + default="", + help="The first snapshot to plot", + ) + parser.add_argument( + "-I", + "--infile2", + type=str, + default="", + help="Second set of snapshots to plot", + ) + parser.add_argument( + "-n", + "--number", + type=int, + default=1, + help="The number of snapshots to plot", + ) + return parser + + +def main(): + args = new_argument_parser().parse_args() + filename_template = args.infile + # check extension of the file + filename_template = filename_template.split(".") + extension = filename_template[-1] + name = filename_template[0] + if args.infile2: + filename_template2 = args.infile2 + filename_template2 = filename_template2.split(".") + extension2 = filename_template2[-1] + name2 = filename_template2[0] + # check if the name ends with a number, if so, check how many characters + # are there and store it and strip it + i = 0 + while name[-i - 1].isdigit(): + i += 1 + if i > 1: + name = name[:-i] + if args.infile2: + name2 = name2[:-i] + ndigit = i + else: + raise ValueError("The name of the first snapshot should end with a number") + + # set up plotter + plotter = StarHRPlotter(name) + plotter.make_movie(0, args.number) + sys.exit() + + # read the snapshots + for i in range(args.number): + filename = 
f"{name}{i:0{ndigit}}.{extension}" + snapshot = read_set_from_file(filename) + if args.infile2: + filename2 = f"{name2}{i:0{ndigit}}.{extension}" + snapshot2 = read_set_from_file(filename2) + print(filename, filename2) + plotter.plot_hr(snapshot, snapshot2) + plotter.savefig(f"{name}{name2}{i:0{ndigit}}.png") + else: + print(filename) + plotter.plot_hr(snapshot) + plotter.savefig(f"rgb{name}{i:0{ndigit}}.png") + + +if __name__ == "__main__": + main() From 2ecb5e7fbc0066d88fb6acb292a1b399fbf2e2dc Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 14 Apr 2026 16:49:57 +0200 Subject: [PATCH 36/40] remove workflows from another branch --- .github/workflows/test-aarsethzare.yml | 79 ---------------------- .github/workflows/test-adaptb.yml | 79 ---------------------- .github/workflows/test-athena.yml | 79 ---------------------- .github/workflows/test-bhtree.yml | 79 ---------------------- .github/workflows/test-brutus.yml | 79 ---------------------- .github/workflows/test-bse.yml | 79 ---------------------- .github/workflows/test-capreole.yml | 79 ---------------------- .github/workflows/test-evtwin.yml | 79 ---------------------- .github/workflows/test-fastkick.yml | 79 ---------------------- .github/workflows/test-fi.yml | 79 ---------------------- .github/workflows/test-fractalcluster.yml | 79 ---------------------- .github/workflows/test-gadget2.yml | 79 ---------------------- .github/workflows/test-galactics.yml | 79 ---------------------- .github/workflows/test-galaxia.yml | 79 ---------------------- .github/workflows/test-halogen.yml | 79 ---------------------- .github/workflows/test-hermite-grx.yml | 79 ---------------------- .github/workflows/test-hermite.yml | 79 ---------------------- .github/workflows/test-hop.yml | 79 ---------------------- .github/workflows/test-huayno.yml | 79 ---------------------- .github/workflows/test-kepler-orbiters.yml | 79 ---------------------- .github/workflows/test-kepler.yml | 79 ---------------------- 
.github/workflows/test-krome.yml | 79 ---------------------- .github/workflows/test-mameclot.yml | 79 ---------------------- .github/workflows/test-mercury.yml | 79 ---------------------- .github/workflows/test-mesa-r15140.yml | 75 -------------------- .github/workflows/test-mesa-r2208.yml | 79 ---------------------- .github/workflows/test-mi6.yml | 79 ---------------------- .github/workflows/test-mikkola.yml | 79 ---------------------- .github/workflows/test-mmams.yml | 79 ---------------------- .github/workflows/test-mobse.yml | 79 ---------------------- .github/workflows/test-mocassin.yml | 79 ---------------------- .github/workflows/test-mosse.yml | 79 ---------------------- .github/workflows/test-mpiamrvac.yml | 79 ---------------------- .github/workflows/test-nbody6xx.yml | 79 ---------------------- .github/workflows/test-petar.yml | 79 ---------------------- .github/workflows/test-ph4.yml | 79 ---------------------- .github/workflows/test-phantom.yml | 79 ---------------------- .github/workflows/test-phigrape.yml | 79 ---------------------- .github/workflows/test-rebound.yml | 79 ---------------------- .github/workflows/test-sakura.yml | 79 ---------------------- .github/workflows/test-secularmultiple.yml | 79 ---------------------- .github/workflows/test-sei.yml | 79 ---------------------- .github/workflows/test-simplex.yml | 79 ---------------------- .github/workflows/test-smalln.yml | 79 ---------------------- .github/workflows/test-sphray.yml | 79 ---------------------- .github/workflows/test-sse.yml | 79 ---------------------- .github/workflows/test-symple.yml | 79 ---------------------- .github/workflows/test-tupan.yml | 79 ---------------------- .github/workflows/test-twobody.yml | 79 ---------------------- .github/workflows/test-vader.yml | 79 ---------------------- src/amuse_mesa_r15140/patches/series | 12 ---- 51 files changed, 3958 deletions(-) delete mode 100644 .github/workflows/test-aarsethzare.yml delete mode 100644 
.github/workflows/test-adaptb.yml delete mode 100644 .github/workflows/test-athena.yml delete mode 100644 .github/workflows/test-bhtree.yml delete mode 100644 .github/workflows/test-brutus.yml delete mode 100644 .github/workflows/test-bse.yml delete mode 100644 .github/workflows/test-capreole.yml delete mode 100644 .github/workflows/test-evtwin.yml delete mode 100644 .github/workflows/test-fastkick.yml delete mode 100644 .github/workflows/test-fi.yml delete mode 100644 .github/workflows/test-fractalcluster.yml delete mode 100644 .github/workflows/test-gadget2.yml delete mode 100644 .github/workflows/test-galactics.yml delete mode 100644 .github/workflows/test-galaxia.yml delete mode 100644 .github/workflows/test-halogen.yml delete mode 100644 .github/workflows/test-hermite-grx.yml delete mode 100644 .github/workflows/test-hermite.yml delete mode 100644 .github/workflows/test-hop.yml delete mode 100644 .github/workflows/test-huayno.yml delete mode 100644 .github/workflows/test-kepler-orbiters.yml delete mode 100644 .github/workflows/test-kepler.yml delete mode 100644 .github/workflows/test-krome.yml delete mode 100644 .github/workflows/test-mameclot.yml delete mode 100644 .github/workflows/test-mercury.yml delete mode 100644 .github/workflows/test-mesa-r15140.yml delete mode 100644 .github/workflows/test-mesa-r2208.yml delete mode 100644 .github/workflows/test-mi6.yml delete mode 100644 .github/workflows/test-mikkola.yml delete mode 100644 .github/workflows/test-mmams.yml delete mode 100644 .github/workflows/test-mobse.yml delete mode 100644 .github/workflows/test-mocassin.yml delete mode 100644 .github/workflows/test-mosse.yml delete mode 100644 .github/workflows/test-mpiamrvac.yml delete mode 100644 .github/workflows/test-nbody6xx.yml delete mode 100644 .github/workflows/test-petar.yml delete mode 100644 .github/workflows/test-ph4.yml delete mode 100644 .github/workflows/test-phantom.yml delete mode 100644 .github/workflows/test-phigrape.yml delete mode 100644 
.github/workflows/test-rebound.yml delete mode 100644 .github/workflows/test-sakura.yml delete mode 100644 .github/workflows/test-secularmultiple.yml delete mode 100644 .github/workflows/test-sei.yml delete mode 100644 .github/workflows/test-simplex.yml delete mode 100644 .github/workflows/test-smalln.yml delete mode 100644 .github/workflows/test-sphray.yml delete mode 100644 .github/workflows/test-sse.yml delete mode 100644 .github/workflows/test-symple.yml delete mode 100644 .github/workflows/test-tupan.yml delete mode 100644 .github/workflows/test-twobody.yml delete mode 100644 .github/workflows/test-vader.yml delete mode 100644 src/amuse_mesa_r15140/patches/series diff --git a/.github/workflows/test-aarsethzare.yml b/.github/workflows/test-aarsethzare.yml deleted file mode 100644 index a0c0259b20..0000000000 --- a/.github/workflows/test-aarsethzare.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE aarsethzare - -on: - push: - paths: - - .github/workflows/test-aarsethzare.yml - - 'src/amuse_aarsethzare/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-aarsethzare.yml - - 'src/amuse_aarsethzare/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir 
-p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build aarsethzare - run: | - ./setup install amuse-aarsethzare - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test aarsethzare - run: | - ./setup test amuse-aarsethzare - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-adaptb.yml b/.github/workflows/test-adaptb.yml deleted file mode 100644 index 31dadc0b3a..0000000000 --- a/.github/workflows/test-adaptb.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE adaptb - -on: - push: - paths: - - .github/workflows/test-adaptb.yml - - 'src/amuse_adaptb/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-adaptb.yml - - 'src/amuse_adaptb/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo 
"btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build adaptb - run: | - ./setup install amuse-adaptb - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test adaptb - run: | - ./setup test amuse-adaptb - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-athena.yml b/.github/workflows/test-athena.yml deleted file mode 100644 index 754fb4d6aa..0000000000 --- a/.github/workflows/test-athena.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE athena - -on: - push: - paths: - - .github/workflows/test-athena.yml - - 'src/amuse_athena/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-athena.yml - - 'src/amuse_athena/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" 
>>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build athena - run: | - ./setup install amuse-athena - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test athena - run: | - ./setup test amuse-athena - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-bhtree.yml b/.github/workflows/test-bhtree.yml deleted file mode 100644 index 65009088e6..0000000000 --- a/.github/workflows/test-bhtree.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE bhtree - -on: - push: - paths: - - .github/workflows/test-bhtree.yml - - 'src/amuse_bhtree/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-bhtree.yml - - 'src/amuse_bhtree/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo 
"rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build bhtree - run: | - ./setup install amuse-bhtree - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test bhtree - run: | - ./setup test amuse-bhtree - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-brutus.yml b/.github/workflows/test-brutus.yml deleted file mode 100644 index 94cd7d3ac2..0000000000 --- a/.github/workflows/test-brutus.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE brutus - -on: - push: - paths: - - .github/workflows/test-brutus.yml - - 'src/amuse_brutus/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-brutus.yml - - 'src/amuse_brutus/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" 
>>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build brutus - run: | - ./setup install amuse-brutus - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test brutus - run: | - ./setup test amuse-brutus - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-bse.yml b/.github/workflows/test-bse.yml deleted file mode 100644 index 188d588b40..0000000000 --- a/.github/workflows/test-bse.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE bse - -on: - push: - paths: - - .github/workflows/test-bse.yml - - 'src/amuse_bse/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-bse.yml - - 'src/amuse_bse/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 
- with: - fetch-depth: 0 - fetch-tags: true - - - name: Build bse - run: | - ./setup install amuse-bse - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test bse - run: | - ./setup test amuse-bse - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-capreole.yml b/.github/workflows/test-capreole.yml deleted file mode 100644 index 391cd9125e..0000000000 --- a/.github/workflows/test-capreole.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE capreole - -on: - push: - paths: - - .github/workflows/test-capreole.yml - - 'src/amuse_capreole/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-capreole.yml - - 'src/amuse_capreole/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: 
Build capreole - run: | - ./setup install amuse-capreole - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test capreole - run: | - ./setup test amuse-capreole - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-evtwin.yml b/.github/workflows/test-evtwin.yml deleted file mode 100644 index 526418939c..0000000000 --- a/.github/workflows/test-evtwin.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE evtwin - -on: - push: - paths: - - .github/workflows/test-evtwin.yml - - 'src/amuse_evtwin/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-evtwin.yml - - 'src/amuse_evtwin/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build evtwin - run: | - ./setup install 
amuse-evtwin - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test evtwin - run: | - ./setup test amuse-evtwin - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-fastkick.yml b/.github/workflows/test-fastkick.yml deleted file mode 100644 index feb67ba3fa..0000000000 --- a/.github/workflows/test-fastkick.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE fastkick - -on: - push: - paths: - - .github/workflows/test-fastkick.yml - - 'src/amuse_fastkick/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-fastkick.yml - - 'src/amuse_fastkick/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build fastkick - run: | - ./setup install amuse-fastkick - - - name: Ensure we test 
only the installed package - run: | - ./setup distclean - - - name: Test fastkick - run: | - ./setup test amuse-fastkick - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-fi.yml b/.github/workflows/test-fi.yml deleted file mode 100644 index c15e18ab7a..0000000000 --- a/.github/workflows/test-fi.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE fi - -on: - push: - paths: - - .github/workflows/test-fi.yml - - 'src/amuse_fi/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-fi.yml - - 'src/amuse_fi/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build fi - run: | - ./setup install amuse-fi - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test fi - run: | - ./setup 
test amuse-fi - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-fractalcluster.yml b/.github/workflows/test-fractalcluster.yml deleted file mode 100644 index 964a92f100..0000000000 --- a/.github/workflows/test-fractalcluster.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE fractalcluster - -on: - push: - paths: - - .github/workflows/test-fractalcluster.yml - - 'src/amuse_fractalcluster/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-fractalcluster.yml - - 'src/amuse_fractalcluster/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build fractalcluster - run: | - ./setup install amuse-fractalcluster - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test 
fractalcluster - run: | - ./setup test amuse-fractalcluster - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-gadget2.yml b/.github/workflows/test-gadget2.yml deleted file mode 100644 index 2c0205d983..0000000000 --- a/.github/workflows/test-gadget2.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE gadget2 - -on: - push: - paths: - - .github/workflows/test-gadget2.yml - - 'src/amuse_gadget2/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-gadget2.yml - - 'src/amuse_gadget2/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build gadget2 - run: | - ./setup install amuse-gadget2 - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test gadget2 - run: | - ./setup test 
amuse-gadget2 - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-galactics.yml b/.github/workflows/test-galactics.yml deleted file mode 100644 index 784783645a..0000000000 --- a/.github/workflows/test-galactics.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE galactics - -on: - push: - paths: - - .github/workflows/test-galactics.yml - - 'src/amuse_galactics/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-galactics.yml - - 'src/amuse_galactics/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build galactics - run: | - ./setup install amuse-galactics - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test galactics - run: | - ./setup test amuse-galactics - - - 
name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-galaxia.yml b/.github/workflows/test-galaxia.yml deleted file mode 100644 index 07d1a58f93..0000000000 --- a/.github/workflows/test-galaxia.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE galaxia - -on: - push: - paths: - - .github/workflows/test-galaxia.yml - - 'src/amuse_galaxia/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-galaxia.yml - - 'src/amuse_galaxia/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build galaxia - run: | - ./setup install amuse-galaxia - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test galaxia - run: | - ./setup test amuse-galaxia - - - name: Archive build logs - if: always() - uses: 
actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-halogen.yml b/.github/workflows/test-halogen.yml deleted file mode 100644 index f793390b9d..0000000000 --- a/.github/workflows/test-halogen.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE halogen - -on: - push: - paths: - - .github/workflows/test-halogen.yml - - 'src/amuse_halogen/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-halogen.yml - - 'src/amuse_halogen/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build halogen - run: | - ./setup install amuse-halogen - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test halogen - run: | - ./setup test amuse-halogen - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: 
logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-hermite-grx.yml b/.github/workflows/test-hermite-grx.yml deleted file mode 100644 index 16acbd8b1c..0000000000 --- a/.github/workflows/test-hermite-grx.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE hermite-grx - -on: - push: - paths: - - .github/workflows/test-hermite-grx.yml - - 'src/amuse_hermite_grx/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-hermite-grx.yml - - 'src/amuse_hermite_grx/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build hermite-grx - run: | - ./setup install amuse-hermite-grx - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test hermite-grx - run: | - ./setup test amuse-hermite-grx - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: 
logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-hermite.yml b/.github/workflows/test-hermite.yml deleted file mode 100644 index 27641d7c0f..0000000000 --- a/.github/workflows/test-hermite.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE hermite - -on: - push: - paths: - - .github/workflows/test-hermite.yml - - 'src/amuse_hermite/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-hermite.yml - - 'src/amuse_hermite/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build hermite - run: | - ./setup install amuse-hermite - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test hermite - run: | - ./setup test amuse-hermite - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ 
github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-hop.yml b/.github/workflows/test-hop.yml deleted file mode 100644 index 53b9b11872..0000000000 --- a/.github/workflows/test-hop.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE hop - -on: - push: - paths: - - .github/workflows/test-hop.yml - - 'src/amuse_hop/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-hop.yml - - 'src/amuse_hop/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build hop - run: | - ./setup install amuse-hop - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test hop - run: | - ./setup test amuse-hop - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git 
a/.github/workflows/test-huayno.yml b/.github/workflows/test-huayno.yml deleted file mode 100644 index b9b91f2cfa..0000000000 --- a/.github/workflows/test-huayno.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE huayno - -on: - push: - paths: - - .github/workflows/test-huayno.yml - - 'src/amuse_huayno/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-huayno.yml - - 'src/amuse_huayno/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build huayno - run: | - ./setup install amuse-huayno - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test huayno - run: | - ./setup test amuse-huayno - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-kepler-orbiters.yml 
b/.github/workflows/test-kepler-orbiters.yml deleted file mode 100644 index 5622027bd8..0000000000 --- a/.github/workflows/test-kepler-orbiters.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE kepler-orbiters - -on: - push: - paths: - - .github/workflows/test-kepler-orbiters.yml - - 'src/amuse_kepler_orbiters/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-kepler-orbiters.yml - - 'src/amuse_kepler_orbiters/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build kepler-orbiters - run: | - ./setup install amuse-kepler-orbiters - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test kepler-orbiters - run: | - ./setup test amuse-kepler-orbiters - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff 
--git a/.github/workflows/test-kepler.yml b/.github/workflows/test-kepler.yml deleted file mode 100644 index 153e2be371..0000000000 --- a/.github/workflows/test-kepler.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE kepler - -on: - push: - paths: - - .github/workflows/test-kepler.yml - - 'src/amuse_kepler/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-kepler.yml - - 'src/amuse_kepler/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build kepler - run: | - ./setup install amuse-kepler - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test kepler - run: | - ./setup test amuse-kepler - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-krome.yml 
b/.github/workflows/test-krome.yml deleted file mode 100644 index 28fb71a5bf..0000000000 --- a/.github/workflows/test-krome.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE krome - -on: - push: - paths: - - .github/workflows/test-krome.yml - - 'src/amuse_krome/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-krome.yml - - 'src/amuse_krome/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build krome - run: | - ./setup install amuse-krome - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test krome - run: | - ./setup test amuse-krome - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-mameclot.yml b/.github/workflows/test-mameclot.yml deleted file mode 100644 index 
b010337ad8..0000000000 --- a/.github/workflows/test-mameclot.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE mameclot - -on: - push: - paths: - - .github/workflows/test-mameclot.yml - - 'src/amuse_mameclot/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-mameclot.yml - - 'src/amuse_mameclot/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build mameclot - run: | - ./setup install amuse-mameclot - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test mameclot - run: | - ./setup test amuse-mameclot - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-mercury.yml b/.github/workflows/test-mercury.yml deleted file mode 100644 index a6ca570104..0000000000 --- 
a/.github/workflows/test-mercury.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE mercury - -on: - push: - paths: - - .github/workflows/test-mercury.yml - - 'src/amuse_mercury/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-mercury.yml - - 'src/amuse_mercury/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build mercury - run: | - ./setup install amuse-mercury - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test mercury - run: | - ./setup test amuse-mercury - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-mesa-r15140.yml b/.github/workflows/test-mesa-r15140.yml deleted file mode 100644 index 6c86fe5e87..0000000000 --- 
a/.github/workflows/test-mesa-r15140.yml +++ /dev/null @@ -1,75 +0,0 @@ -name: Test AMUSE mesa-r15140 - -on: - push: - paths: - - .github/workflows/test-mesa-r15140.yml - - 'src/amuse_mesa_r15140/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-mesa-r15140.yml - - 'src/amuse_mesa_r15140/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build mesa-r15140 - run: | - ./setup develop amuse-mesa-r15140 - - - name: Test mesa-r15140 - run: | - ./setup test amuse-mesa-r15140 - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-mesa-r2208.yml b/.github/workflows/test-mesa-r2208.yml deleted file mode 100644 index fef37cf543..0000000000 --- a/.github/workflows/test-mesa-r2208.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test 
AMUSE mesa-r2208 - -on: - push: - paths: - - .github/workflows/test-mesa-r2208.yml - - 'src/amuse_mesa_r2208/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-mesa-r2208.yml - - 'src/amuse_mesa_r2208/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build mesa-r2208 - run: | - ./setup install amuse-mesa-r2208 - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test mesa-r2208 - run: | - ./setup test amuse-mesa-r2208 - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-mi6.yml b/.github/workflows/test-mi6.yml deleted file mode 100644 index fc54632ad4..0000000000 --- a/.github/workflows/test-mi6.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE mi6 - -on: - push: - 
paths: - - .github/workflows/test-mi6.yml - - 'src/amuse_mi6/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-mi6.yml - - 'src/amuse_mi6/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build mi6 - run: | - ./setup install amuse-mi6 - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test mi6 - run: | - ./setup test amuse-mi6 - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-mikkola.yml b/.github/workflows/test-mikkola.yml deleted file mode 100644 index a90f8430a3..0000000000 --- a/.github/workflows/test-mikkola.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE mikkola - -on: - push: - paths: - - .github/workflows/test-mikkola.yml - - 'src/amuse_mikkola/**' - 
pull_request: - branches: - - main - paths: - - .github/workflows/test-mikkola.yml - - 'src/amuse_mikkola/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build mikkola - run: | - ./setup install amuse-mikkola - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test mikkola - run: | - ./setup test amuse-mikkola - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-mmams.yml b/.github/workflows/test-mmams.yml deleted file mode 100644 index 9b42a42600..0000000000 --- a/.github/workflows/test-mmams.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE mmams - -on: - push: - paths: - - .github/workflows/test-mmams.yml - - 'src/amuse_mmams/**' - pull_request: - branches: - - main - paths: - - 
.github/workflows/test-mmams.yml - - 'src/amuse_mmams/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build mmams - run: | - ./setup install amuse-mmams - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test mmams - run: | - ./setup test amuse-mmams - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-mobse.yml b/.github/workflows/test-mobse.yml deleted file mode 100644 index 777cdf33e1..0000000000 --- a/.github/workflows/test-mobse.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE mobse - -on: - push: - paths: - - .github/workflows/test-mobse.yml - - 'src/amuse_mobse/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-mobse.yml - - 'src/amuse_mobse/**' - 
workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build mobse - run: | - ./setup install amuse-mobse - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test mobse - run: | - ./setup test amuse-mobse - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-mocassin.yml b/.github/workflows/test-mocassin.yml deleted file mode 100644 index b31a72d250..0000000000 --- a/.github/workflows/test-mocassin.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE mocassin - -on: - push: - paths: - - .github/workflows/test-mocassin.yml - - 'src/amuse_mocassin/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-mocassin.yml - - 'src/amuse_mocassin/**' - workflow_dispatch: - -jobs: - test: - runs-on: 
${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build mocassin - run: | - ./setup install amuse-mocassin - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test mocassin - run: | - ./setup test amuse-mocassin - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-mosse.yml b/.github/workflows/test-mosse.yml deleted file mode 100644 index 3192aa63a0..0000000000 --- a/.github/workflows/test-mosse.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE mosse - -on: - push: - paths: - - .github/workflows/test-mosse.yml - - 'src/amuse_mosse/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-mosse.yml - - 'src/amuse_mosse/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: 
- os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build mosse - run: | - ./setup install amuse-mosse - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test mosse - run: | - ./setup test amuse-mosse - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-mpiamrvac.yml b/.github/workflows/test-mpiamrvac.yml deleted file mode 100644 index 0bd7183025..0000000000 --- a/.github/workflows/test-mpiamrvac.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE mpiamrvac - -on: - push: - paths: - - .github/workflows/test-mpiamrvac.yml - - 'src/amuse_mpiamrvac/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-mpiamrvac.yml - - 'src/amuse_mpiamrvac/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - 
macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build mpiamrvac - run: | - ./setup install amuse-mpiamrvac - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test mpiamrvac - run: | - ./setup test amuse-mpiamrvac - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-nbody6xx.yml b/.github/workflows/test-nbody6xx.yml deleted file mode 100644 index 818b2e5cc2..0000000000 --- a/.github/workflows/test-nbody6xx.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE nbody6xx - -on: - push: - paths: - - .github/workflows/test-nbody6xx.yml - - 'src/amuse_nbody6xx/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-nbody6xx.yml - - 'src/amuse_nbody6xx/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - 
run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build nbody6xx - run: | - ./setup install amuse-nbody6xx - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test nbody6xx - run: | - ./setup test amuse-nbody6xx - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-petar.yml b/.github/workflows/test-petar.yml deleted file mode 100644 index a282fca891..0000000000 --- a/.github/workflows/test-petar.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE petar - -on: - push: - paths: - - .github/workflows/test-petar.yml - - 'src/amuse_petar/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-petar.yml - - 'src/amuse_petar/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up 
conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build petar - run: | - ./setup install amuse-petar - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test petar - run: | - ./setup test amuse-petar - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-ph4.yml b/.github/workflows/test-ph4.yml deleted file mode 100644 index f2701085f0..0000000000 --- a/.github/workflows/test-ph4.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE ph4 - -on: - push: - paths: - - .github/workflows/test-ph4.yml - - 'src/amuse_ph4/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-ph4.yml - - 'src/amuse_ph4/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - 
channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build ph4 - run: | - ./setup install amuse-ph4 - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test ph4 - run: | - ./setup test amuse-ph4 - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-phantom.yml b/.github/workflows/test-phantom.yml deleted file mode 100644 index 0d579a73e3..0000000000 --- a/.github/workflows/test-phantom.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE phantom - -on: - push: - paths: - - .github/workflows/test-phantom.yml - - 'src/amuse_phantom/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-phantom.yml - - 'src/amuse_phantom/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: 
Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build phantom - run: | - ./setup install amuse-phantom - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test phantom - run: | - ./setup test amuse-phantom - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-phigrape.yml b/.github/workflows/test-phigrape.yml deleted file mode 100644 index 12564a5d7f..0000000000 --- a/.github/workflows/test-phigrape.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE phigrape - -on: - push: - paths: - - .github/workflows/test-phigrape.yml - - 'src/amuse_phigrape/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-phigrape.yml - - 'src/amuse_phigrape/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - 
conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build phigrape - run: | - ./setup install amuse-phigrape - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test phigrape - run: | - ./setup test amuse-phigrape - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-rebound.yml b/.github/workflows/test-rebound.yml deleted file mode 100644 index 24e7d1e6b3..0000000000 --- a/.github/workflows/test-rebound.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE rebound - -on: - push: - paths: - - .github/workflows/test-rebound.yml - - 'src/amuse_rebound/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-rebound.yml - - 'src/amuse_rebound/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies 
- run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build rebound - run: | - ./setup install amuse-rebound - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test rebound - run: | - ./setup test amuse-rebound - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-sakura.yml b/.github/workflows/test-sakura.yml deleted file mode 100644 index 5d86a39986..0000000000 --- a/.github/workflows/test-sakura.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE sakura - -on: - push: - paths: - - .github/workflows/test-sakura.yml - - 'src/amuse_sakura/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-sakura.yml - - 'src/amuse_sakura/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler 
fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build sakura - run: | - ./setup install amuse-sakura - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test sakura - run: | - ./setup test amuse-sakura - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-secularmultiple.yml b/.github/workflows/test-secularmultiple.yml deleted file mode 100644 index efbd44834d..0000000000 --- a/.github/workflows/test-secularmultiple.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE secularmultiple - -on: - push: - paths: - - .github/workflows/test-secularmultiple.yml - - 'src/amuse_secularmultiple/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-secularmultiple.yml - - 'src/amuse_secularmultiple/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler 
cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build secularmultiple - run: | - ./setup install amuse-secularmultiple - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test secularmultiple - run: | - ./setup test amuse-secularmultiple - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-sei.yml b/.github/workflows/test-sei.yml deleted file mode 100644 index d71d107291..0000000000 --- a/.github/workflows/test-sei.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE sei - -on: - push: - paths: - - .github/workflows/test-sei.yml - - 'src/amuse_sei/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-sei.yml - - 'src/amuse_sei/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' 
python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build sei - run: | - ./setup install amuse-sei - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test sei - run: | - ./setup test amuse-sei - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-simplex.yml b/.github/workflows/test-simplex.yml deleted file mode 100644 index 0266abe8e9..0000000000 --- a/.github/workflows/test-simplex.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE simplex - -on: - push: - paths: - - .github/workflows/test-simplex.yml - - 'src/amuse_simplex/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-simplex.yml - - 'src/amuse_simplex/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz 
perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib healpix_cxx qhull pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build simplex - run: | - ./setup install amuse-simplex - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test simplex - run: | - ./setup test amuse-simplex - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-smalln.yml b/.github/workflows/test-smalln.yml deleted file mode 100644 index a293713dcd..0000000000 --- a/.github/workflows/test-smalln.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE smalln - -on: - push: - paths: - - .github/workflows/test-smalln.yml - - 'src/amuse_smalln/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-smalln.yml - - 'src/amuse_smalln/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl 
fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build smalln - run: | - ./setup install amuse-smalln - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test smalln - run: | - ./setup test amuse-smalln - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-sphray.yml b/.github/workflows/test-sphray.yml deleted file mode 100644 index 3b04fcc294..0000000000 --- a/.github/workflows/test-sphray.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE sphray - -on: - push: - paths: - - .github/workflows/test-sphray.yml - - 'src/amuse_sphray/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-sphray.yml - - 'src/amuse_sphray/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip 
wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build sphray - run: | - ./setup install amuse-sphray - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test sphray - run: | - ./setup test amuse-sphray - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-sse.yml b/.github/workflows/test-sse.yml deleted file mode 100644 index a68f805f32..0000000000 --- a/.github/workflows/test-sse.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE sse - -on: - push: - paths: - - .github/workflows/test-sse.yml - - 'src/amuse_sse/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-sse.yml - - 'src/amuse_sse/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - 
name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build sse - run: | - ./setup install amuse-sse - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test sse - run: | - ./setup test amuse-sse - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-symple.yml b/.github/workflows/test-symple.yml deleted file mode 100644 index ac4e00c4ba..0000000000 --- a/.github/workflows/test-symple.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE symple - -on: - push: - paths: - - .github/workflows/test-symple.yml - - 'src/amuse_symple/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-symple.yml - - 'src/amuse_symple/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - 
run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build symple - run: | - ./setup install amuse-symple - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test symple - run: | - ./setup test amuse-symple - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-tupan.yml b/.github/workflows/test-tupan.yml deleted file mode 100644 index 61f87da3f0..0000000000 --- a/.github/workflows/test-tupan.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE tupan - -on: - push: - paths: - - .github/workflows/test-tupan.yml - - 'src/amuse_tupan/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-tupan.yml - - 'src/amuse_tupan/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" 
>>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build tupan - run: | - ./setup install amuse-tupan - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test tupan - run: | - ./setup test amuse-tupan - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-twobody.yml b/.github/workflows/test-twobody.yml deleted file mode 100644 index 7ad65b80a5..0000000000 --- a/.github/workflows/test-twobody.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE twobody - -on: - push: - paths: - - .github/workflows/test-twobody.yml - - 'src/amuse_twobody/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-twobody.yml - - 'src/amuse_twobody/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo 
"rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build twobody - run: | - ./setup install amuse-twobody - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test twobody - run: | - ./setup test amuse-twobody - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/.github/workflows/test-vader.yml b/.github/workflows/test-vader.yml deleted file mode 100644 index e3a784d702..0000000000 --- a/.github/workflows/test-vader.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Test AMUSE vader - -on: - push: - paths: - - .github/workflows/test-vader.yml - - 'src/amuse_vader/**' - pull_request: - branches: - - main - paths: - - .github/workflows/test-vader.yml - - 'src/amuse_vader/**' - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" 
>>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build vader - run: | - ./setup install amuse-vader - - - name: Ensure we test only the installed package - run: | - ./setup distclean - - - name: Test vader - run: | - ./setup test amuse-vader - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/src/amuse_mesa_r15140/patches/series b/src/amuse_mesa_r15140/patches/series deleted file mode 100644 index b9086cf90d..0000000000 --- a/src/amuse_mesa_r15140/patches/series +++ /dev/null @@ -1,12 +0,0 @@ -0001-Dont-make-empty-output-directories.patch -0001-Fix-astero-builds-when-disabled.patch -0003-Add-mpi-to-utils.patch -0004-Fix-adipls-and-gyre-builds.patch -0001-Stop-crashes-when-profile-column-name-is-bad.patch -0001-Add-support-for-namelist-setting-from-a-string.patch -0001-declare-i.patch -0001-Get-photo-loading-working.patch -0001-Patch-error-messages.patch -0005-fix_mesa_utils_c_system.patch -0006-fix_cpp_and_lapack_template.patch -0007-configure-build-and-fix-clang.patch From 64e868ad9c6e046941bc581c7728416a7849700e Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 14 Apr 2026 16:55:48 +0200 Subject: [PATCH 37/40] undo another external commit --- .github/workflows/test-ext.yml | 92 ----------------------- src/amuse_adaptb/src/makefile | 7 +- src/amuse_gadget2/Makefile | 4 - src/amuse_gadget2/tests/test_gadget2.py | 2 +- src/amuse_hermite/tests/test_hermite.py | 2 +- src/amuse_hermite_grx/Makefile | 3 +- src/amuse_mesa_r15140/patches/series_mesa | 12 +++ 7 files changed, 21 insertions(+), 101 deletions(-) delete mode 100644 .github/workflows/test-ext.yml create mode 100644 src/amuse_mesa_r15140/patches/series_mesa diff --git a/.github/workflows/test-ext.yml b/.github/workflows/test-ext.yml deleted file mode 100644 index 
8c022dcda3..0000000000 --- a/.github/workflows/test-ext.yml +++ /dev/null @@ -1,92 +0,0 @@ -name: Test AMUSE ic and ext - -on: - push: - paths: - - .github/workflows/test-ext.yml - - src/amuse/couple - - src/amuse/ext - - src/amuse/ic - - src/amuse/plot - pull_request: - branches: - - main - paths: - - .github/workflows/test-ext.yml - - src/amuse/couple - - src/amuse/ext - - src/amuse/ic - - src/amuse/plot - workflow_dispatch: - -jobs: - test: - runs-on: ${{ matrix.os }} - - strategy: - fail-fast: false - matrix: - os: - - ubuntu-latest - - macos-latest - - defaults: - run: - shell: bash -el {0} - - steps: - - name: Set up conda - uses: conda-incubator/setup-miniconda@v3 - with: - auto-update-conda: true - channels: conda-forge - channel-priority: strict - - - name: Show conda info - run: | - conda info - conda list - - - name: Install dependencies - run: | - conda install c-compiler cxx-compiler fortran-compiler 'gfortran<14' python pkgconfig coreutils patch curl tar unzip gzip bzip2 xz perl bison make cmake 'openmpi<5' gsl fftw gmp mpfr hdf5 netcdf4 libopenblas liblapack zlib pip wheel 'docutils>=0.6' 'mpi4py>=1.1.0' 'numpy>=1.2.2' 'h5py>=1.1.0' scipy pytest - - - name: Configure OpenMPI - if: ${{ matrix.os == 'ubuntu-latest' }} - run: | - mkdir -p "$HOME/.openmpi" - echo "btl_tcp_if_include = lo" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_base_oversubscribe = true" >>"$HOME/.openmpi/mca-params.conf" - echo "rmaps_default_mapping_policy = :oversubscribe" >>"$HOME/.openmpi/mca-params.conf" - - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - fetch-tags: true - - - name: Build framework - run: | - ./setup install amuse-framework - - - name: Install required codes - run: | - ./setup install amuse-bhtree amuse-bse amuse-evtwin amuse-fi amuse-fractalcluster amuse-gadget2 amuse-galactics amuse-halogen amuse-hermite amuse-hop amuse-kepler amuse-mesa-r2208 amuse-ph4 amuse-phigrape amuse-seba amuse-sse - - - name: Ensure we test only the 
installed packages - run: | - ./setup distclean - - - name: Test ext - env: - PYTEST_OPTS: "-s -v -x" - run: | - ./setup test amuse-ext - - - name: Archive build logs - if: always() - uses: actions/upload-artifact@v4 - with: - name: logs-${{ matrix.os }} - path: ${{ github.workspace }}/support/logs/ - if-no-files-found: warn diff --git a/src/amuse_adaptb/src/makefile b/src/amuse_adaptb/src/makefile index 9b768c69bb..fc25d24090 100644 --- a/src/amuse_adaptb/src/makefile +++ b/src/amuse_adaptb/src/makefile @@ -1,3 +1,8 @@ +# standard amuse configuration include +# config.mk will be made after ./configure has run +AMUSE_DIR?=../../../../.. +-include ${AMUSE_DIR}/config.mk + ################################################################### CXX ?= g++ @@ -21,7 +26,7 @@ all: $(OBJS) $(EXEC) $(CODELIB) ################################################################### integrator_MS.exe: $(OBJS) - $(CXX) $(CXXFLAGS) $(LDFLAGS) -o $(EXEC) $(OBJS) $(LIBS) + $(CXX) $(CXXFLAGS) -o $(EXEC) $(OBJS) $(LIBS) libadaptb.a: $(OBJS) rm -f $@ diff --git a/src/amuse_gadget2/Makefile b/src/amuse_gadget2/Makefile index 5339e8e060..43aee04791 100644 --- a/src/amuse_gadget2/Makefile +++ b/src/amuse_gadget2/Makefile @@ -81,11 +81,7 @@ package-%: %_contains python3 -m pip install -vv --no-cache-dir --no-deps --no-build-isolation --prefix ${PREFIX} packages/$* test-%: -ifneq ($(CI),) - cd packages/$* && pytest -k 'not noci' -else cd packages/$* && pytest -endif # Cleaning up diff --git a/src/amuse_gadget2/tests/test_gadget2.py b/src/amuse_gadget2/tests/test_gadget2.py index 8e5c678974..f5ef0d9341 100644 --- a/src/amuse_gadget2/tests/test_gadget2.py +++ b/src/amuse_gadget2/tests/test_gadget2.py @@ -881,7 +881,7 @@ def test18(self): self.assertAlmostEqual(instance.dm_particles.z, [0., 0., 0.4] | units.kpc, places=6) instance.stop() - def test19_noci(self): + def test19(self): particles = new_plummer_model(31) instance = Gadget2(self.default_converter, number_of_workers=1) diff --git 
a/src/amuse_hermite/tests/test_hermite.py b/src/amuse_hermite/tests/test_hermite.py index 04538f54ec..964c0819b8 100644 --- a/src/amuse_hermite/tests/test_hermite.py +++ b/src/amuse_hermite/tests/test_hermite.py @@ -579,7 +579,7 @@ def test15(self): instance.stop() - def test16_noci(self): + def test16(self): particles = new_plummer_model(200) particles.scale_to_standard() instance = Hermite() diff --git a/src/amuse_hermite_grx/Makefile b/src/amuse_hermite_grx/Makefile index 802d504913..c72c752e81 100644 --- a/src/amuse_hermite_grx/Makefile +++ b/src/amuse_hermite_grx/Makefile @@ -24,11 +24,10 @@ src/Hermite_GRX: | hermite_grx.tar.gz src # Building the code into a static library DEPFLAGS += $(STOPCOND_CFLAGS) -CFLAGS += $(DEPFLAGS) -pthread -std=c++11 +CFLAGS += $(DEPFLAGS) -pthread LDFLAGS += -pthread LDLIBS += $(STOPCOND_LIBS) -$(info libs: $(LDLIBS) sc: $(STOPCOND_LIBS)) CODELIB = src/Hermite_GRX/src/libhermite_grx.a diff --git a/src/amuse_mesa_r15140/patches/series_mesa b/src/amuse_mesa_r15140/patches/series_mesa new file mode 100644 index 0000000000..b9086cf90d --- /dev/null +++ b/src/amuse_mesa_r15140/patches/series_mesa @@ -0,0 +1,12 @@ +0001-Dont-make-empty-output-directories.patch +0001-Fix-astero-builds-when-disabled.patch +0003-Add-mpi-to-utils.patch +0004-Fix-adipls-and-gyre-builds.patch +0001-Stop-crashes-when-profile-column-name-is-bad.patch +0001-Add-support-for-namelist-setting-from-a-string.patch +0001-declare-i.patch +0001-Get-photo-loading-working.patch +0001-Patch-error-messages.patch +0005-fix_mesa_utils_c_system.patch +0006-fix_cpp_and_lapack_template.patch +0007-configure-build-and-fix-clang.patch From bacb3016ceaf517bff7c394d45d75be685058948 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 14 Apr 2026 17:01:46 +0200 Subject: [PATCH 38/40] another external change --- src/tests/ext_tests/test_distributed_particles.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git 
a/src/tests/ext_tests/test_distributed_particles.py b/src/tests/ext_tests/test_distributed_particles.py index bc047b7172..4cdf976dc5 100644 --- a/src/tests/ext_tests/test_distributed_particles.py +++ b/src/tests/ext_tests/test_distributed_particles.py @@ -8,7 +8,7 @@ from amuse.units import nbody_system from amuse.units import units -from amuse.units.quantities import is_quantity +from amuse.community import * from amuse.support.interface import InCodeComponentImplementation from amuse.io import read_set_from_file, write_set_to_file @@ -630,7 +630,7 @@ def test9(self): self.assertEqual(x[2:6].mass, [3, 10, 11, 12]) - def test10_noci(self): + def test10(self): x = DistributedParticles( size=40, number_of_workers=4 @@ -760,7 +760,7 @@ def test16(self): self.assertEqual(y.index, x.index) self.assertEqual(y.mass, x.mass) - def test17_noci(self): + def test17(self): test_results_path = self.get_path_to_results() filebase = os.path.join(test_results_path, "test_distributed_sets") for i in [0, 1]: @@ -789,7 +789,7 @@ def test17_noci(self): # number of workers > number of files # still problematic, because of non-existing attributes if nothing read - def test18_noci(self): + def test18(self): test_results_path = self.get_path_to_results() filebase = os.path.join(test_results_path, "test_distributed_sets") for i in [0, 1]: @@ -816,7 +816,7 @@ def test18_noci(self): self.assertEqual(x.index, z.index) self.assertEqual(x.mass, z.mass) - def test19_noci(self): + def test19(self): from .test_distributed_particles import distributed_king_generator from amuse.ic.kingmodel import MakeKingModel From e04527c70ba9f04732f3b93b894dda63c1ad0fec Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 14 Apr 2026 17:05:21 +0200 Subject: [PATCH 39/40] that's all of them, I think --- src/amuse_mi6/src/Vector3.h | 2 +- src/amuse_petar/Makefile | 11 +++-------- src/amuse_petar/tests/test_petar.py | 2 +- support/setup/installing.sh | 8 +------- support/setup/testing.sh | 5 +++-- 5 files 
changed, 9 insertions(+), 19 deletions(-) diff --git a/src/amuse_mi6/src/Vector3.h b/src/amuse_mi6/src/Vector3.h index c0335fae0b..f57753b4f6 100644 --- a/src/amuse_mi6/src/Vector3.h +++ b/src/amuse_mi6/src/Vector3.h @@ -114,7 +114,7 @@ class Vector3 } inline Vector3& operator /= (const double b){ - const double binv = 1.0/b; + register double binv = 1.0/b; v[0] *= binv; v[1] *= binv; v[2] *= binv; return *this; } diff --git a/src/amuse_petar/Makefile b/src/amuse_petar/Makefile index d050c1feb5..b4cebcc132 100644 --- a/src/amuse_petar/Makefile +++ b/src/amuse_petar/Makefile @@ -108,16 +108,11 @@ install-%: %_contains package-%: %_contains python3 -m pip install -vv --no-cache-dir --no-deps --no-build-isolation --prefix ${PREFIX} packages/$* - -# The tests use up to 5 workers, each of which will use all the cores via OpenMP, -# slowing things down enormously. By limiting the number of threads, the tests -# run in a reasonable time at least on my 8C/16T laptop. test-%: -ifneq ($(CI),) - cd packages/$* && OMP_NUM_THREADS=1 pytest -k 'not noci' -else + # The tests use up to 5 workers, each of which will use all the cores via OpenMP, + # slowing things down enormously. By limiting the number of threads, the tests + # run in a reasonable time at least on my 8C/16T laptop. 
cd packages/$* && OMP_NUM_THREADS=2 pytest -endif # Cleaning up diff --git a/src/amuse_petar/tests/test_petar.py b/src/amuse_petar/tests/test_petar.py index ee4327439e..0f56685e66 100644 --- a/src/amuse_petar/tests/test_petar.py +++ b/src/amuse_petar/tests/test_petar.py @@ -31,7 +31,7 @@ def test_reversed_time_allowed(self): class TestPetar(TestWithMPI): - def test_small_plummer_model_noci(self): + def test_small_plummer_model(self): particles = plummer.new_plummer_model(31) instance = Petar(number_of_workers=1) # , debugger="xterm") diff --git a/support/setup/installing.sh b/support/setup/installing.sh index 8a413f583e..102f6f05cf 100644 --- a/support/setup/installing.sh +++ b/support/setup/installing.sh @@ -83,7 +83,7 @@ install_framework() { fi done if [ -n "${to_install}" ] ; then - conda install -c conda-forge --override-channels -y ${to_install} + conda install -y ${to_install} fi fi @@ -187,20 +187,16 @@ install_package() { check_package "${package}" if ! is_subset "amuse-framework" "${INSTALLED_PACKAGES}" ; then - save_cmd="${cmd}" save_package="${package}" install_framework package="${save_package}" - cmd="${save_cmd}" fi if is_subset "${package}" "${NEEDS_SAPPORO_LIGHT}" ; then if ! is_subset "sapporo-light" "${INSTALLED_PACKAGES}" ; then - save_cmd="${cmd}" save_package="${package}" install_sapporo_light package="${save_package}" - cmd="${save_cmd}" fi fi @@ -211,11 +207,9 @@ install_package() { # If the code is e.g. CUDA-only, then there may not be a base package. if is_subset "${base_package}" "${EXTANT_PACKAGES}" ; then if ! 
is_subset "${base_package}" "${INSTALLED_PACKAGES}" ; then - save_cmd="${cmd}" save_package="${package}" install_package "${cmd}" "${base_package}" "${brief}" package="${save_package}" - cmd="${save_cmd}" fi fi fi diff --git a/support/setup/testing.sh b/support/setup/testing.sh index 275dc4b4b7..c948145ea7 100644 --- a/support/setup/testing.sh +++ b/support/setup/testing.sh @@ -7,7 +7,7 @@ ensure_pytest() { printf '%s\n\n' "Please activate a conda environment or virtual environment first." elif [ "a${ENV_TYPE}" = "aconda" ] ; then printf '\n%s\n' "Please use" - printf '\n %b\n' "conda install -c conda-forge --override-channels pytest" + printf '\n %b\n' "conda install pytest" printf '\n%s\n\n' "to install pytest, then try again." else printf '\n%s\n' "Please use" @@ -50,6 +50,7 @@ test_framework() { cd src/tests && pytest --import-mode=append core_tests compile_tests --ignore compile_tests/java_implementation -k 'not TestCDistributedImplementationInterface and not TestAsyncDistributed' ${PYTEST_OPTS} else cd src/tests && pytest --import-mode=append core_tests compile_tests --ignore compile_tests/java_implementation -k 'not TestCDistributedImplementationInterface and not TestAsyncDistributed and not noci' ${PYTEST_OPTS} + fi echo $? >"../../${ec_file}" @@ -80,7 +81,7 @@ test_amuse_ext() { log_file="$(log_file test amuse-ext)" ( - cd src/tests && pytest ext_tests ticket_tests --import-mode=append ${PYTEST_OPTS} -s -v -x -k "not noci and ${bad_ext_tests}" + cd src/tests && pytest ext_tests --import-mode=append ticket_tests ${PYTEST_OPTS} -k "${bad_ext_tests}" echo $? 
>"../../${ec_file}" ) 2>&1 | tee "${log_file}" From 725e811c7c471dca0b05086ccf09027e5c60464d Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 14 Apr 2026 17:12:34 +0200 Subject: [PATCH 40/40] final 2 --- support/setup/installing.sh | 2 +- support/setup/testing.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/support/setup/installing.sh b/support/setup/installing.sh index 102f6f05cf..b11933c2f2 100644 --- a/support/setup/installing.sh +++ b/support/setup/installing.sh @@ -83,7 +83,7 @@ install_framework() { fi done if [ -n "${to_install}" ] ; then - conda install -y ${to_install} + conda install -c conda-forge --override-channels -y ${to_install} fi fi diff --git a/support/setup/testing.sh b/support/setup/testing.sh index c948145ea7..d47990b251 100644 --- a/support/setup/testing.sh +++ b/support/setup/testing.sh @@ -7,7 +7,7 @@ ensure_pytest() { printf '%s\n\n' "Please activate a conda environment or virtual environment first." elif [ "a${ENV_TYPE}" = "aconda" ] ; then printf '\n%s\n' "Please use" - printf '\n %b\n' "conda install pytest" + printf '\n %b\n' "conda install -c conda-forge --override-channels pytest" printf '\n%s\n\n' "to install pytest, then try again." else printf '\n%s\n' "Please use"