Autoconf update (#765)

Update to autoconf 2.71 for generating the 'configure' script.

Also update 'config.guess' and 'config.sub' to the 2023 versions.
Rob Falgout 2023-02-16 09:42:07 -08:00 committed by GitHub
parent a30bd53628
commit 6907852618
10 changed files with 5803 additions and 4411 deletions
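Reviewer note: the regenerated files come from the Autoconf input rather than hand edits. A minimal sketch of the regeneration steps, assuming autoconf 2.71 is installed and the vendored scripts are refreshed from the upstream GNU config repository; the commands and URLs below are illustrative, not taken from this commit:

    # Regenerate 'configure' with autoconf 2.71 (run where the Autoconf input file lives)
    autoconf --version | head -n 1   # expect "autoconf (GNU Autoconf) 2.71"
    autoconf                         # rewrites ./configure from configure.ac (or configure.in)

    # Refresh the vendored platform-detection scripts to the 2023 versions (URLs are assumptions)
    wget -O config/config.guess https://git.savannah.gnu.org/cgit/config.git/plain/config.guess
    wget -O config/config.sub   https://git.savannah.gnu.org/cgit/config.git/plain/config.sub
    chmod +x config/config.guess config/config.sub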


@@ -1 +1,2 @@
fatal: No names found, cannot describe anything.
WARNING: unknown Fortran name-mangling scheme


@@ -17,9 +17,6 @@
/* Define to 1 if you have the <inttypes.h> header file. */
#undef HAVE_INTTYPES_H
/* Define to 1 if you have the <memory.h> header file. */
#undef HAVE_MEMORY_H
/* Define to 1 if using MLI */
#undef HAVE_MLI
@@ -29,6 +26,9 @@
/* Define to 1 if you have the <stdint.h> header file. */
#undef HAVE_STDINT_H
/* Define to 1 if you have the <stdio.h> header file. */
#undef HAVE_STDIO_H
/* Define to 1 if you have the <stdlib.h> header file. */
#undef HAVE_STDLIB_H
@@ -301,5 +301,7 @@
/* Define to the version of this package. */
#undef PACKAGE_VERSION
/* Define to 1 if you have the ANSI C header files. */
/* Define to 1 if all of the C90 standard headers exist (not just the ones
required in a freestanding environment). This macro is provided for
backward compatibility; new code need not use it. */
#undef STDC_HEADERS

src/config/config.guess (vendored, 1658 lines): diff suppressed because it is too large.

src/config/config.sub (vendored, 3000 lines): diff suppressed because it is too large.


@@ -59,7 +59,7 @@ m4_include([config/hypre_blas_macros.m4])
m4_include([config/hypre_lapack_macros.m4])
m4_include([config/hypre_macros_misc.m4])
AC_PREREQ(2.59)
AC_PREREQ([2.69])
AC_REVISION($Id$)
AC_INIT(M4_HYPRE_NAME, M4_HYPRE_VERSION)
AC_CONFIG_HEADERS([HYPRE_config.h:config/HYPRE_config.h.in])
@@ -2157,11 +2157,11 @@ dnl *********************************************************************
if [test "$hypre_using_cuda" = "yes"]
then
AC_CHECK_HEADERS(["${CUDA_HOME}/include/cuda.h"], [hypre_found_cuda=yes; HYPRE_CUDA_PATH=${CUDA_HOME}])
AC_CHECK_HEADER(["${CUDA_HOME}/include/cuda.h"], [hypre_found_cuda=yes; HYPRE_CUDA_PATH=${CUDA_HOME}])
if test "x$hypre_found_cuda" != "xyes"
then
AC_CHECK_HEADERS(["${CUDA_PATH}/include/cuda.h"], [hypre_found_cuda=yes; HYPRE_CUDA_PATH=${CUDA_PATH}])
AC_CHECK_HEADER(["${CUDA_PATH}/include/cuda.h"], [hypre_found_cuda=yes; HYPRE_CUDA_PATH=${CUDA_PATH}])
fi
if test "x$hypre_found_cuda" != "xyes"
@@ -2274,10 +2274,10 @@ AS_IF([ test x"$hypre_using_hip" == x"yes" ],
[ HYPRE_ROCM_PREFIX=/opt/rocm ])
AC_SUBST(HYPRE_ROCM_PREFIX)
AC_CHECK_HEADERS( ["${HYPRE_ROCM_PREFIX}/include/hip/hip_common.h"],
[hypre_found_hip=yes],
[AC_MSG_ERROR([unable to find ${HYPRE_ROCM_PREFIX}/include/hip/hip_common.h ... Ensure ROCm is installed and set ROCM_PATH environment variable to ROCm installation path.])] )],
[])
AC_CHECK_HEADER( ["${HYPRE_ROCM_PREFIX}/include/hip/hip_common.h"],
[hypre_found_hip=yes],
[AC_MSG_ERROR([unable to find ${HYPRE_ROCM_PREFIX}/include/hip/hip_common.h ... Ensure ROCm is installed and set ROCM_PATH environment variable to ROCm installation path.])] )],
[])
dnl *********************************************************************
dnl * Set raja options
@@ -2607,9 +2607,9 @@ AS_IF([test x"$hypre_using_sycl" == x"yes"],
fi
AS_IF([test x"$hypre_using_onemklsparse" == x"yes" || test x"$hypre_using_onemklblas" == x"yes" || test x"$hypre_using_onemklrand" == x"yes"],
[AC_CHECK_HEADERS(["${MKLROOT}/include/mkl.h"],
[hypre_found_mkl=yes],
AC_MSG_ERROR([unable to find oneMKL ... Ensure that MKLROOT is set]))
[AC_CHECK_HEADER(["${MKLROOT}/include/mkl.h"],
[hypre_found_mkl=yes],
AC_MSG_ERROR([unable to find oneMKL ... Ensure that MKLROOT is set]))
HYPRE_SYCL_LIBS="${HYPRE_SYCL_LIBS} -qmkl -Wl,-export-dynamic -Wl,--start-group -Wl,--end-group -lsycl -lOpenCL -lpthread -lm -ldl"
HYPRE_SYCL_INCL="${HYPRE_SYCL_INCL} -qmkl -I${DPLROOT}/include -DMKL_ILP64 -I${MKLROOT}/include"
])
@@ -2926,4 +2926,4 @@ dnl * Define the files to be configured and made
dnl *********************************************************************
AC_CONFIG_FILES([config/Makefile.config])
AC_OUTPUT()
AC_OUTPUT
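The AC_PREREQ and AC_OUTPUT edits above follow current Autoconf usage (a bracketed minimum version, and AC_OUTPUT called without a parenthesized argument list). A quick, generic way to confirm the regenerated script was produced by the new Autoconf; this is only a sketch, not the project's documented test procedure:

    # The generated script reports the Autoconf version that produced it
    ./configure --version | grep -i autoconf   # expect "generated by GNU Autoconf 2.71"
    ./configure && make -j4                    # minimal configure-and-build smoke test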


@@ -15,15 +15,15 @@ dnl if it is not found. If ACTION-IF-FOUND is not specified,
dnl the default action will define HAVE_MPI.
dnl **********************************************************************
AC_DEFUN([AC_HYPRE_CHECK_MPI],
[AC_PREREQ(2.57)dnl
AC_PREREQ(2.50) dnl for AC_LANG_CASE
[AC_PREREQ([2.69])dnl
AC_PREREQ([2.69]) dnl for AC_LANG_CASE
if test x = x"$MPILIBS"; then
AC_LANG_CASE([C], [AC_CHECK_FUNC(MPI_Init, [MPILIBS=" "])],
[C++], [AC_CHECK_FUNC(MPI_Init, [MPILIBS=" "])],
[Fortran 77], [AC_MSG_CHECKING([for MPI_Init])
AC_TRY_LINK([],[ call MPI_Init], [MPILIBS=" "
AC_MSG_RESULT(yes)], [AC_MSG_RESULT(no)])])
AC_LINK_IFELSE([AC_LANG_PROGRAM([[]], [[ call MPI_Init]])],[MPILIBS=" "
AC_MSG_RESULT(yes)],[AC_MSG_RESULT(no)])])
fi
if test x = x"$MPILIBS"; then
@@ -34,16 +34,16 @@ if test x = x"$MPILIBS"; then
AC_CHECK_LIB(mpich, MPI_Init, [MPILIBS="-lmpich"])
fi
dnl We have to use AC_TRY_COMPILE and not AC_CHECK_HEADER because the
dnl latter uses $CPP, not $CC (which may be mpicc).
dnl We have to use AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[]])],[],[]) and not
dnl AC_CHECK_HEADER because the latter uses $CPP, not $CC (which may be mpicc).
AC_LANG_CASE([C], [if test x != x"$MPILIBS"; then
AC_MSG_CHECKING([for mpi.h])
AC_TRY_COMPILE([#include <mpi.h>],[],[AC_MSG_RESULT(yes)], [MPILIBS=""
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <mpi.h>]], [[]])],[AC_MSG_RESULT(yes)],[MPILIBS=""
AC_MSG_RESULT(no)])
fi],
[C++], [if test x != x"$MPILIBS"; then
AC_MSG_CHECKING([for mpi.h])
AC_TRY_COMPILE([#include <mpi.h>],[],[AC_MSG_RESULT(yes)], [MPILIBS=""
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <mpi.h>]], [[]])],[AC_MSG_RESULT(yes)],[MPILIBS=""
AC_MSG_RESULT(no)])
fi])
@@ -94,11 +94,11 @@ dnl *
dnl * Set compile FLAGS for optimization
dnl **********************************************************************
AC_DEFUN([AC_HYPRE_OPTIMIZATION_FLAGS],
[AC_PREREQ(2.57)dnl
[AC_PREREQ([2.69])dnl
if test "x${hypre_user_chose_cflags}" = "xno"
then
case `basename "${CC}"` in
case `basename ${CC}` in
gcc|mpigcc|mpicc)
CFLAGS="-O2"
if test "$hypre_using_openmp" = "yes" ; then
@@ -138,7 +138,7 @@ fi
if test "x${hypre_user_chose_cxxflags}" = "xno"
then
case `basename "${CXX}"` in
case `basename ${CXX}` in
g++|gCC|mpig++|mpicxx|mpic++|mpiCC)
CXXFLAGS="-O2"
if test "$hypre_using_openmp" = "yes" ; then
@@ -172,9 +172,9 @@ then
esac
fi
if test "x${hypre_user_chose_fflags}" = "xno"
if test "$hypre_using_fortran" = "yes" -a "x${hypre_user_chose_fflags}" = "xno"
then
case `basename "${FC}"` in
case `basename ${FC}` in
g77|gfortran|mpigfortran|mpif77)
FFLAGS="-O2"
if test "$hypre_using_openmp" = "yes" ; then
@@ -214,11 +214,11 @@ dnl *
dnl * Set compile FLAGS for debug
dnl **********************************************************************
AC_DEFUN([AC_HYPRE_DEBUG_FLAGS],
[AC_PREREQ(2.57)dnl
[AC_PREREQ([2.69])dnl
if test "x${hypre_user_chose_cflags}" = "xno"
then
case `basename "${CC}"` in
case `basename ${CC}` in
gcc|mpigcc|mpicc)
CFLAGS="-g -Wall"
if test "$hypre_using_openmp" = "yes" ; then
@@ -258,7 +258,7 @@ fi
if test "x${hypre_user_chose_cxxflags}" = "xno"
then
case `basename "${CXX}"` in
case `basename ${CXX}` in
g++|gCC|mpig++|mpicxx|mpic++|mpiCC)
CXXFLAGS="-g -Wall"
if test "$hypre_using_openmp" = "yes" ; then
@@ -292,9 +292,9 @@ then
esac
fi
if test "x${hypre_user_chose_fflags}" = "xno"
if test "$hypre_using_fortran" = "yes" -a "x${hypre_user_chose_fflags}" = "xno"
then
case `basename "${FC}"` in
case `basename ${FC}` in
g77|gfortran|mpigfortran|mpif77)
FFLAGS="-g -Wall"
if test "$hypre_using_openmp" = "yes" ; then

src/configure (vendored, 5426 lines): diff suppressed because it is too large.


@@ -181,19 +181,19 @@ void hypre_smallest_abs_val( HYPRE_Int n,
__global__ void
hypreGPUKernel_InterpTruncationPass1_v1( hypre_DeviceItem &item,
#if defined(HYPRE_USING_SYCL)
char *shmem_ptr,
char *shmem_ptr,
#endif
HYPRE_Int nrows,
HYPRE_Real trunc_factor,
HYPRE_Int max_elmts,
HYPRE_Int *P_diag_i,
HYPRE_Int *P_diag_j,
HYPRE_Real *P_diag_a,
HYPRE_Int *P_offd_i,
HYPRE_Int *P_offd_j,
HYPRE_Real *P_offd_a,
HYPRE_Int *P_diag_i_new,
HYPRE_Int *P_offd_i_new )
HYPRE_Int nrows,
HYPRE_Real trunc_factor,
HYPRE_Int max_elmts,
HYPRE_Int *P_diag_i,
HYPRE_Int *P_diag_j,
HYPRE_Real *P_diag_a,
HYPRE_Int *P_offd_i,
HYPRE_Int *P_offd_j,
HYPRE_Real *P_offd_a,
HYPRE_Int *P_diag_i_new,
HYPRE_Int *P_offd_i_new )
{
const HYPRE_Int row = hypre_gpu_get_grid_thread_id<1, 1>(item);


@@ -55,24 +55,29 @@ hypreDevice_CSRSpGemmOnemklsparse(HYPRE_Int m,
/* sort copies of col indices and data for A and B */
/* WM: todo - this is currently necessary for correctness of oneMKL's matmat, but this may change in the future? */
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::set_csr_data(handle_A, m, k, oneapi::mkl::index_base::zero, d_ia, d_ja_sorted, d_a_sorted) );
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::set_csr_data(handle_B, k, n, oneapi::mkl::index_base::zero, d_ib, d_jb_sorted, d_b_sorted) );
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::sort_matrix(*hypre_HandleComputeStream(hypre_handle()), handle_A, {}).wait() );
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::sort_matrix(*hypre_HandleComputeStream(hypre_handle()), handle_B, {}).wait() );
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::set_csr_data(handle_A, m, k, oneapi::mkl::index_base::zero,
d_ia, d_ja_sorted, d_a_sorted) );
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::set_csr_data(handle_B, k, n, oneapi::mkl::index_base::zero,
d_ib, d_jb_sorted, d_b_sorted) );
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::sort_matrix(*hypre_HandleComputeStream(hypre_handle()),
handle_A, {}).wait() );
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::sort_matrix(*hypre_HandleComputeStream(hypre_handle()),
handle_B, {}).wait() );
oneapi::mkl::sparse::matmat_descr_t descr = NULL;
oneapi::mkl::sparse::matmat_request req;
d_ic = hypre_TAlloc(HYPRE_Int, m + 1, HYPRE_MEMORY_DEVICE);
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::set_csr_data(handle_C, m, n, oneapi::mkl::index_base::zero, d_ic, d_jc, d_c) );
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::set_csr_data(handle_C, m, n, oneapi::mkl::index_base::zero,
d_ic, d_jc, d_c) );
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::init_matmat_descr(&descr) );
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::set_matmat_data(descr,
oneapi::mkl::sparse::matrix_view_descr::general,
oneapi::mkl::transpose::nontrans,
oneapi::mkl::sparse::matrix_view_descr::general,
oneapi::mkl::transpose::nontrans,
oneapi::mkl::sparse::matrix_view_descr::general) );
oneapi::mkl::sparse::matrix_view_descr::general,
oneapi::mkl::transpose::nontrans,
oneapi::mkl::sparse::matrix_view_descr::general,
oneapi::mkl::transpose::nontrans,
oneapi::mkl::sparse::matrix_view_descr::general) );
/* get tmp_buffer1 size for work estimation */
req = oneapi::mkl::sparse::matmat_request::get_work_estimation_buf_size;
@@ -152,7 +157,8 @@ hypreDevice_CSRSpGemmOnemklsparse(HYPRE_Int m,
hypre_TMemcpy(nnzC_h, nnzC_d, std::int64_t, 1, HYPRE_MEMORY_HOST, HYPRE_MEMORY_DEVICE);
d_jc = hypre_TAlloc(HYPRE_Int, *nnzC_h, HYPRE_MEMORY_DEVICE);
d_c = hypre_TAlloc(HYPRE_Complex, *nnzC_h, HYPRE_MEMORY_DEVICE);
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::set_csr_data(handle_C, m, n, oneapi::mkl::index_base::zero, d_ic, d_jc, d_c) );
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::set_csr_data(handle_C, m, n, oneapi::mkl::index_base::zero,
d_ic, d_jc, d_c) );
/* finalize C */
req = oneapi::mkl::sparse::matmat_request::finalize;
@@ -176,8 +182,10 @@ hypreDevice_CSRSpGemmOnemklsparse(HYPRE_Int m,
*d_c_out = d_c;
/* restore the original (unsorted) col indices and data to A and B and free sorted arrays */
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::set_csr_data(handle_A, m, k, oneapi::mkl::index_base::zero, d_ia, d_ja_sorted, d_a_sorted) );
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::set_csr_data(handle_B, k, n, oneapi::mkl::index_base::zero, d_ib, d_jb_sorted, d_b_sorted) );
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::set_csr_data(handle_A, m, k, oneapi::mkl::index_base::zero,
d_ia, d_ja_sorted, d_a_sorted) );
HYPRE_ONEMKL_CALL( oneapi::mkl::sparse::set_csr_data(handle_B, k, n, oneapi::mkl::index_base::zero,
d_ib, d_jb_sorted, d_b_sorted) );
hypre_TFree(d_a_sorted, HYPRE_MEMORY_DEVICE);
hypre_TFree(d_b_sorted, HYPRE_MEMORY_DEVICE);
hypre_TFree(d_ja_sorted, HYPRE_MEMORY_DEVICE);


@@ -54,7 +54,8 @@ hypreGPUKernel_CSRMatvecShuffleGT8(hypre_DeviceItem &item,
{
#if defined (HYPRE_USING_SYCL)
const HYPRE_Int grid_ngroups = item.get_group_range(2) * (HYPRE_SPMV_BLOCKDIM / K);
HYPRE_Int grid_group_id = (item.get_group(2) * HYPRE_SPMV_BLOCKDIM + item.get_local_id(2)) / K;
HYPRE_Int grid_group_id = (item.get_group(2) * HYPRE_SPMV_BLOCKDIM + item.get_local_id(
2)) / K;
const HYPRE_Int group_lane = item.get_local_id(2) & (K - 1);
#else
const HYPRE_Int grid_ngroups = gridDim.x * (HYPRE_SPMV_BLOCKDIM / K);