High Performance Conjugate Gradient 1.0.2
pts/hpcg-1.0.2
- 03 April 2017 -
add -lstdc++ to LINKFLAGS
downloads.xml
<?xml version="1.0"?>
<!--Phoronix Test Suite v7.0.1-->
<PhoronixTestSuite>
  <Downloads>
    <Package>
      <URL>https://software.sandia.gov/hpcg/downloads/hpcg-1.1.tar.gz</URL>
      <MD5>e82240700004c612bb57869984ba7079</MD5>
      <SHA256>ddef264576ed68ac9bf643ad716e133b4e371cd63a4972198aaedb3976243134</SHA256>
      <FileSize>62496</FileSize>
    </Package>
  </Downloads>
</PhoronixTestSuite>
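The Phoronix Test Suite checks the fetched tarball against the MD5/SHA256 values above before installing. As a rough stand-alone sketch (not part of the profile), the same SHA256 verification could be done by hand with the hash listed in this file:

# Manual verification sketch using the SHA256 from downloads.xml; assumes
# hpcg-1.1.tar.gz has already been downloaded into the current directory.
expected=ddef264576ed68ac9bf643ad716e133b4e371cd63a4972198aaedb3976243134
actual=`sha256sum hpcg-1.1.tar.gz | cut -d " " -f1`
if [ "$actual" = "$expected" ]
then
    echo "hpcg-1.1.tar.gz: checksum OK"
else
    echo "hpcg-1.1.tar.gz: checksum mismatch" >&2
    exit 1
fi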
install.sh
#!/bin/sh

tar -zxvf hpcg-1.1.tar.gz
cd hpcg-1.1

# Find MPI To Use
if [ ! "X$MPI_PATH" = "X" ] && [ -d $MPI_PATH ] && [ -d $MPI_INCLUDE ] && [ -x $MPI_CC ] && [ -e $MPI_LIBS ]
then
    # PRE-SET MPI
    echo "Using pre-set environment variables."
elif [ -d /usr/lib/openmpi/include ]
then
    # OpenMPI On Ubuntu
    MPI_PATH=/usr/lib/openmpi
    MPI_INCLUDE=/usr/lib/openmpi/include
    MPI_LIBS=/usr/lib/openmpi/lib/libmpi.so
    MPI_CC=/usr/bin/mpicxx.openmpi
    MPI_VERSION=`$MPI_CC -showme:version 2>&1 | grep MPI | cut -d "(" -f1 | cut -d ":" -f2`
elif [ -d /usr/lib64/openmpi ]
then
    # OpenMPI On RHEL
    MPI_PATH=/usr/lib64/openmpi
    MPI_INCLUDE=/usr/include/openmpi-x86_64/
    MPI_LIBS=/usr/lib64/openmpi/lib/libmpi.so
    MPI_CC=/usr/lib64/openmpi/bin/mpicxx
    MPI_VERSION=`$MPI_CC -showme:version 2>&1 | grep MPI | cut -d "(" -f1 | cut -d ":" -f2`
elif [ -d /usr/lib/mpich/include ]
then
    # MPICH
    MPI_PATH=/usr/lib/mpich
    MPI_INCLUDE=/usr/lib/mpich/include
    MPI_LIBS=/usr/lib/libmpich.so.1.0
    MPI_CC=/usr/lib/mpich/bin/mpicxx.mpich
    MPI_VERSION=`$MPI_CC -v 2>&1 | grep "MPICH version"`
elif [ -d /usr/include/mpich2 ]
then
    # MPICH2
    MPI_PATH=/usr/include/mpich2
    MPI_INCLUDE=/usr/include/mpich2
    MPI_LIBS=/usr/lib/mpich2/lib/libmpich.so
    MPI_CC=/usr/bin/mpicxx.mpich2
    MPI_VERSION=`$MPI_CC -v 2>&1 | grep "MPICH2 version"`
elif [ -d /usr/include/mpich2-x86_64 ]
then
    # MPICH2
    MPI_PATH=/usr/include/mpich2-x86_64
    MPI_INCLUDE=/usr/include/mpich2-x86_64
    MPI_LIBS=/usr/lib64/mpich2/lib/libmpich.so
    MPI_CC=/usr/bin/mpicxx
    MPI_VERSION=`$MPI_CC -v 2>&1 | grep "MPICH2 version"`
fi

if [ ! "X$MPI_VERSION" = "X" ]
then
    VERSION_INFO=$MPI_VERSION

    if [ ! "X$LA_VERSION" = "X" ]
    then
        VERSION_INFO="$LA_VERSION + $VERSION_INFO"
    fi

    echo $VERSION_INFO > ~/install-footnote
fi

if [ "X$CFLAGS_OVERRIDE" = "X" ]
then
    CFLAGS="$CFLAGS -O3 -march=native -ffast-math -ftree-vectorize"
else
    CFLAGS="$CFLAGS_OVERRIDE"
fi

if [ "X$MPI_LD" = "X" ]
then
    MPI_LD=$MPI_CC
fi

# Make.pts generation
echo "SHELL = /bin/sh
CD = cd
CP = cp
LN_S = ln -s -f
MKDIR = mkdir -p
RM = /bin/rm -f
TOUCH = touch
TOPdir = .
SRCdir = \$(TOPdir)/src
INCdir = \$(TOPdir)/src
BINdir = \$(TOPdir)/bin
MPdir = $MPI_PATH
MPinc = -I$MPI_INCLUDE
MPlib = $MPI_LIBS
HPCG_INCLUDES = -I\$(INCdir) -I\$(INCdir)/\$(arch) \$(MPinc)
HPCG_LIBS =
HPCG_OPTS =
CXX = $MPI_CC
CXXFLAGS = \$(HPCG_DEFS) -fopenmp $CFLAGS
LINKER = $MPI_LD
LINKFLAGS = \$(CXXFLAGS) -lstdc++
ARCHIVER = ar
ARFLAGS = r
RANLIB = echo
" > setup/Make.pts

./configure arch=pts
make
echo $? > ~/install-exit-status
cd ~

echo "#!/bin/sh
cd hpcg-1.1/bin/
rm -f *.yaml

if [ \"X\$MPI_NUM_THREADS\" = \"X\" ]
then
    MPI_NUM_THREADS=\$NUM_CPU_CORES
fi

if [ ! \"X\$HOSTFILE\" = \"X\" ] && [ -f \$HOSTFILE ]
then
    HOSTFILE=\"--hostfile \$HOSTFILE\"
elif [ -f /etc/hostfile ]
then
    HOSTFILE=\"--hostfile /etc/hostfile\"
fi

PATH=\$PATH:$MPI_PATH/bin LD_PRELOAD=$MPI_LIBS mpirun -np \$MPI_NUM_THREADS \$HOSTFILE xhpcg
echo \$? > ~/test-exit-status
cat *.yaml > \$LOG_FILE
rm -f *.yaml" > hpcg
chmod +x hpcg
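With the hpcg wrapper script generated above, a typical install-and-run cycle through the Phoronix Test Suite client would look roughly like the following; the thread count and hostfile path are illustrative values for the run-time variables the wrapper honors, not requirements:

# Sketch of a typical invocation; MPI_NUM_THREADS defaults to the detected
# CPU core count and HOSTFILE is only needed for multi-node MPI runs.
phoronix-test-suite install pts/hpcg-1.0.2
MPI_NUM_THREADS=8 HOSTFILE=/etc/hostfile phoronix-test-suite benchmark pts/hpcg-1.0.2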
results-definition.xml
<?xml version="1.0"?>
<!--Phoronix Test Suite v7.0.1-->
<PhoronixTestSuite>
  <ResultsParser>
    <OutputTemplate>HPCG result is VALID with a GFLOP/s rating of: #_RESULT_#</OutputTemplate>
    <LineHint>HPCG result is VALID with</LineHint>
  </ResultsParser>
</PhoronixTestSuite>
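This parser keys on the summary line HPCG prints for a valid run and substitutes the trailing GFLOP/s figure for #_RESULT_#. A rough stand-alone equivalent of that extraction (a sketch only, assuming the concatenated YAML output is in $LOG_FILE as the hpcg wrapper arranges) might be:

# Pull the GFLOP/s rating out of the HPCG output the same way the
# <OutputTemplate> above does: match the <LineHint> line, keep the number.
grep "HPCG result is VALID with" "$LOG_FILE" | sed "s/.*rating of: //"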
test-definition.xml
<?xml version="1.0"?>
<!--Phoronix Test Suite v7.0.1-->
<PhoronixTestSuite>
  <TestInformation>
    <Title>High Performance Conjugate Gradient</Title>
    <AppVersion>1.1</AppVersion>
    <Description>HPCG is the High Performance Conjugate Gradient benchmark, a new scientific benchmark from Sandia National Labs focused on supercomputer testing with modern real-world workloads compared to HPCC.</Description>
    <Proportion>HIB</Proportion>
    <TimesToRun>3</TimesToRun>
    <PreInstallMessage>Supported install-time optional variables include $MPI_PATH, $MPI_INCLUDE, $MPI_CC, $MPI_LIBS, $CFLAGS, $LD_FLAGS, and $MPI_LD</PreInstallMessage>
    <PostInstallMessage>Supported run-time optional environment variables include $MPI_NUM_THREADS, $HOSTFILE</PostInstallMessage>
  </TestInformation>
  <TestProfile>
    <Version>1.0.2</Version>
    <SupportedPlatforms>Linux</SupportedPlatforms>
    <SoftwareType>Benchmark</SoftwareType>
    <TestType>Processor</TestType>
    <License>Free</License>
    <Status>Verified</Status>
    <ExternalDependencies>build-utilities, fortran-compiler, openmpi-development</ExternalDependencies>
    <EnvironmentSize>2.4</EnvironmentSize>
    <ProjectURL>http://software.sandia.gov/hpcg/default.php</ProjectURL>
    <InternalTags>SMP, MPI</InternalTags>
    <Maintainer>Michael Larabel</Maintainer>
  </TestProfile>
</PhoronixTestSuite>
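The $MPI_* variables named in the PreInstallMessage map directly onto the pre-set MPI branch at the top of install.sh. Pointing the build at a specific OpenMPI installation instead of relying on the auto-detection could look roughly like this; every path below is an illustrative assumption and must match the local MPI install:

# Hypothetical install-time override of the MPI auto-detection; adjust the
# paths to the actual MPI installation before use.
export MPI_PATH=/opt/openmpi
export MPI_INCLUDE=/opt/openmpi/include
export MPI_LIBS=/opt/openmpi/lib/libmpi.so
export MPI_CC=/opt/openmpi/bin/mpicxx
export MPI_LD=/opt/openmpi/bin/mpicxx
phoronix-test-suite install pts/hpcg-1.0.2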