add mpi hostfile and buildall script

parent 734edd2aa6
commit 4291d88e3b
buildall.sh (new file, +66 lines)
@@ -0,0 +1,66 @@
#!/bin/bash

# HPC Lab unified build script
# Builds all lab projects with xmake

set -e  # exit immediately on any error

SCRIPT_DIR="$(dirname "$0")"
cd "$SCRIPT_DIR"

# Check that xmake is installed
if ! command -v xmake &> /dev/null; then
    echo "Error: xmake is not installed; please install it first"
    echo "Install with: curl -fsSL https://xmake.io/shget.text | bash"
    exit 1
fi

echo "=========================================="
echo "HPC Lab Code - building all projects"
echo "=========================================="

# Build lab1
echo "Building lab1..."
cd lab1
xmake
cd ..

# Build lab2/omp
echo "Building lab2/omp..."
cd lab2/omp
xmake
cd ../..

# Build lab2/pthread
echo "Building lab2/pthread..."
cd lab2/pthread
xmake
cd ../..

# Build lab3/nbody
echo "Building lab3/nbody..."
cd lab3/nbody
xmake
cd ../..

# Build lab3/prime
echo "Building lab3/prime..."
cd lab3/prime
xmake
cd ../..

# Build lab4
echo "Building lab4..."
cd lab4
xmake
cd ..

# Build work
echo "Building work..."
cd work
xmake
cd ..

echo "=========================================="
echo "Build complete!"
echo "=========================================="
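For reference, a typical way to run the new script from the repository root (assuming it has been made executable):

    chmod +x buildall.sh
    ./buildall.sh

Because of set -e, the script stops at the first project whose build fails.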
@@ -35,7 +35,7 @@ echo "Programs found. Starting tests..."
 
 # Test mpi_hello_world
 echo "Testing mpi_hello_world with default settings:"
-mpirun "$MPI_HELLO"
+mpirun --hostfile ~/mpi_hosts "$MPI_HELLO"
 echo "mpi_hello_world test completed."
 
 # Terms to test
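This hunk and the ones that follow pass --hostfile ~/mpi_hosts to mpirun; the hostfile itself does not appear in this diff view. As a rough sketch, an Open MPI hostfile at that path might look like the following (the hostnames and slot counts here are placeholders, not taken from the commit):

    # ~/mpi_hosts: one node per line; slots caps the ranks launched on that node
    node1 slots=4
    node2 slots=4
    node3 slots=4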
@@ -48,7 +48,7 @@ echo "Testing mpi_pi with different terms and processes:"
 for procs in "${PROCS[@]}"; do
     for terms in "${TERMS[@]}"; do
         echo "Running mpi_pi with $procs processes and $terms terms:"
-        mpirun -np $procs "$MPI_PI" <<< $terms
+        mpirun --hostfile ~/mpi_hosts -np $procs "$MPI_PI" <<< $terms
         echo ""
     done
 done
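For illustration, the updated mpi_pi invocation with concrete values substituted for the loop variables; the binary path and term count below are examples, not values from the scripts (the script reads "$MPI_PI" for the path). The <<< here-string feeds the term count to the program's stdin:

    mpirun --hostfile ~/mpi_hosts -np 4 ./mpi_pi <<< 1000000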
@@ -27,7 +27,7 @@ echo "M,N,K,Time_ms" > $SERIAL_OUTPUT
 MATRIX_SIZES="512 1024 2048 4096"
 
 # MPI process counts
-MPI_PROCESSES="1 2 4 9 16"
+MPI_PROCESSES="1 2 3 6 9 12"
 
 # OpenMP thread counts
 OPENMP_THREADS="1 2 4 8"
@@ -75,7 +75,7 @@ for SIZE in $MATRIX_SIZES; do
 
     for NP in $MPI_PROCESSES; do
         echo " MPI processes: $NP"
-        TIME=$(mpirun --oversubscribe -np $NP $BUILD_DIR/gemm_parallel $SIZE $SIZE $SIZE | grep "mpi matmul:" | awk '{print $3}')
+        TIME=$(mpirun --hostfile ~/mpi_hosts --oversubscribe -np $NP $BUILD_DIR/gemm_parallel $SIZE $SIZE $SIZE | grep "mpi matmul:" | awk '{print $3}')
 
         if [ ! -z "$TIME" ]; then
             SPEEDUP=$(echo "scale=4; $SERIAL_TIME / $TIME" | bc)
@@ -106,7 +106,7 @@ for SIZE in $MATRIX_SIZES; do
         TOTAL_PROCS=$((NP * NTHREADS))
         echo " MPI processes: $NP (total processors: $TOTAL_PROCS)"
 
-        TIME=$(mpirun --oversubscribe -np $NP $BUILD_DIR/gemm_parallel $SIZE $SIZE $SIZE | grep "mpi matmul:" | awk '{print $3}')
+        TIME=$(mpirun --hostfile ~/mpi_hosts --oversubscribe -np $NP $BUILD_DIR/gemm_parallel $SIZE $SIZE $SIZE | grep "mpi matmul:" | awk '{print $3}')
 
         if [ ! -z "$TIME" ]; then
             SPEEDUP=$(echo "scale=4; $SERIAL_TIME / $TIME" | bc)
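The speedup line repeated in these hunks divides the serial time by the parallel time using bc with four decimal places of precision. A quick worked example with made-up timings:

    SERIAL_TIME=2000.0
    TIME=512.5
    echo "scale=4; $SERIAL_TIME / $TIME" | bc   # prints 3.9024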
@@ -144,7 +144,7 @@ for SIZE in $MATRIX_SIZES; do
         export OMP_NUM_THREADS=$NTHREADS
         echo " MPI: $NP, OpenMP: $NTHREADS (total processors: $TOTAL_PROCS)"
 
-        TIME=$(mpirun --oversubscribe -np $NP $BUILD_DIR/gemm_parallel $SIZE $SIZE $SIZE | grep "mpi matmul:" | awk '{print $3}')
+        TIME=$(mpirun --hostfile ~/mpi_hosts --oversubscribe -np $NP $BUILD_DIR/gemm_parallel $SIZE $SIZE $SIZE | grep "mpi matmul:" | awk '{print $3}')
 
         if [ ! -z "$TIME" ]; then
             SPEEDUP=$(echo "scale=4; $SERIAL_TIME / $TIME" | bc)
@@ -178,7 +178,7 @@ for SIZE in $MATRIX_SIZES; do
         export OMP_NUM_THREADS=$NTHREADS
         echo " MPI: $NP, OpenMP: $NTHREADS (total processors: $TOTAL_PROCS)"
 
-        TIME=$(mpirun --oversubscribe -np $NP $BUILD_DIR/gemm_optimized $SIZE $SIZE $SIZE | grep "optimized mpi matmul:" | awk '{print $4}')
+        TIME=$(mpirun --hostfile ~/mpi_hosts --oversubscribe -np $NP $BUILD_DIR/gemm_optimized $SIZE $SIZE $SIZE | grep "optimized mpi matmul:" | awk '{print $4}')
 
         if [ ! -z "$TIME" ]; then
             SPEEDUP=$(echo "scale=4; $SERIAL_TIME / $TIME" | bc)
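The timing extraction assumes each binary prints a line whose third (gemm_parallel) or fourth (gemm_optimized) whitespace-separated field is the elapsed time; the exact output format is inferred from the grep/awk pipelines and is not shown in this diff. A sketch with assumed output lines:

    echo "mpi matmul: 812.40 ms" | grep "mpi matmul:" | awk '{print $3}'                       # prints 812.40
    echo "optimized mpi matmul: 498.72 ms" | grep "optimized mpi matmul:" | awk '{print $4}'   # prints 498.72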