train_aGPT_7B.sh (from a fork of microsoft/Megatron-DeepSpeed)
#!/bin/bash --login
#PBS -q lustre_scaling
#PBS -A Aurora_Deployment
#PBS -j oe
#####################################
# AuroraGPT-7B
#
# Main production script for training
# AuroraGPT-7B @ ALCF
#####################################
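# Example submission (a sketch: node count and walltime below are
# illustrative assumptions, not values prescribed by this script):
#
#   qsub -l select=2 -l walltime=01:00:00 \
#       -q lustre_scaling -A Aurora_Deployment \
#       ./train_aGPT_7B.sh
#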
# 1. Navigate into `$PBS_O_WORKDIR`
cd "${PBS_O_WORKDIR}" || exit
HERE=$(python3 -c 'import os; print(os.getcwd())') && export HERE
GIT_BRANCH=$(git branch --show-current) && export GIT_BRANCH
# 2. source `ALCF/helpers.sh`
source "${HERE}/ALCF/helpers.sh" || exit
# 3. call `setup` from `./ALCF/helpers.sh`
setup "$@" || exit
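# NOTE: `setup` is expected to define `run_cmd` (the assembled training
# command) and `OUTPUT_LOG` (the log file path), both of which are consumed
# below; see ALCF/helpers.sh for the actual definitions.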
# Echo the assembled training command into the output log
echo "${run_cmd[@]}" | tee -a "${OUTPUT_LOG}"
# 4. Tell user where to find output
printf "[!! %s] View output at:\n %s\n" "$(printBlue "NOTE")" "$(printYellow "${OUTPUT_LOG}")" | tee -a "${OUTPUT_LOG}"
# # 5. Ignore the following strings on Intel XPU devices
# # (otherwise they'll clutter up logs)
# XPU_IGNORE_STRING="CCL_WARN|\ -\ INFO\ \-\ |real_accelerator\.py|numexpr\.utils|async_io|libaio"
# if [[ $(ezpz_get_machine_name) == "aurora" ]]; then
# module unload mpich && module load mpich
# fi
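# # If re-enabled, one way to apply the filter (a sketch, not necessarily
# # how helpers.sh consumes XPU_IGNORE_STRING) would be:
# #   eval "${run_cmd[*]}" |& grep -vE "${XPU_IGNORE_STRING}" |& tee -a "${OUTPUT_LOG}"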
#
# 6. Evaluate ${run_cmd} and append outputs to ${OUTPUT_LOG}
# eval "${run_cmd[@]}" |& tee -a "${OUTPUT_LOG}"
eval "${run_cmd[*]}" |& tee -a "${OUTPUT_LOG}"