#!/bin/bash
# Benchmark driver: for each (num_workers, num_tables) combination, run the
# pub/sub replication test 10 times, capturing per-run output into
# ${prefix}_<tables>t_<size>_<workers>w_<run>.dat[_PUB|_SUB] files.
set -u

# Table counts to test.
tables=( 100 )

#tables=( 10 \
#100 \
#1000 \
#2000
#)

# Worker counts to test.
workers=( 2 4 8 16 )

#workers=( 2 \
#4 \
#8 \
#16
#)

# Prefix for all output files of this batch.
prefix="0816headbusy"

# For now, either of "0" or "10kB" could be accepted
size="0"
#size="10rec"
#size="50rec"
#size="500rec"
#size="1000rec"
#size="10kB"

for j in "${workers[@]}"
do
    for i in "${tables[@]}"
    do
	for k in {1..10}
	do
	    # Common stem for this run's output files.
	    out="${prefix}_${i}t_${size}_${j}w_${k}"
	    printf 'num_tables=%s, size=%s, num_workers=%s, run #%s\n' "$i" "$size" "$j" "$k"

	    # Common setup (runs synchronously before the parallel phase).
	    ./do_one_test_setup.sh "$i" "$size" "$j" "$k" "$prefix" &> "${out}.dat"
#	    ./do_one_test_setup_SYNC.sh "$i" "$size" "$j" "$k" "$prefix" &> "${out}.dat"
	    #
	    # Run the pub and sub scripts in parallel; capture their PIDs so
	    # a failing side is reported instead of silently reaped.
	    ./do_one_test_PUB.sh "$i" "$size" "$j" "$k" &> "${out}.dat_PUB" &
	    pub_pid=$!
#	    ./do_one_test_SUB_SYNCDONE.sh "$i" "$size" "$j" "$k" &> "${out}.dat_SUB_SYNCDONE" &
	    ./do_one_test_SUB.sh "$i" "$size" "$j" "$k" &> "${out}.dat_SUB" &
	    sub_pid=$!
	    wait "$pub_pid" || printf 'WARN: PUB script failed (run %s)\n' "$k" >&2
	    wait "$sub_pid" || printf 'WARN: SUB script failed (run %s)\n' "$k" >&2
#
	    # Shut both instances down cleanly before the next iteration.
	    pg_ctl --wait -D datapub stop
	    pg_ctl --wait -D datasub stop
#	
#	    Backup the data directory if needed
#	    cp -r datapub "BAK_datapub_${out}"
	done
    done
done
