add seed parameter

Meeeeee6623 2021-10-19 16:31:34 -04:00
parent 7ba2202bf8
commit e7fd7d1a68
3 changed files with 7 additions and 4 deletions


@@ -3,9 +3,10 @@ for (( i = 0; i < 3; i++ )); do # 3 runs
mkdir /imagenet/MxNet/run_"${i}"/
echo "Created Folder run_${i}"
./runner -n 2 -b 192 --num-epochs 90 --mode train_val \
--save-frequency 1 \
--amp --dllogger-log /imagenet/MxNet/run_"${i}"/run_"${i}".log \
--workspace /imagenet/MxNet/run_"${i}"/ --data-backend synthetic \
--data-mxnet-threads 80 \
- --data-root /imagenet --warmup 0
+ --data-root /imagenet --warmup 0 \
+ --seed "${i}"
echo "Run ${i} done"
done
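For context, a minimal sketch of how a per-run --seed value like the one added above is typically consumed on the Python side of an MXNet training script. The argument handling below is illustrative and assumed, not taken from the actual runner.

# Illustrative only: typical consumption of a --seed flag in MXNet training code.
import argparse
import random

import mxnet as mx
import numpy as np

parser = argparse.ArgumentParser()
parser.add_argument("--seed", type=int, default=None,
                    help="random seed for reproducible runs")
args, _ = parser.parse_known_args()

if args.seed is not None:
    random.seed(args.seed)     # Python stdlib RNG
    np.random.seed(args.seed)  # NumPy RNG (shuffling, augmentation)
    mx.random.seed(args.seed)  # MXNet RNG (weight init, dropout)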


@@ -6,6 +6,7 @@ for (( i = 0; i < 3; i++ )); do # 3 runs
python ./multiproc.py --nproc_per_node 2 ./main.py --arch resnet50 \
--amp --data-backend pytorch -j 20 --epochs 90 -b 192 \
--warmup 0 --raport-file /imagenet/PyTorch/run_"${i}"/run_"${i}".json \
- --workspace /imagenet/PyTorch/run_"${i}" /imagenet
+ --workspace /imagenet/PyTorch/run_"${i}" /imagenet \
+ --seed "${i}"
echo "Run ${i} done"
done
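Likewise, a hedged sketch of how the new --seed flag might be applied inside the PyTorch entry point launched above; main.py is passed --seed in this commit, but the exact seeding code below is an assumption.

# Illustrative only: applying a --seed flag in a PyTorch training script.
import argparse
import random

import numpy as np
import torch

parser = argparse.ArgumentParser()
parser.add_argument("--seed", type=int, default=None)
args, _ = parser.parse_known_args()

if args.seed is not None:
    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)           # CPU RNG
    torch.cuda.manual_seed_all(args.seed)  # RNGs on all visible GPUs

In multi-process launches such as multiproc.py, some setups additionally offset the seed by process rank so each worker draws a distinct random stream.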


@@ -226,7 +226,8 @@ def main(argv):
optimizers = [mlp_optimizer]
elif FLAGS.optimizer == 'adam':
embedding_optimizer = tfa.optimizers.LazyAdam(lr=FLAGS.learning_rate)
embedding_optimizer = tfa.optimizers.LazyAdam(lr=FLAGS.learning_rate)
mlp_optimizer = tf.keras.optimizers.Adam(lr=FLAGS.learning_rate)
if FLAGS.amp:
embedding_optimizer = LossScaleOptimizer(embedding_optimizer,
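The TensorFlow hunk above sits in the optimizer-selection branch of main(argv). For the seed side of this commit, a hypothetical sketch of how a seed flag could be wired into such an absl-flags based TF2 script is shown below; FLAGS.seed and the helper name are assumptions, not confirmed by the hunk shown.

# Hypothetical sketch: seeding an absl-flags based TF2 training script.
import random

import numpy as np
import tensorflow as tf
from absl import flags

FLAGS = flags.FLAGS
flags.DEFINE_integer("seed", None, "random seed for reproducible runs")

def maybe_set_seed():
    # Assumed helper, called early in main(argv).
    if FLAGS.seed is not None:
        random.seed(FLAGS.seed)
        np.random.seed(FLAGS.seed)
        tf.random.set_seed(FLAGS.seed)  # seeds TensorFlow's global generator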