forked from steffi7574/LayerParallelLearning
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathsubmit_paramstudy.py
More file actions
65 lines (49 loc) · 1.78 KB
/
submit_paramstudy.py
File metadata and controls
65 lines (49 loc) · 1.78 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
#!/usr/bin/env python
import sys
import os
import copy
from batch_job import submit_job
from config import *
from util import *
# Launch-command prefix for the target cluster
# ("srun -n" on quartz, "mpirun -np" on elwe).
# NOTE(review): the value below uses "-n" while the comment above says
# "mpirun -np" — confirm which flag the elwe MPI install expects.
#runcommand = "srun -n"
runcommand = "mpirun -n"
# Name of the training-data folder; it is linked into each job folder
# via make_link further below.
datafolder = "data"
# Baseline configuration; each job works on a deep copy of this object.
configfile = Config("config.cfg")
# Parameter sweep: one job is submitted per combination of these values.
gammatik = [1e-5]
gammaddt = [1e-5]
gammaclass = [1e-1, 1e-5]
npt = 2 # number of processors (passed to the run command)
# Submit one batch job per combination of the regularization parameters.
# For each combination: copy the global config, override the gammas,
# create a job folder named after the parameter values, link the shared
# training data into it, dump the per-job config file, and submit.
for gamma_tik in gammatik:
    for gamma_ddt in gammaddt:
        for gamma_class in gammaclass:
            # Work on a copy so the global config object stays pristine
            konfig = copy.deepcopy(configfile)
            konfig.gamma_tik = gamma_tik
            konfig.gamma_ddt = gamma_ddt
            konfig.gamma_class = gamma_class
            # Encode the parameter choice in the job name
            jobname = \
                "n1024opt" +\
                "tik" + str(konfig.gamma_tik) +\
                "ddt" + str(konfig.gamma_ddt) +\
                "class" + str(konfig.gamma_class)
            # Create the job folder (no-op if it already exists; avoids
            # the race-prone exists()/mkdir() pair)
            os.makedirs(jobname, exist_ok=True)
            # Link the shared training/validation data into the job folder
            make_link(datafolder, os.path.join(jobname, datafolder))
            # Write the per-job config file
            newconfigfile = jobname + ".cfg"
            konfig.dump(os.path.join(jobname, newconfigfile))
            # Submit from inside the job folder; always chdir back even
            # if submit_job raises, so later iterations start from the
            # correct working directory
            os.chdir(jobname)
            try:
                submit_job(jobname, runcommand, npt, "10:00:00", "../main", newconfigfile)
            finally:
                os.chdir("..")