experiment_weibull.py
#!/usr/bin/env python3
from __future__ import print_function
import argparse
import os.path
import random
import shelve
import sys
from numpy import zeros
import weibull_workload
import simulator
import schedulers
parser = argparse.ArgumentParser(
    description="Run our experiment replicating the settings by Lu et al.; "
                "results will be stored in "
                "DIRNAME/res_SHAPE_SIGMA_LOAD_TIMESHAPE_NJOBS_SEED.s")
parser.add_argument('shape', type=float,
                    help="shape parameter for the distribution of job size; "
                         "the scale parameter is set to ensure mean=1")
parser.add_argument('dirname', help="directory in which to store results")
parser.add_argument('--sigma', type=float, default=0.5,
                    help="sigma parameter for the log-normal error function; "
                         "default is 0.5")
parser.add_argument('--load', type=float, default=0.9,
                    help="average load; default is 0.9")
parser.add_argument('--timeshape', type=float, default=1,
                    help="shape parameter for the Weibull distribution of "
                         "inter-arrival time; default is 1 (i.e., exponential "
                         "distribution)")
parser.add_argument('--njobs', type=int, default=10000,
                    help="number of jobs in the synthetic workload; default "
                         "is 10000")
parser.add_argument('--iterations', type=int, default=1,
                    help="number of times the experiment is run per "
                         "synthetic workload generated; default is 1")
parser.add_argument('--est_factor', type=float,
                    help="multiply estimated size by this value")
parser.add_argument('--normal_error', default=False, action='store_true',
                    help="error function distributed according to a normal "
                         "rather than a log-normal")
parser.add_argument('--seed', type=int, help="random seed")
args = parser.parse_args()
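# Example invocation (the parameter values below are hypothetical, not taken
# from the original experiments): generate a 10000-job workload with job-size
# shape 0.25 under load 0.9 and store the results under ./results:
#
#     python experiment_weibull.py 0.25 results --sigma 0.5 --timeshape 1 \
#         --njobs 10000 --iterations 5 --seed 42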
if args.seed is None:
    seed = random.randrange(2 ** 32)
else:
    seed = args.seed
random.seed(seed)

# generate the synthetic workload: job i arrives at time `start` and has
# (real) size `size`
jobs = weibull_workload.workload(args.shape, args.load, args.njobs,
                                 args.timeshape)
jobs = [(i, start, size) for i, (start, size) in enumerate(jobs)]

# size-estimation error function passed to the simulator
errfunc = (simulator.normal_error if args.normal_error
           else simulator.lognorm_error)
if args.est_factor:
    error = errfunc(args.sigma, args.est_factor)
else:
    error = errfunc(args.sigma)
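# Each entry is (label, scheduler, size function, iterations): schedulers
# paired with simulator.identity work on exact sizes and need a single
# deterministic pass (iterations is None), while the others apply the
# size-estimation error function and are repeated args.iterations times.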
instances = [
    ('FIFO', schedulers.FIFO, simulator.identity, None),
    ('PS', schedulers.PS, simulator.identity, None),
    ('SRPT', schedulers.SRPT, simulator.identity, None),
    ('FSP', schedulers.FSP, simulator.identity, None),
    ('LAS', schedulers.LAS, simulator.identity, None),
    ('SRPTE', schedulers.SRPT, error, args.iterations),
    ('SRPTE+PS', schedulers.SRPT_plus_PS, error, args.iterations),
    ('SRPTE+LAS', schedulers.SRPT_plus_LAS, error, args.iterations),
    ('FSPE', schedulers.FSP, error, args.iterations),
    ('FSPE+PS', schedulers.FSP_plus_PS, error, args.iterations),
    ('FSPE+LAS', schedulers.FSP_plus_LAS, error, args.iterations),
    ('FSPE+DC', schedulers.FSPE_PS_DC, error, args.iterations),
]
jobids = [jobid for jobid, _, _ in jobs]
job_idxs = {jobid: i for i, jobid in enumerate(jobids)}
n_jobs = len(jobids)
job_start = {jobid: start for jobid, start, size in jobs}
basename = 'normal' if args.normal_error else 'res'
if args.est_factor:
    fname_mask = '{}_{}_{}_{}_{}_{}_{}_{}.s'
    fname = fname_mask.format(basename, args.shape, args.sigma, args.load,
                              args.timeshape, args.njobs, args.est_factor,
                              seed)
else:
    fname_mask = '{}_{}_{}_{}_{}_{}_{}.s'
    fname = fname_mask.format(basename, args.shape, args.sigma, args.load,
                              args.timeshape, args.njobs, seed)
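# With the hypothetical example parameters above this would be something like
# res_0.25_0.5_0.9_1.0_10000_42.s (or normal_... when --normal_error is given).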
final_results = shelve.open(os.path.join(args.dirname, fname))
# run each configuration, skipping or resuming work already stored in the shelve
for name, scheduler, error, iterations in instances:
    print(name, end='')
    if iterations is None:
        # if the no. of iterations is None, a single pass is enough
        # (no randomness there)
        if name in final_results:
            continue
        else:
            iterations = 1
    scheduler_results = final_results.get(name, [])
    for i in range(iterations - len(scheduler_results)):
        results = list(simulator.simulator(jobs, scheduler, error))
        sojourns = zeros(n_jobs)
        for compl, jobid in results:
            sojourns[job_idxs[jobid]] = compl - job_start[jobid]
        scheduler_results.append(sojourns)
        print('', sojourns.mean(), end='')
        sys.stdout.flush()
    print()
    final_results[name] = scheduler_results

final_results.close()
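
# A minimal sketch of how the shelved results could be read back afterwards,
# assuming the hypothetical file name from the example above (adapt the path
# to the actual run):
#
#     import shelve, os.path
#     from numpy import concatenate
#
#     res = shelve.open(os.path.join('results', 'res_0.25_0.5_0.9_1.0_10000_42.s'))
#     for scheduler_name, runs in res.items():
#         # each value is a list with one array of per-job sojourn times per run
#         all_sojourns = concatenate(runs)
#         print(scheduler_name, all_sojourns.mean())
#     res.close()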