experiment.py
import numpy as np
import itertools
from argparse import ArgumentParser
from joblib import Parallel, delayed
from distutils import dir_util
from util import read_json
from dataset import generate_dataset
from relabeling import (noisy_baseline,
                        clean_baseline,
                        passive_relabeling,
                        uncertainty_sampling,
                        uncertainty_relabeling,
                        weighted_uncertainty_relabeling,
                        robust_uncertainty_relabeling,
                        robust_ml_uncertainty_relabeling,
                        robust_em_uncertainty_relabeling,
                        robust_map_uncertainty_relabeling,
                        robust_ub_weighted_uncertainty_relabeling,
                        robust_ml_weighted_uncertainty_relabeling,
                        robust_em_weighted_uncertainty_relabeling,
                        robust_map_weighted_uncertainty_relabeling)


def run_experiment(r, alg, dataset, experiment, params, solver_params):
    print('%s, trial: %i, dataset %s, experiment %s' % (alg, r, dataset, experiment))
    # sample a new dataset for this trial
    Sx, Sy_clean, Sy_noise, Tx, Ty = generate_dataset(r, dataset, params)
    # resolve the algorithm name (spaces -> underscores) to the imported
    # relabeling function and perform learning
    alg_func_name = alg.replace(' ', '_')
    func = eval(alg_func_name)
    params['random_state'] = r
    scores = func(Sx, Sy_clean, Sy_noise, Tx, Ty, params, solver_params)
    # save the per-trial scores as a NumPy array
    output_path = 'output/experiment/%s/%s/%s' % (experiment, dataset, alg_func_name)
    dir_util.mkpath(output_path)
    np.save('%s/%02d.npy' % (output_path, r), scores)
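
# Each trial's score array lands at
# output/experiment/<experiment>/<dataset>/<algorithm_name_with_underscores>/<trial>.npy
# (path layout taken directly from run_experiment above).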


if __name__ == '__main__':
    parser = ArgumentParser(description="Relabeling")
    parser.add_argument('--experiment', type=str, default='', nargs='?')
    parser.add_argument('--algorithm', type=str, default='', nargs='?')
    parser.add_argument('--dataset', type=str, default='', nargs='?')
    parser.add_argument('--trial', type=int, default=-1, nargs='?')
    parser.add_argument('--jobs', type=int, default=1, nargs='?')
    args = parser.parse_args()

    experiment = args.experiment
    algorithm = args.algorithm
    dataset = args.dataset
    trial = args.trial
    n_jobs = args.jobs

    experiment_params = read_json('config/experiment.json')
    dataset_params = read_json('config/dataset.json')
    solver_params = read_json('config/solver.json')
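
    # Assumed layout of the JSON configs, inferred from how they are accessed
    # below (a sketch, not the actual files shipped with the repo):
    #   config/relabeling.json: {"algs": {"noisy baseline": 1, "uncertainty sampling": 0, ...}}
    #   config/experiment.json: {"<experiment>": {"solver": {...}, "dataset": {...}}}
    #   config/dataset.json:    {"<dataset>": {"n_repeat": <int>, ...}}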
    # if no algorithm is given, run every algorithm enabled in the relabeling config
    if algorithm == '':
        relabeling_params = read_json('config/relabeling.json')
        ub_alg = 'robust ub weighted uncertainty relabeling'  # not implemented for the deep network
        algs = [k for k, v in relabeling_params['algs'].items() if
                v == 1 and not (experiment == 'deep' and k == ub_alg)]
    else:
        algs = [algorithm]

    # merge experiment-specific overrides into the dataset and solver parameters
    solver_params.update(experiment_params[experiment]['solver'])
    dataset_params[dataset].update(experiment_params[experiment]['dataset'])
    results = dict.fromkeys(algs)
    params = dataset_params[dataset]

    # run all (algorithm, trial) combinations in parallel
    parallel = Parallel(n_jobs=n_jobs, verbose=50)
    R = range(params['n_repeat']) if trial == -1 else [trial]
    parallel(delayed(run_experiment)(r, alg, dataset, experiment, params, solver_params)
             for alg, r in itertools.product(algs, R))
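
# Example invocation (a sketch; the dataset name below is hypothetical and, like
# the experiment name, must match a key in config/dataset.json / config/experiment.json):
#   python experiment.py --experiment deep --dataset mnist \
#       --algorithm "uncertainty sampling" --trial 0 --jobs 4
# Omitting --algorithm runs every algorithm enabled in config/relabeling.json;
# omitting --trial (default -1) runs all n_repeat trials.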