-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathmain_singlepixel.py
106 lines (96 loc) · 3.6 KB
/
main_singlepixel.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
from data import MNIST, BernoulliSyntheticDataset, MNISTWavelet, Synthetic, BagOfWords
from recovery import NA_ALISTA, IHT
from train import run_experiment
from baseline import run_experiment_baseline, NeighborGenerator
from sensing_matrices import CompletelyLearned, Pixel, LeftDRegularGraph, LoadedFromNumpy, ConstructedPooling
from noise import GaussianNoise, StudentNoise, Noiseless
import numpy as np
from conf import device
import matplotlib.pyplot as plt
import pandas as pd
import os
def save_log(results, name):
    """Persist experiment logs to CSV files under the prefix *name*.

    Parameters
    ----------
    results : tuple or list
        Either a ``(train_logs, test_logs)`` pair (length exactly 2) or
        the test logs alone.  NOTE(review): a bare test-log list that
        happens to have length 2 would be mis-detected as a pair —
        assumes callers never pass that; confirm against run_experiment.
    name : str
        Path prefix; writes ``<name>_test.csv`` and, when training logs
        are present and non-empty, ``<name>_train.csv``.
    """
    if len(results) == 2:
        train_logs, test_logs = results
    else:
        test_logs = results
        train_logs = None  # sentinel: no training logs to persist (was `False`)
    pd.DataFrame(test_logs).to_csv(name + "_test.csv", index=False)
    # Truthiness check deliberately skips both None and empty log lists,
    # matching the original behavior.
    if train_logs:
        pd.DataFrame(train_logs).to_csv(name + "_train.csv", index=False)
# ---- Experiment configuration -------------------------------------------
# (duplicate `from data import BernoulliSyntheticDataset` removed — it is
# already imported at the top of the file)
n = 784            # ambient signal dimension (28x28 images, flattened)
s = 50             # sparsity level of the signals
model = IHT(15, s)  # IHT recovery: 15 iterations, sparsity s — TODO confirm arg order
epochs = 250       # training epochs for the learned-matrix / baseline runs

# Ensure the output directory exists before any run writes to it
# (exist_ok avoids the check-then-create race of the original).
os.makedirs("results", exist_ok=True)
for data in [MNISTWavelet(), Synthetic(n, s, s, BernoulliSyntheticDataset, batch_size=512)]:
    for m in [50, 200]:
        for seed in range(0, 10):
            # ---- Grid-search the sensing-matrix scaling factor ----------
            # Run the untrained pipeline (epochs=0) for each candidate
            # scalar and keep the one with the lowest test NMSE.
            search_results = []
            scalars = []
            for scalar in np.linspace(0.1, 1, 55):
                search_results.append(run_experiment(
                    n=n,
                    sensing_matrix=Pixel(m, n, 32, scalar, seed, False),
                    model=model,
                    data=data,
                    use_mse=True,
                    train_matrix=False,
                    use_median=False,
                    noise=GaussianNoise(40),
                    epochs=0,
                    positive_threshold=0.01,
                    lr=0.0002,
                ))
                scalars.append(scalar)
            # Each result is a (train_logs, test_logs) pair; test_logs[0]
            # carries the final "test_nmse" — TODO confirm against run_experiment.
            nmse = [r[1][0]["test_nmse"] for r in search_results]
            best = int(np.argmin(nmse))  # hoisted: originally recomputed four times
            save_log(search_results[best],
                     f"results/singlepixel_random_{data.name}seed_{seed}m_{m}")
            # The follow-up runs use the best scalar shrunk by 10%.
            best_scalar = scalars[best] * 0.9

            # ---- Ours: learn the sensing matrix by gradient descent -----
            # NOTE: a fresh Pixel is built for every run on purpose —
            # train_matrix=True mutates it, so the instance must not be shared.
            save_log(run_experiment(
                n=n,
                sensing_matrix=Pixel(m, n, 32, best_scalar, seed, False),
                model=model,
                data=data,
                use_mse=True,
                train_matrix=True,
                use_median=False,
                noise=GaussianNoise(40),
                epochs=epochs,
                positive_threshold=0.01,
                lr=0.0002,
            ), f"results/singlepixel_learned_{data.name}seed_{seed}m_{m}")

            # ---- Baseline: simulated annealing --------------------------
            save_log(run_experiment_baseline(
                n=n,
                sensing_matrix=NeighborGenerator(Pixel(m, n, 32, best_scalar, seed, False).to(device)),
                model=model,
                data=data,
                use_mse=True,
                train_matrix=True,
                use_median=False,
                noise=GaussianNoise(40),
                epochs=epochs,
                positive_threshold=0.01,
                initial_temperature=0.0012,
                temperature_decay=0.9997,
                greedy=False,
            ), f"results/singlepixel_baseline_{data.name}seed_{seed}m_{m}_2")

            # ---- Baseline: greedy local search --------------------------
            save_log(run_experiment_baseline(
                n=n,
                sensing_matrix=NeighborGenerator(Pixel(m, n, 32, best_scalar, seed, False).to(device)),
                model=model,
                data=data,
                use_mse=True,
                train_matrix=True,
                use_median=False,
                noise=GaussianNoise(40),
                epochs=epochs,
                positive_threshold=0.01,
                initial_temperature=None,
                temperature_decay=None,
                greedy=True,
            ), f"results/singlepixel_baseline_greedy_{data.name}seed_{seed}m_{m}_2")