Skip to content
Snippets Groups Projects
Commit c194bbbf authored by Cassandra Grzonkowski's avatar Cassandra Grzonkowski
Browse files

visualization local applied

parent e2890c82
No related branches found
No related tags found
No related merge requests found
......@@ -26,8 +26,8 @@ def setup_parser(folder_given, folder_given_2=None, folder_given_3=None, folder_
out.add_argument('--folder_13',
default=f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/{folder_given_6}/',
type=str, help="Path to load model parameter")
out.add_argument('--save_folder',
default=f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/epoch_1_models_v_3_epoch_2/',
out.add_argument('--save_folder', # epoch_1_models_v_3
default=f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/epoch_1_models_v_2/test/', # epoch_1_models_v_2/test
type=str, help="Path to load model parameter")
return out
......@@ -39,22 +39,43 @@ def plot(data, label, save_path):
plt.legend()
plt.savefig(save_path)
def plot_accuracy(data_1, label_1, data_2, label_2, data_3, label_3,
                  data_4, label_4, data_5, label_5, data_6, label_6, save_path):
    """Plot six accuracy curves (one point per batch, all epochs concatenated).

    Each ``data_i`` is a list of 1-D torch tensors (one tensor per epoch);
    they are concatenated along dim 0 so the x axis runs over every batch of
    every epoch.  The x ticks are relabelled from batch index to epoch index,
    assuming a fixed number of batches per epoch.

    Saves the figure to ``save_path``; returns None.
    """
    fig = plt.figure(figsize=(16, 8))
    # Flatten the per-epoch tensor lists into one long per-batch series each.
    data_1 = torch.cat(data_1, dim=0)
    data_2 = torch.cat(data_2, dim=0)
    data_3 = torch.cat(data_3, dim=0)
    data_4 = torch.cat(data_4, dim=0)
    data_5 = torch.cat(data_5, dim=0)
    data_6 = torch.cat(data_6, dim=0)
    plt.plot(np.arange(0, len(data_1)), data_1, label=label_1)
    plt.plot(np.arange(0, len(data_2)), data_2, label=label_2)
    plt.plot(np.arange(0, len(data_3)), data_3, label=label_3)
    plt.plot(np.arange(0, len(data_4)), data_4, label=label_4)
    plt.plot(np.arange(0, len(data_5)), data_5, label=label_5)
    plt.plot(np.arange(0, len(data_6)), data_6, label=label_6)
    size = 30
    plt.legend(loc='upper left', prop={'size': 22})
    # One epoch is assumed to span `step` batches (867 was the batch count of
    # the run this was written for) -- TODO confirm against the training run.
    step = 867
    ticks = range(0, len(data_1), step)
    # Derive the epoch label of each tick from the tick itself instead of a
    # hard-coded 6-epoch list: plt.xticks raises when the number of ticks and
    # the number of labels differ, which happened for any series length other
    # than 6 * 867 batches.
    plt.xticks(ticks, [i // step for i in ticks], fontsize=22)
    plt.yticks(fontsize=22)
    plt.xlabel("epoch", fontsize=size)
    plt.ylabel("accuracy", fontsize=size)
    plt.savefig(save_path)
    # Close the figure so repeated calls do not accumulate open figures.
    plt.close(fig)
def plot_accuracy_1(save_path, type, data_1, label_1, data_2, label_2, data_3=None, label_3=None,
                    data_4=None, label_4=None, data_5=None, label_5=None, data_6=None, label_6=None,
                    data_7=None, label_7=None, data_8=None, label_8=None):
    """Plot two to eight accuracy curves over batch index into one figure.

    ``data_1`` and ``data_2`` are required; ``data_3`` .. ``data_8`` are
    optional and skipped when None.  Each series is converted with
    ``np.array`` and plotted against its own batch index.  ``type`` is used
    as the y-axis label (the name shadows the builtin but is kept for
    caller compatibility).

    Saves the figure to ``save_path``; returns None.
    """
    fig = plt.figure(figsize=(15, 8))
    # One loop replaces eight copy-pasted plot blocks; order is preserved so
    # line colors and legend order match the original.
    series = [(data_1, label_1), (data_2, label_2), (data_3, label_3), (data_4, label_4),
              (data_5, label_5), (data_6, label_6), (data_7, label_7), (data_8, label_8)]
    for data, label in series:
        if data is not None:
            data = np.array(data)
            plt.plot(np.arange(0, len(data)), data, label=label)
    size = 30
    plt.legend(loc='upper left', prop={'size': 22})
    plt.xticks(fontsize=22)
    plt.yticks(fontsize=22)
    plt.xlabel("batch", fontsize=size)
    plt.ylabel(type, fontsize=size)
    plt.savefig(save_path)
    # Close the figure so repeated calls do not accumulate open figures.
    plt.close(fig)
def plot_ppl_loss(type, data_1, label_1, data_2, label_2, data_3, label_3, data_4, label_4, save_path):
    """Plot four perplexity/loss curves (per batch, all epochs concatenated).

    Each ``data_i`` is a list of 1-D torch tensors (one tensor per epoch);
    they are concatenated along dim 0 so the x axis runs over every batch of
    every epoch.  ``type`` is used as the y-axis label (the name shadows the
    builtin but is kept for caller compatibility).  The x ticks are
    relabelled from batch index to epoch index, assuming a fixed number of
    batches per epoch.

    Saves the figure to ``save_path``; returns None.
    """
    fig = plt.figure(figsize=(16, 8))
    # Flatten the per-epoch tensor lists into one long per-batch series each.
    data_1 = torch.cat(data_1, dim=0)
    data_2 = torch.cat(data_2, dim=0)
    data_3 = torch.cat(data_3, dim=0)
    data_4 = torch.cat(data_4, dim=0)
    plt.plot(np.arange(0, len(data_1)), data_1, label=label_1)
    plt.plot(np.arange(0, len(data_2)), data_2, label=label_2)
    plt.plot(np.arange(0, len(data_3)), data_3, label=label_3)
    plt.plot(np.arange(0, len(data_4)), data_4, label=label_4)
    size = 30
    plt.legend(loc='upper left', prop={'size': 22})
    # One epoch is assumed to span `step` batches (867 was the batch count of
    # the run this was written for) -- TODO confirm against the training run.
    step = 867
    ticks = range(0, len(data_1), step)
    # Derive the epoch label of each tick from the tick itself instead of a
    # hard-coded 6-epoch list: plt.xticks raises when the number of ticks and
    # the number of labels differ, which happened for any series length other
    # than 6 * 867 batches.
    plt.xticks(ticks, [i // step for i in ticks], fontsize=22)
    plt.yticks(fontsize=22)
    plt.xlabel("epoch", fontsize=size)
    plt.ylabel(type, fontsize=size)
    plt.savefig(save_path)
    # Close the figure so repeated calls do not accumulate open figures.
    plt.close(fig)
def plot_ppl_loss_1(type, save_path, data_1, label_1, data_2, label_2, data_3=None, label_3=None,
                    data_4=None, label_4=None, data_5=None, label_5=None, data_6=None, label_6=None,
                    data_7=None, label_7=None, data_8=None, label_8=None):
    """Plot two to eight perplexity/loss curves over batch index into one figure.

    ``data_1`` and ``data_2`` are required; ``data_3`` .. ``data_8`` are
    optional and skipped when None.  Each series is converted with
    ``np.array`` and plotted against its own batch index.  ``type`` is used
    as the y-axis label (the name shadows the builtin but is kept for
    caller compatibility).

    Saves the figure to ``save_path``; returns None.
    """
    fig = plt.figure(figsize=(10, 8))
    # One loop replaces eight copy-pasted plot blocks; order is preserved so
    # line colors and legend order match the original.
    series = [(data_1, label_1), (data_2, label_2), (data_3, label_3), (data_4, label_4),
              (data_5, label_5), (data_6, label_6), (data_7, label_7), (data_8, label_8)]
    for data, label in series:
        if data is not None:
            data = np.array(data)
            plt.plot(np.arange(0, len(data)), data, label=label)
    size = 30
    plt.legend(prop={'size': 22})
    plt.xticks(fontsize=22)
    plt.yticks(fontsize=22)
    plt.xlabel("batch", fontsize=size)
    plt.ylabel(type, fontsize=size)
    plt.savefig(save_path)
    # Close the figure so repeated calls do not accumulate open figures.
    plt.close(fig)
if __name__ == '__main__':
version_1 = True
version = "version_01"
if version == "version_01":
#folder_given = "epoch_3_eval/Model_et_epoch_3_fraxtil"
#folder_given = "epoch_3_eval/thresholding_epoch_3_fraxtil"
#folder_given = "combine_tokens_approach_2803_s_e_token_no_u_token"
folder_given = "Model_empty_token_approach_t_2"
#folder_given = "thresholding_2503_all_data_3_new_comb_t_100"
#folder_given = "Model_lrt_approach_without_thresholding_cpu"
folder_given_2 = "Model_lrt_approach_cpu"
#folder_given = "Model_lrt_approach_and_thresholding_cpu"
approach = "ET"
approach_1 = "ST"
#parser = setup_parser(folder_given)
#args, unknown = parser.parse_known_args()
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
save_folder = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/long_models/results/'
folder = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/long_models/ET_t2/'
folder_2 = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/long_models/ST_t2/'
epochs = [1, 2, 3, 4, 5, 6]
all_ppl = []
all_ppl_len = []
all_ppl_2 = []
all_ppl_2_len = []
all_ppl_v_2 = []
all_ppl_v_2_len= []
all_ppl_v_2_2 = []
all_ppl_v_2_2_len = []
all_loss = []
all_loss_len = []
all_loss_2 = []
all_loss_2_len = []
all_loss_v_2 = []
all_loss_v_2_len = []
all_loss_v_2_2 = []
all_loss_v_2_2_len = []
all_loss_v_3 = []
all_loss_v_3_len = []
all_loss_v_3_2 = []
all_loss_v_3_2_len = []
all_accuracy = []
all_accuracy_len = []
all_accuracy_2 = []
all_accuracy_2_len = []
all_accuracy_v_2 = []
all_accuracy_v_2_len = []
all_accuracy_v_2_2 = []
all_accuracy_v_2_2_len = []
all_accuracy_v_3 = []
all_accuracy_v_3_len = []
all_accuracy_v_3_2 = []
all_accuracy_v_3_2_len = []
for epoch in epochs:
ppl = torch.load(f"{folder}ppl_batches_epoch_{epoch}")
all_ppl.append(ppl)
all_ppl_len.append(len(ppl))
ppl_2 = torch.load(f"{folder_2}ppl_batches_epoch_{epoch}")
all_ppl_2.append(ppl_2)
all_ppl_2_len.append(len(ppl_2))
ppl_v_2 = torch.load(f"{folder}ppl_v_2_batches_epoch_{epoch}")
all_ppl_v_2.append(ppl_v_2)
all_ppl_v_2_len.append(len(ppl_v_2))
ppl_v_2_2 = torch.load(f"{folder_2}ppl_v_2_batches_epoch_{epoch}")
all_ppl_v_2_2.append(ppl_v_2_2)
all_ppl_v_2_2_len.append(len(ppl_v_2_2))
#ms_per_batch = torch.load(f"{folder}ms_all_batches_epoch_{epoch}")
#ms_per_batch_2 = torch.load(f"{folder_2}ms_all_batches_epoch_{epoch}")
loss = torch.load(f"{folder}loss_epoch_{epoch}")
all_loss.append(loss)
all_loss_len.append(len(loss))
loss_2 = torch.load(f"{folder_2}loss_epoch_{epoch}")
all_loss_2.append(loss_2)
all_loss_2_len.append(len(loss_2))
accuracy = torch.load(f"{folder}accuracy_epoch_{epoch}")
all_accuracy.append(accuracy)
all_accuracy_len.append(len(accuracy))
accuracy_2 = torch.load(f"{folder_2}accuracy_epoch_{epoch}")
all_accuracy_2.append(accuracy_2)
all_accuracy_2_len.append(len(accuracy_2))
loss_v_2 = torch.load(f"{folder}loss_v_2_epoch_{epoch}")
all_loss_v_2.append(loss_v_2)
all_loss_v_2_len.append(len(loss_v_2))
loss_v_2_2 = torch.load(f"{folder_2}loss_v_2_epoch_{epoch}")
all_loss_v_2_2.append(loss_v_2_2)
all_loss_v_2_2_len.append(len(loss_v_2_2))
accuracy_v_2 = torch.load(f"{folder}accuracy_v_2_epoch_{epoch}")
all_accuracy_v_2.append(accuracy_v_2)
all_accuracy_v_2_len.append(len(accuracy_v_2))
accuracy_v_2_2 = torch.load(f"{folder_2}accuracy_v_2_epoch_{epoch}")
all_accuracy_v_2_2.append(accuracy_v_2_2)
all_accuracy_v_2_2_len.append(len(accuracy_v_2_2))
accuracy_v_3 = torch.load(f"{folder}accuracy_v_3_epoch_{epoch}")
all_accuracy_v_3.append(accuracy_v_3)
all_accuracy_v_3_len.append(len(accuracy_v_3))
accuracy_v_3_2 = torch.load(f"{folder_2}accuracy_v_3_epoch_{epoch}")
all_accuracy_v_3_2.append(accuracy_v_3_2)
all_accuracy_v_3_2_len.append(len(accuracy_v_3_2))
#with open(f"{folder}accuracy.txt", 'r') as fp:
# avg_prob_non_empty_pre = fp.read()
#avg_prob_non_empty_pre = avg_prob_non_empty_pre.split("\n")
#avg_prob_non_empty = [float(t) for t in avg_prob_non_empty_pre[:-1]]
plot_accuracy(all_accuracy, "ET_accuracy", all_accuracy_2, "ST_accuracy",
all_accuracy_v_2, "ET_accuracy_step", all_accuracy_v_2_2, "ST_accuracy_step",
all_accuracy_v_3, "ET_accuracy_anystep", all_accuracy_v_3_2, "ST_accuracy_anystep",
f"{save_folder}{approach}_{approach_1}_accuracy_epoch_{epoch}.pdf")
plot_ppl_loss("perplexity", all_ppl, "ET_ppl", all_ppl_2, "ST_ppl",
all_ppl_v_2, "ET_ppl_step", all_ppl_v_2_2, "ST_ppl_step",
f"{save_folder}{approach}_{approach_1}_ppl_epoch_{epoch}.pdf")
#plot(ms_per_batch, "ms_per_batch", f"{args.save_folder}{approach}_ms_per_batch_epoch_{epoch}.pdf")
plot_ppl_loss("loss", all_loss, "ET_loss", all_loss_2, "ST_loss",
all_loss_v_2, "ET_loss_step", all_loss_v_2_2, "ST_loss_step",
f"{save_folder}{approach}_{approach_1}_loss_epoch_{epoch}.pdf")
elif version == "version_0":
#folder_given = "epoch_3_eval/Model_et_epoch_3_fraxtil"
#folder_given = "epoch_3_eval/thresholding_epoch_3_fraxtil"
#folder_given = "combine_tokens_approach_2803_s_e_token_no_u_token"
folder_given = "Model_empty_token_approach_t_2"
#folder_given = "thresholding_2503_all_data_3_new_comb_t_100"
#folder_given = "Model_lrt_approach_without_thresholding_cpu"
folder_given_2 = "Model_lrt_approach_cpu"
#folder_given = "Model_lrt_approach_and_thresholding_cpu"
approach = "ET"
approach_1 = "ST"
#parser = setup_parser(folder_given)
#args, unknown = parser.parse_known_args()
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
save_folder = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/epoch_1_models_v_2/overview_2/'
folder = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/{folder_given}/'
folder_2 = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/{folder_given_2}/'
epoch = 1
ppl = torch.load(f"{folder}ppl_batches_epoch_{epoch}")
ppl_2 = torch.load(f"{folder_2}ppl_batches_epoch_{epoch}")
ppl_v_2 = torch.load(f"{folder}ppl_v_2_batches_epoch_{epoch}")
ppl_v_2_2 = torch.load(f"{folder_2}ppl_v_2_batches_epoch_{epoch}")
#ms_per_batch = torch.load(f"{folder}ms_all_batches_epoch_{epoch}")
#ms_per_batch_2 = torch.load(f"{folder_2}ms_all_batches_epoch_{epoch}")
loss = torch.load(f"{folder}loss_epoch_{epoch}")
loss_2 = torch.load(f"{folder_2}loss_epoch_{epoch}")
accuracy = torch.load(f"{folder}accuracy_epoch_{epoch}")
accuracy_2 = torch.load(f"{folder_2}accuracy_epoch_{epoch}")
loss_v_2 = torch.load(f"{folder}loss_v_2_epoch_{epoch}")
loss_v_2_2 = torch.load(f"{folder_2}loss_v_2_epoch_{epoch}")
accuracy_v_2 = torch.load(f"{folder}accuracy_v_2_epoch_{epoch}")
accuracy_v_2_2 = torch.load(f"{folder_2}accuracy_v_2_epoch_{epoch}")
accuracy_v_3 = torch.load(f"{folder}accuracy_v_3_epoch_{epoch}")
accuracy_v_3_2 = torch.load(f"{folder_2}accuracy_v_3_epoch_{epoch}")
#with open(f"{folder}accuracy.txt", 'r') as fp:
# avg_prob_non_empty_pre = fp.read()
#avg_prob_non_empty_pre = avg_prob_non_empty_pre.split("\n")
#avg_prob_non_empty = [float(t) for t in avg_prob_non_empty_pre[:-1]]
if version_1:
plot_accuracy(accuracy, "ET_accuracy", accuracy_2, "ST_accuracy",
accuracy_v_2, "ET_accuracy_step", accuracy_v_2_2, "ST_accuracy_step",
accuracy_v_3, "ET_accuracy_anystep", accuracy_v_3_2, "ST_accuracy_anystep",
f"{save_folder}{approach}_{approach_1}_accuracy_epoch_{epoch}.pdf")
plot_ppl_loss("perplexity", ppl, "ET_ppl", ppl_2, "ST_ppl",
ppl_v_2, "ET_ppl_step", ppl_v_2_2, "ST_ppl_step",
f"{save_folder}{approach}_{approach_1}_ppl_epoch_{epoch}.pdf")
#plot(ms_per_batch, "ms_per_batch", f"{args.save_folder}{approach}_ms_per_batch_epoch_{epoch}.pdf")
plot_ppl_loss("loss", loss, "ET_loss", loss_2, "ST_loss",
loss_v_2, "ET_loss_step", loss_v_2_2, "ST_loss_step",
f"{save_folder}{approach}_{approach_1}_loss_epoch_{epoch}.pdf")
elif version == "version_1":
#folder_given = "epoch_3_eval/Model_et_epoch_3_fraxtil"
folder_given = "epoch_3_eval/thresholding_epoch_3_fraxtil"
#folder_given = "epoch_3_eval/thresholding_epoch_3_fraxtil"
folder_given = "combine_tokens_approach_2803_s_e_token_no_u_token"
#folder_given = "Model_empty_token_approach_t_2"
#folder_given = "thresholding_2503_all_data_3_new_comb_t_100"
#folder_given = "Model_lrt_approach_without_thresholding_cpu"
#folder_given = "Model_lrt_approach_cpu"
#folder_given = "Model_lrt_approach_and_thresholding_cpu"
approach = "Empty_token_approach_no_t"
parser = setup_parser(folder_given)
args, unknown = parser.parse_known_args()
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
......@@ -143,13 +384,12 @@ if __name__ == '__main__':
#avg_prob_non_empty = [float(t) for t in avg_prob_non_empty_pre[:-1]]
plot_accuracy(accuracy, "accuracy", accuracy_v_2, "accuracy_v_2", accuracy_v_3, "accuracy_v_3", f"{folder}_accuracy_epoch_{epoch}.pdf")
plot_ppl_loss(ppl, "ppl", ppl_v_2, "ppl_v_2", f"{folder}_ppl_epoch_{epoch}.pdf")
plot(ms_per_batch, "ms_per_batch", f"{folder}_ms_per_batch_epoch_{epoch}.pdf")
plot_ppl_loss(loss, "loss", loss_v_2, "loss_v_2", f"{folder}_loss_epoch_{epoch}.pdf")
plot_accuracy(accuracy, "accuracy", accuracy_v_2, "accuracy_step", accuracy_v_3, "accuracy_anystep", f"{args.save_folder}{approach}_accuracy_epoch_{epoch}.pdf")
plot_ppl_loss("perplexity", ppl, "ppl", ppl_v_2, "ppl_step", f"{args.save_folder}{approach}_ppl_epoch_{epoch}.pdf")
plot(ms_per_batch, "ms_per_batch", f"{args.save_folder}{approach}_ms_per_batch_epoch_{epoch}.pdf")
plot_ppl_loss("loss", loss, "loss", loss_v_2, "loss_step", f"{args.save_folder}{approach}_loss_epoch_{epoch}.pdf")
else:
elif version == "version_2":
# version 2, per main approach, compare values such as accuracy, accuracy_v_2, ...
# naive approach
......@@ -158,127 +398,164 @@ if __name__ == '__main__':
#folder_3_name = None
#approach = "naive_approach"
# single empty token approach, wrong ordering?
#folder_1_name = "combine_tokens_approach_2803_s_e_token_no_u_token"
#folder_2_name = "Model_empty_token_approach_t_2"
#folder_3_name = "thresholding_2503_all_data_3_new_comb_t_100"
#approach = "empty_approach"
folder_1_name = "naive_approach_s_e_token_no_t"
folder_2_name = "Model_naive_approach_t_2"
# folder_3_name = None
approach_1 = "naive"
# empty approach
folder_11_name = "combine_tokens_approach_2803_s_e_token_no_u_token"
folder_12_name = "Model_empty_token_approach_t_2"
folder_13_name = "thresholding_2503_all_data_3_new_comb_t_100"
approach_1 = "empty_approach"
folder_3_name = "combine_tokens_approach_2803_s_e_token_no_u_token"
folder_4_name = "Model_empty_token_approach_t_2"
folder_5_name = "thresholding_2503_all_data_3_new_comb_t_100"
approach_2 = "ET"
#lrt approach
folder_1_name = "Model_lrt_approach_without_thresholding_cpu"
folder_2_name = "Model_lrt_approach_cpu"
folder_3_name = "Model_lrt_approach_and_thresholding_cpu"
approach = "lrt"
folder_6_name = "Model_lrt_approach_without_thresholding_cpu"
folder_7_name = "Model_lrt_approach_cpu"
folder_8_name = "Model_lrt_approach_and_thresholding_cpu"
approach_3 = "ST"
# for single approach
#approach_1 = None
#folder_11_name = None
#folder_12_name = None
#folder_13_name = None
#parser = setup_parser(folder_1_name, folder_2_name, folder_3_name)
parser = setup_parser(folder_11_name, folder_12_name, folder_13_name, folder_1_name, folder_2_name, folder_3_name)
save_folder = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/epoch_1_models_v_2/all_2/'
folder_1 = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/{folder_1_name}/'
folder_2 = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/{folder_2_name}/'
folder_3 = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/{folder_3_name}/'
folder_4 = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/{folder_4_name}/'
folder_5 = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/{folder_5_name}/'
folder_6 = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/{folder_6_name}/'
folder_7 = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/{folder_7_name}/'
folder_8 = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/{folder_8_name}/'
#parser = setup_parser(folder_11_name, folder_12_name, folder_13_name, folder_1_name, folder_2_name, folder_3_name)
#parser = setup_parser(folder_1, folder_2, folder_3)
args, unknown = parser.parse_known_args()
os.makedirs(args.save_folder, exist_ok=True)
#args, unknown = parser.parse_known_args()
os.makedirs(save_folder, exist_ok=True)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
folder = args.folder
folder_2 = args.folder_2
if folder_3_name is None:
folder_3 = None
else:
folder_3 = args.folder_3
if folder_11_name is None:
folder_11 = None
else:
folder_11 = args.folder_11
if folder_12_name is None:
folder_12 = None
else:
folder_12 = args.folder_12
if folder_13_name is None:
folder_13 = None
else:
folder_13 = args.folder_13
save_folder = args.save_folder
#folder_3 = args.folder_3
epoch = 3
ppl = torch.load(f"{folder}ppl_batches_epoch_{epoch}")
epoch = 1
ppl_1 = torch.load(f"{folder_1}ppl_batches_epoch_{epoch}")
ppl_2 = torch.load(f"{folder_2}ppl_batches_epoch_{epoch}")
if folder_3 is not None:
ppl_3 = torch.load(f"{folder_3}ppl_batches_epoch_{epoch}")
if folder_11 is not None:
ppl_11 = torch.load(f"{folder_11}ppl_batches_epoch_{epoch}")
if folder_12 is not None:
ppl_12 = torch.load(f"{folder_12}ppl_batches_epoch_{epoch}")
if folder_13 is not None:
ppl_13 = torch.load(f"{folder_13}ppl_batches_epoch_{epoch}")
ppl_v_2 = torch.load(f"{folder}ppl_v_2_batches_epoch_{epoch}")
if folder_4 is not None:
ppl_4 = torch.load(f"{folder_4}ppl_batches_epoch_{epoch}")
if folder_5 is not None:
ppl_5 = torch.load(f"{folder_5}ppl_batches_epoch_{epoch}")
if folder_6 is not None:
ppl_6 = torch.load(f"{folder_6}ppl_batches_epoch_{epoch}")
if folder_7 is not None:
ppl_7 = torch.load(f"{folder_7}ppl_batches_epoch_{epoch}")
if folder_8 is not None:
ppl_8 = torch.load(f"{folder_8}ppl_batches_epoch_{epoch}")
ppl_v_2_1 = torch.load(f"{folder_1}ppl_v_2_batches_epoch_{epoch}")
ppl_v_2_2 = torch.load(f"{folder_2}ppl_v_2_batches_epoch_{epoch}")
if folder_3 is not None:
ppl_v_2_3 = torch.load(f"{folder_3}ppl_v_2_batches_epoch_{epoch}")
if folder_11 is not None:
ppl_v_2_11 = torch.load(f"{folder_11}ppl_v_2_batches_epoch_{epoch}")
if folder_12 is not None:
ppl_v_2_12 = torch.load(f"{folder_12}ppl_v_2_batches_epoch_{epoch}")
if folder_13 is not None:
ppl_v_2_13 = torch.load(f"{folder_13}ppl_v_2_batches_epoch_{epoch}")
if folder_4 is not None:
ppl_v_2_4 = torch.load(f"{folder_4}ppl_v_2_batches_epoch_{epoch}")
if folder_5 is not None:
ppl_v_2_5 = torch.load(f"{folder_5}ppl_v_2_batches_epoch_{epoch}")
if folder_6 is not None:
ppl_v_2_6 = torch.load(f"{folder_6}ppl_v_2_batches_epoch_{epoch}")
if folder_7 is not None:
ppl_v_2_7 = torch.load(f"{folder_7}ppl_v_2_batches_epoch_{epoch}")
if folder_8 is not None:
ppl_v_2_8 = torch.load(f"{folder_8}ppl_v_2_batches_epoch_{epoch}")
#ms_per_batch = torch.load(f"{folder}ms_all_batches_epoch_{epoch}")
#ms_per_batch_2 = torch.load(f"{folder_2}ms_all_batches_epoch_{epoch}")
#ms_per_batch_3 = torch.load(f"{folder_3}ms_all_batches_epoch_{epoch}")
loss = torch.load(f"{folder}loss_epoch_{epoch}")
loss_1 = torch.load(f"{folder_1}loss_epoch_{epoch}")
loss_2 = torch.load(f"{folder_2}loss_epoch_{epoch}")
if folder_3 is not None:
loss_3 = torch.load(f"{folder_3}loss_epoch_{epoch}")
if folder_11 is not None:
loss_11 = torch.load(f"{folder_11}loss_epoch_{epoch}")
if folder_12 is not None:
loss_12 = torch.load(f"{folder_12}loss_epoch_{epoch}")
if folder_13 is not None:
loss_13 = torch.load(f"{folder_13}loss_epoch_{epoch}")
accuracy = torch.load(f"{folder}accuracy_epoch_{epoch}")
if folder_4 is not None:
loss_4 = torch.load(f"{folder_4}loss_epoch_{epoch}")
if folder_5 is not None:
loss_5 = torch.load(f"{folder_5}loss_epoch_{epoch}")
if folder_6 is not None:
loss_6 = torch.load(f"{folder_6}loss_epoch_{epoch}")
if folder_7 is not None:
loss_7 = torch.load(f"{folder_7}loss_epoch_{epoch}")
if folder_8 is not None:
loss_8 = torch.load(f"{folder_8}loss_epoch_{epoch}")
accuracy_1 = torch.load(f"{folder_1}accuracy_epoch_{epoch}")
accuracy_2 = torch.load(f"{folder_2}accuracy_epoch_{epoch}")
if folder_3 is not None:
accuracy_3 = torch.load(f"{folder_3}accuracy_epoch_{epoch}")
if folder_11 is not None:
accuracy_11 = torch.load(f"{folder_11}accuracy_epoch_{epoch}")
if folder_12 is not None:
accuracy_12 = torch.load(f"{folder_12}accuracy_epoch_{epoch}")
if folder_13 is not None:
accuracy_13 = torch.load(f"{folder_13}accuracy_epoch_{epoch}")
loss_v_2 = torch.load(f"{folder}loss_v_2_epoch_{epoch}")
if folder_4 is not None:
accuracy_4 = torch.load(f"{folder_4}accuracy_epoch_{epoch}")
if folder_5 is not None:
accuracy_5 = torch.load(f"{folder_5}accuracy_epoch_{epoch}")
if folder_6 is not None:
accuracy_6 = torch.load(f"{folder_6}accuracy_epoch_{epoch}")
if folder_7 is not None:
accuracy_7 = torch.load(f"{folder_7}accuracy_epoch_{epoch}")
if folder_8 is not None:
accuracy_8 = torch.load(f"{folder_8}accuracy_epoch_{epoch}")
loss_v_2_1 = torch.load(f"{folder_1}loss_v_2_epoch_{epoch}")
loss_v_2_2 = torch.load(f"{folder_2}loss_v_2_epoch_{epoch}")
if folder_3 is not None:
loss_v_2_3 = torch.load(f"{folder_3}loss_v_2_epoch_{epoch}")
if folder_11 is not None:
loss_v_2_11 = torch.load(f"{folder_11}loss_v_2_epoch_{epoch}")
if folder_12 is not None:
loss_v_2_12 = torch.load(f"{folder_12}loss_v_2_epoch_{epoch}")
if folder_13 is not None:
loss_v_2_13 = torch.load(f"{folder_13}loss_v_2_epoch_{epoch}")
accuracy_v_2 = torch.load(f"{folder}accuracy_v_2_epoch_{epoch}")
if folder_4 is not None:
loss_v_2_4 = torch.load(f"{folder_4}loss_v_2_epoch_{epoch}")
if folder_5 is not None:
loss_v_2_5 = torch.load(f"{folder_5}loss_v_2_epoch_{epoch}")
if folder_6 is not None:
loss_v_2_6 = torch.load(f"{folder_6}loss_v_2_epoch_{epoch}")
if folder_7 is not None:
loss_v_2_7 = torch.load(f"{folder_7}loss_v_2_epoch_{epoch}")
if folder_8 is not None:
loss_v_2_8 = torch.load(f"{folder_8}loss_v_2_epoch_{epoch}")
accuracy_v_2_1 = torch.load(f"{folder_1}accuracy_v_2_epoch_{epoch}")
accuracy_v_2_2 = torch.load(f"{folder_2}accuracy_v_2_epoch_{epoch}")
if folder_3 is not None:
accuracy_v_2_3 = torch.load(f"{folder_3}accuracy_v_2_epoch_{epoch}")
if folder_11 is not None:
accuracy_v_2_11 = torch.load(f"{folder_11}accuracy_v_2_epoch_{epoch}")
if folder_12 is not None:
accuracy_v_2_12 = torch.load(f"{folder_12}accuracy_v_2_epoch_{epoch}")
if folder_13 is not None:
accuracy_v_2_13 = torch.load(f"{folder_13}accuracy_v_2_epoch_{epoch}")
accuracy_v_3 = torch.load(f"{folder}accuracy_v_3_epoch_{epoch}")
if folder_4 is not None:
accuracy_v_2_4 = torch.load(f"{folder_4}accuracy_v_2_epoch_{epoch}")
if folder_5 is not None:
accuracy_v_2_5 = torch.load(f"{folder_5}accuracy_v_2_epoch_{epoch}")
if folder_6 is not None:
accuracy_v_2_6 = torch.load(f"{folder_6}accuracy_v_2_epoch_{epoch}")
if folder_7 is not None:
accuracy_v_2_7 = torch.load(f"{folder_7}accuracy_v_2_epoch_{epoch}")
if folder_8 is not None:
accuracy_v_2_8 = torch.load(f"{folder_8}accuracy_v_2_epoch_{epoch}")
accuracy_v_3_1 = torch.load(f"{folder_1}accuracy_v_3_epoch_{epoch}")
accuracy_v_3_2 = torch.load(f"{folder_2}accuracy_v_3_epoch_{epoch}")
if folder_3 is not None:
accuracy_v_3_3 = torch.load(f"{folder_3}accuracy_v_3_epoch_{epoch}")
if folder_11 is not None:
accuracy_v_3_11 = torch.load(f"{folder_11}accuracy_v_3_epoch_{epoch}")
if folder_12 is not None:
accuracy_v_3_12 = torch.load(f"{folder_12}accuracy_v_3_epoch_{epoch}")
if folder_13 is not None:
accuracy_v_3_13 = torch.load(f"{folder_13}accuracy_v_3_epoch_{epoch}")
if folder_4 is not None:
accuracy_v_3_4 = torch.load(f"{folder_4}accuracy_v_3_epoch_{epoch}")
if folder_5 is not None:
accuracy_v_3_5 = torch.load(f"{folder_5}accuracy_v_3_epoch_{epoch}")
if folder_6 is not None:
accuracy_v_3_6 = torch.load(f"{folder_6}accuracy_v_3_epoch_{epoch}")
if folder_7 is not None:
accuracy_v_3_7 = torch.load(f"{folder_7}accuracy_v_3_epoch_{epoch}")
if folder_8 is not None:
accuracy_v_3_8 = torch.load(f"{folder_8}accuracy_v_3_epoch_{epoch}")
# with open(f"{folder}accuracy.txt", 'r') as fp:
# avg_prob_non_empty_pre = fp.read()
......@@ -287,71 +564,174 @@ if __name__ == '__main__':
# avg_prob_non_empty = [float(t) for t in avg_prob_non_empty_pre[:-1]]
# other variant
if approach_1 is not None:
plot_accuracy_1(f"{save_folder}{approach}_{approach_1}_accuracy_epoch_{epoch}.pdf",
accuracy_11, "et_accuracy", accuracy_12, "et_accuracy_t_2", accuracy_13, "et_accuracy_t_100",
accuracy, "lrt_accuracy", accuracy_2, "lrt_accuracy_t_2", accuracy_3, "lrt_accuracy_t_100")
plot_accuracy_1(f"{save_folder}{approach}_{approach_1}_accuracy_v_2_epoch_{epoch}.pdf",
accuracy_v_2_11, "et_accuracy", accuracy_v_2_12, "et_accuracy_t_2", accuracy_v_2_13, "et_accuracy_t_100",
accuracy_v_2, "lrt_accuracy", accuracy_v_2_2, "lrt_accuracy_t_2", accuracy_v_2_3, "lrt_accuracy_t_100")
plot_accuracy_1(f"{save_folder}{approach}_{approach_1}_accuracy_v_3_epoch_{epoch}.pdf",
accuracy_v_3_11, "et_accuracy", accuracy_v_3_12, "et_accuracy_t_2", accuracy_v_3_13, "et_accuracy_t_100",
accuracy_v_3, "lrt_accuracy", accuracy_v_3_2, "lrt_accuracy_t_2", accuracy_v_3_3, "lrt_accuracy_t_100")
elif folder_3 is not None:
#if approach_2 is not None:
plot_accuracy_1(f"{save_folder}{approach_1}_{approach_2}_{approach_3}_accuracy_epoch_{epoch}.pdf", "accuracy",
accuracy_1, "naive", accuracy_2, "naive_t_2",
accuracy_3, "et", accuracy_4, "et_t_2", accuracy_5, "et_t_100",
accuracy_6, "st", accuracy_7, "st_t_2", accuracy_8, "st_t_100")
plot_accuracy_1(f"{save_folder}{approach_1}_{approach_2}_{approach_3}_accuracy_v_2_epoch_{epoch}.pdf", "accuracy_step",
accuracy_v_2_1, "naive", accuracy_v_2_2, "naive_t_2",
accuracy_v_2_3, "et", accuracy_v_2_4, "et_t_2", accuracy_v_2_5, "et_t_100",
accuracy_v_2_6, "st", accuracy_v_2_7, "st_t_2", accuracy_v_2_8, "st_t_100")
plot_accuracy_1(f"{save_folder}{approach_1}_{approach_2}_{approach_3}_accuracy_v_3_epoch_{epoch}.pdf", "accuracy_anystep",
accuracy_v_3_1, "naive", accuracy_v_3_2, "naive_t_2",
accuracy_v_3_3, "et", accuracy_v_3_4, "et_t_2", accuracy_v_3_5, "et_t_100",
accuracy_v_3_6, "st", accuracy_v_3_7, "st_t_2", accuracy_v_3_8, "st_t_100")
# elif folder_3 is not None:
# plot_accuracy_1(f"{save_folder}{approach}_accuracy_epoch_{epoch}.pdf",
# accuracy, "accuracy", accuracy_2, "accuracy_t_2", accuracy_3, "accuracy_t_100")
# plot_accuracy_1(f"{save_folder}{approach}_accuracy_v_2_epoch_{epoch}.pdf",
# accuracy_v_2, "accuracy_step", accuracy_v_2_2, "accuracy_step_t_2", accuracy_v_2_3, "accuracy_step_t_100")
# plot_accuracy_1(f"{save_folder}{approach}_accuracy_v_3_epoch_{epoch}.pdf",
# accuracy_v_3, "accuracy_anystep", accuracy_v_3_2, "accuracy_anystep_t_2", accuracy_v_3_3, "accuracy_anystep_t_100")
# else:
# plot_accuracy_1(f"{save_folder}{approach}_accuracy_epoch_{epoch}.pdf",
# accuracy, "accuracy", accuracy_2, "accuracy_t_2")
#
# plot_accuracy_1(f"{save_folder}{approach}_accuracy_v_2_epoch_{epoch}.pdf",
# accuracy_v_2, "accuracy_step", accuracy_v_2_2, "accuracy_step_t_2")
#
# plot_accuracy_1(f"{save_folder}{approach}_accuracy_v_3_epoch_{epoch}.pdf",
# accuracy_v_3, "accuracy_anystep", accuracy_v_3_2, "accuracy_anystep_t_2")
# other variant
#if approach_1 is not None:
plot_ppl_loss_1("loss", f"{save_folder}{approach_1}_{approach_2}_{approach_3}_loss_epoch_{epoch}.pdf",
loss_1, "naive", loss_2, "naive_t_2",
loss_3, "et", loss_4, "et_t_2", loss_5, "et_t_100",
loss_6, "st", loss_7, "st_t_2", loss_8, "st_t_100")
plot_ppl_loss_1("loss_step", f"{save_folder}{approach_1}_{approach_2}_{approach_3}_loss_v_2_epoch_{epoch}.pdf",
loss_v_2_1, "naive", loss_v_2_2, "naive_t_2",
loss_v_2_3, "et", loss_v_2_4, "et_t_2", loss_v_2_5, "et_t_100",
loss_v_2_6, "st", loss_v_2_7, "st_t_2", loss_v_2_8, "st_t_100")
plot_ppl_loss_1("perplexity", f"{save_folder}{approach_1}_{approach_2}_{approach_3}_ppl_epoch_{epoch}.pdf",
ppl_1, "naive", ppl_2, "naive_t_2",
ppl_3, "et", ppl_4, "et_t_2", ppl_5, "et_t_100",
ppl_6, "st", ppl_7, "st_t_2", ppl_8, "st_t_100")
plot_ppl_loss_1("perplexity_step", f"{save_folder}{approach_1}_{approach_2}_{approach_3}_ppl_v_2_epoch_{epoch}.pdf",
ppl_v_2_1, "naive", ppl_v_2_2, "naive_t_2",
ppl_v_2_3, "et", ppl_v_2_4, "et_t_2", ppl_v_2_5, "et_t_100",
ppl_v_2_6, "st", ppl_v_2_7, "st_t_2", ppl_v_2_8, "st_t_100")
# elif folder_3 is not None:
# plot_ppl_loss_1("loss", f"{save_folder}{approach}_loss_epoch_{epoch}.pdf",
# loss, "loss", loss_2, "loss_t_2", loss_3, "loss_t_100")
# plot_ppl_loss_1("loss", f"{save_folder}{approach}_loss_v_2_epoch_{epoch}.pdf",
# loss_v_2, "loss_step", loss_v_2_2, "loss_step_t_2", loss_v_2_3, "loss_step_t_100")
# plot_ppl_loss_1("perplexity", f"{save_folder}{approach}_ppl_epoch_{epoch}.pdf",
# ppl, "ppl", ppl_2, "ppl_t_2", ppl_3, "ppl_t_100")
# plot_ppl_loss_1("perplexity", f"{save_folder}{approach}_ppl_v_2_epoch_{epoch}.pdf",
# ppl_v_2, "ppl_step", ppl_v_2_2, "ppl_step_t_2", ppl_v_2_3, "ppl_step_t_100")
# else:
# plot_ppl_loss_1("loss", f"{save_folder}{approach}_loss_epoch_{epoch}.pdf",
# loss, "loss", loss_2, "loss_t_2")
# plot_ppl_loss_1("loss", f"{save_folder}{approach}_loss_v_2_epoch_{epoch}.pdf",
# loss_v_2, "loss_step", loss_v_2_2, "loss_step_t_2")
#
# plot_ppl_loss_1("perplexity", f"{save_folder}{approach}_ppl_epoch_{epoch}.pdf",
# ppl, "ppl", ppl_2, "ppl_t_2")
# plot_ppl_loss_1("perplexity", f"{save_folder}{approach}_ppl_v_2_epoch_{epoch}.pdf",
# ppl_v_2, "ppl_step", ppl_v_2_2, "ppl_step_t_2")
elif version == "version_3":
# test set für alle 4 approaches et_t_2, et_t_100, st_t_2, st_t_100
save_folder = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/epoch_3_eval/fraxtil/'
folder_1 = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/epoch_3_eval/fraxtil/Model_et_epoch_3_fraxtil/'
folder_2 = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/epoch_3_eval/fraxtil/thresholding_epoch_3_fraxtil/'
folder_3 = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/epoch_3_eval/fraxtil/Model_lrt_approach_epoch_3_farxtil/'
folder_4 = f'C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/epoch_3_eval/fraxtil/Model_lrt_approach_and_thesholding_epoch_3_fraxtil/'
approach = "all_models_fraxtil"
epoch = 1
ppl = torch.load(f"{folder_1}ppl_batches_epoch_{epoch}")
ppl_2 = torch.load(f"{folder_2}ppl_batches_epoch_{epoch}")
if folder_3 is not None:
ppl_3 = torch.load(f"{folder_3}ppl_batches_epoch_{epoch}")
if folder_4 is not None:
ppl_4 = torch.load(f"{folder_4}ppl_batches_epoch_{epoch}")
ppl_v_2 = torch.load(f"{folder_1}ppl_v_2_batches_epoch_{epoch}")
ppl_v_2_2 = torch.load(f"{folder_2}ppl_v_2_batches_epoch_{epoch}")
if folder_3 is not None:
ppl_v_2_3 = torch.load(f"{folder_3}ppl_v_2_batches_epoch_{epoch}")
if folder_4 is not None:
ppl_v_2_4 = torch.load(f"{folder_4}ppl_v_2_batches_epoch_{epoch}")
loss = torch.load(f"{folder_1}loss_epoch_{epoch}")
loss_2 = torch.load(f"{folder_2}loss_epoch_{epoch}")
if folder_3 is not None:
loss_3 = torch.load(f"{folder_3}loss_epoch_{epoch}")
if folder_4 is not None:
loss_4 = torch.load(f"{folder_4}loss_epoch_{epoch}")
accuracy = torch.load(f"{folder_1}accuracy_epoch_{epoch}")
accuracy_2 = torch.load(f"{folder_2}accuracy_epoch_{epoch}")
if folder_3 is not None:
accuracy_3 = torch.load(f"{folder_3}accuracy_epoch_{epoch}")
if folder_4 is not None:
accuracy_4 = torch.load(f"{folder_4}accuracy_epoch_{epoch}")
loss_v_2 = torch.load(f"{folder_1}loss_v_2_epoch_{epoch}")
loss_v_2_2 = torch.load(f"{folder_2}loss_v_2_epoch_{epoch}")
if folder_3 is not None:
loss_v_2_3 = torch.load(f"{folder_3}loss_v_2_epoch_{epoch}")
if folder_4 is not None:
loss_v_2_4 = torch.load(f"{folder_4}loss_v_2_epoch_{epoch}")
accuracy_v_2 = torch.load(f"{folder_1}accuracy_v_2_epoch_{epoch}")
accuracy_v_2_2 = torch.load(f"{folder_2}accuracy_v_2_epoch_{epoch}")
if folder_3 is not None:
accuracy_v_2_3 = torch.load(f"{folder_3}accuracy_v_2_epoch_{epoch}")
if folder_4 is not None:
accuracy_v_2_4 = torch.load(f"{folder_4}accuracy_v_2_epoch_{epoch}")
accuracy_v_3 = torch.load(f"{folder_1}accuracy_v_3_epoch_{epoch}")
accuracy_v_3_2 = torch.load(f"{folder_2}accuracy_v_3_epoch_{epoch}")
if folder_3 is not None:
accuracy_v_3_3 = torch.load(f"{folder_3}accuracy_v_3_epoch_{epoch}")
if folder_4 is not None:
accuracy_v_3_4 = torch.load(f"{folder_4}accuracy_v_3_epoch_{epoch}")
if folder_3 is not None:
plot_accuracy_1(f"{save_folder}{approach}_accuracy_epoch_{epoch}.pdf",
accuracy, "accuracy", accuracy_2, "accuracy_t_2", accuracy_3, "accuracy_t_100")
accuracy, "accuracy_et_t_2", accuracy_2, "accuracy_et_t_100",
accuracy_3, "accuracy_st_t_2", accuracy_4, "accuracy_st_t_100")
plot_accuracy_1(f"{save_folder}{approach}_accuracy_v_2_epoch_{epoch}.pdf",
accuracy_v_2, "accuracy", accuracy_v_2_2, "accuracy_t_2", accuracy_v_2_3, "accuracy_t_100")
accuracy_v_2, "accuracy_step_et_t_2", accuracy_v_2_2, "accuracy_step_et_t_100",
accuracy_v_2_3, "accuracy_step_st_t_2", accuracy_v_2_4, "accuracy_step_st_t_100")
plot_accuracy_1(f"{save_folder}{approach}_accuracy_v_3_epoch_{epoch}.pdf",
accuracy_v_3, "accuracy", accuracy_v_3_2, "accuracy_t_2", accuracy_v_3_3, "accuracy_t_100")
accuracy_v_3, "accuracy_anystep_et_t_2", accuracy_v_3_2, "accuracy_anystep_et_t_100",
accuracy_v_3_3, "accuracy_anystep_st_t_2", accuracy_v_3_4, "accuracy_anystep_st_t_100")
plot_ppl_loss_1("loss", f"{save_folder}{approach}_loss_epoch_{epoch}.pdf",
loss, "loss_et_t_2", loss_2, "loss_et_t_100", loss_3, "loss_st_t_2", loss_4, "loss_st_t_100")
plot_ppl_loss_1("loss", f"{save_folder}{approach}_loss_v_2_epoch_{epoch}.pdf",
loss_v_2, "loss_step_et_t_2", loss_v_2_2, "loss_step_et_t_100",
loss_v_2_3, "loss_step_st_t_2", loss_v_2_4, "loss_step_st_t_100")
plot_ppl_loss_1("perplexity", f"{save_folder}{approach}_ppl_epoch_{epoch}.pdf",
ppl, "ppl_et_t_2", ppl_2, "ppl_et_t_100", ppl_3, "ppl_st_t_2", ppl_4, "ppl_st_t_100")
plot_ppl_loss_1("perplexity", f"{save_folder}{approach}_ppl_v_2_epoch_{epoch}.pdf",
ppl_v_2, "ppl_step_et_t_2", ppl_v_2_2, "ppl_step_et_t_100",
ppl_v_2_3, "ppl_step_st_t_2", ppl_v_2_4, "ppl_step_st_t_100")
else:
plot_accuracy_1(f"{save_folder}{approach}_accuracy_epoch_{epoch}.pdf",
accuracy, "accuracy", accuracy_2, "accuracy_t_2")
plot_accuracy_1(f"{save_folder}{approach}_accuracy_v_2_epoch_{epoch}.pdf",
accuracy_v_2, "accuracy", accuracy_v_2_2, "accuracy_t_2")
accuracy_v_2, "accuracy_step", accuracy_v_2_2, "accuracy_step_t_2")
plot_accuracy_1(f"{save_folder}{approach}_accuracy_v_3_epoch_{epoch}.pdf",
accuracy_v_3, "accuracy", accuracy_v_3_2, "accuracy_t_2")
# other variant
if approach_1 is not None:
plot_ppl_loss_1(f"{save_folder}{approach}_{approach_1}_loss_epoch_{epoch}.pdf",
loss_11, "et_loss", loss_12, "et_loss_t_2", loss_13, "et_loss_t_100",
loss, "lrt_loss", loss_2, "lrt_loss_t_2", loss_3, "lrt_loss_t_100")
plot_ppl_loss_1(f"{save_folder}{approach}_{approach_1}_loss_v_2_epoch_{epoch}.pdf",
loss_v_2_11, "et_loss", loss_v_2_12, "et_loss_t_2", loss_v_2_13, "et_loss_t_100",
loss_v_2, "lrt_loss", loss_v_2_2, "lrt_loss_t_2", loss_v_2_3, "lrt_loss_t_100")
plot_ppl_loss_1(f"{save_folder}{approach}_{approach_1}_ppl_epoch_{epoch}.pdf",
ppl_11, "et_ppl", ppl_12, "et_ppl_t_2", ppl_13, "et_ppl_t_100",
ppl, "lrt_ppl", ppl_2, "lrt_ppl_t_2", ppl_3, "lrt_ppl_t_100")
plot_ppl_loss_1(f"{save_folder}{approach}_{approach_1}_ppl_v_2_epoch_{epoch}.pdf",
ppl_v_2_11, "et_ppl", ppl_v_2_12, "et_ppl_t_2", ppl_v_2_13, "et_ppl_t_100",
ppl_v_2, "lrt_ppl", ppl_v_2_2, "lrt_ppl_t_2", ppl_v_2_3, "lrt_ppl_t_100")
elif folder_3 is not None:
plot_ppl_loss_1(f"{save_folder}{approach}_loss_epoch_{epoch}.pdf",
loss, "loss", loss_2, "loss_t_2", loss_3, "loss_t_100")
plot_ppl_loss_1(f"{save_folder}{approach}_loss_v_2_epoch_{epoch}.pdf",
loss_v_2, "loss", loss_v_2_2, "loss_t_2", loss_v_2_3, "loss_t_100")
plot_ppl_loss_1(f"{save_folder}{approach}_ppl_epoch_{epoch}.pdf",
ppl, "ppl", ppl_2, "ppl_t_2", ppl_3, "ppl_t_100")
plot_ppl_loss_1(f"{save_folder}{approach}_ppl_v_2_epoch_{epoch}.pdf",
ppl_v_2, "ppl", ppl_v_2_2, "ppl_t_2", ppl_v_2_3, "ppl_t_100")
else:
plot_ppl_loss_1(f"{save_folder}{approach}_loss_epoch_{epoch}.pdf",
accuracy_v_3, "accuracy_anystep", accuracy_v_3_2, "accuracy_anystep_t_2")
plot_ppl_loss_1("loss", f"{save_folder}{approach}_loss_epoch_{epoch}.pdf",
loss, "loss", loss_2, "loss_t_2")
plot_ppl_loss_1(f"{save_folder}{approach}_loss_v_2_epoch_{epoch}.pdf",
loss_v_2, "loss", loss_v_2_2, "loss_t_2")
plot_ppl_loss_1(f"{save_folder}{approach}_ppl_epoch_{epoch}.pdf",
plot_ppl_loss_1("loss", f"{save_folder}{approach}_loss_v_2_epoch_{epoch}.pdf",
loss_v_2, "loss_step", loss_v_2_2, "loss_step_t_2")
plot_ppl_loss_1("perplexity", f"{save_folder}{approach}_ppl_epoch_{epoch}.pdf",
ppl, "ppl", ppl_2, "ppl_t_2")
plot_ppl_loss_1(f"{save_folder}{approach}_ppl_v_2_epoch_{epoch}.pdf",
ppl_v_2, "ppl", ppl_v_2_2, "ppl_t_2")
plot_ppl_loss_1("perplexity", f"{save_folder}{approach}_ppl_v_2_epoch_{epoch}.pdf",
ppl_v_2, "ppl_step", ppl_v_2_2, "ppl_step_t_2")
# --- confusion-matrix visualization ---
from sklearn.metrics import confusion_matrix
import seaborn as sn
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# Render a binary confusion matrix as a row-normalised heatmap: each cell is
# the fraction of that row's (true-class) samples, i.e. per-class recall on
# the diagonal.  Counts below were taken from a finished evaluation run.
#
# Provenance: the counts were originally collected with a loop of the form
#   for inputs, labels in testloader:
#       output = net(inputs)
#       y_pred.extend(torch.max(torch.exp(output), 1)[1].data.cpu().numpy())
#       y_true.extend(labels.data.cpu().numpy())
#   cf_matrix = confusion_matrix(y_true, y_pred)
classes = ('empty', 'non empty')

# Rows = true class, columns = predicted class.
cf_matrix = np.array([[26715, 0], [1038, 0]])
# Alternative run kept for reference:
# cf_matrix = np.array([[2461, 404], [201, 949]])

# Normalise each row by its total so cells are ratios, not raw counts.
row_totals = cf_matrix.sum(axis=1, keepdims=True)
df_cm = pd.DataFrame(cf_matrix / row_totals, index=list(classes),
                     columns=list(classes))

plt.figure(figsize=(12, 7))
sn.set(font_scale=2.0)
sn.heatmap(df_cm, annot=True, annot_kws={'size': 25})
plt.savefig('C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/confusion_matrix_naive_no_t.pdf')
#plt.savefig('C:/Users/cassi/OneDrive/Desktop/Master_Thesis/master_thesis/confusion_matrix_step_prediction_testing_et.pdf')