-
Notifications
You must be signed in to change notification settings - Fork 0
/
early_stopping.py
84 lines (75 loc) · 2.54 KB
/
early_stopping.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
"""
__author__: Abhishek Thakur
"""
import torch
import numpy as np
# try:
# import torch_xla.core.xla_model as xm
#
# _xla_available = True
# except ImportError:
# _xla_available = False
class EarlyStopping:
    """Stop training when a monitored validation score stops improving.

    Call the instance once per epoch with the current validation score.
    It checkpoints the model whenever the score improves by more than
    ``delta`` and sets ``early_stop = True`` after ``patience`` epochs
    without improvement.

    Attributes:
        patience: Number of non-improving epochs tolerated before stopping.
        counter: Consecutive epochs without improvement so far.
        mode: "max" if a larger score is better, "min" if smaller is better.
        best_score: Best (internally sign-normalized) score seen so far.
        early_stop: True once patience has been exhausted.
        tpu: Kept for backward compatibility; TPU-specific code paths were
            removed (they lived behind a torch_xla import that is commented
            out at the top of this file).
        delta: Minimum change in the score to qualify as an improvement.
        val_score: Best raw validation score seen so far (used in log lines).
    """

    def __init__(self, patience=7, mode="max", delta=0.0001, tpu=False):
        """
        Args:
            patience: Epochs to wait after the last improvement.
            mode: "max" (default) or "min" — direction of improvement.
            delta: Minimum improvement over the best score to reset patience.
            tpu: Unused; retained so existing callers keep working.
        """
        self.patience = patience
        self.counter = 0
        self.mode = mode
        self.best_score = None
        self.early_stop = False
        self.tpu = tpu
        self.delta = delta
        # np.inf (lowercase): np.Inf was removed in NumPy 2.0.
        if self.mode == "min":
            self.val_score = np.inf
        else:
            self.val_score = -np.inf

    def __call__(self, epoch_score, model, model_path):
        """Record this epoch's score; checkpoint on improvement.

        Args:
            epoch_score: Validation metric for the epoch.
            model: Model whose ``state_dict`` is saved on improvement.
            model_path: Destination path for the checkpoint.
        """
        # Normalize so that "bigger is better" regardless of mode.
        if self.mode == "min":
            score = -1.0 * epoch_score
        else:
            score = np.copy(epoch_score)
        if self.best_score is None:
            # First epoch: take it as the baseline and checkpoint.
            self.best_score = score
            self.save_checkpoint(epoch_score, model, model_path)
        elif score < self.best_score + self.delta:
            # Not enough improvement: burn one unit of patience.
            self.counter += 1
            print(
                "EarlyStopping counter: {} out of {}".format(
                    self.counter, self.patience
                )
            )
            if self.counter >= self.patience:
                self.early_stop = True
        else:
            # Improved: update the best, checkpoint, reset patience.
            self.best_score = score
            self.save_checkpoint(epoch_score, model, model_path)
            self.counter = 0

    def save_checkpoint(self, epoch_score, model, model_path):
        """Save ``model.state_dict()`` to ``model_path`` if the score is finite.

        The original guard ``epoch_score not in [-np.inf, np.inf, -np.nan,
        np.nan]`` was broken for NaN (``nan == nan`` is False, so NaN scores
        slipped through and were checkpointed); ``np.isfinite`` rejects both
        NaN and +/-inf correctly.
        """
        if np.isfinite(epoch_score):
            print(
                "Validation score improved ({} --> {}). Saving model!".format(
                    self.val_score, epoch_score
                )
            )
            torch.save(model.state_dict(), model_path)
        self.val_score = epoch_score