-
Notifications
You must be signed in to change notification settings - Fork 0
/
configClasses.py
53 lines (42 loc) · 1.21 KB
/
configClasses.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
import torch
from embeddingreg import embedding
class DefaultConfig(object):
    """Baseline training/continual-learning configuration.

    All settings are class attributes; subclasses override individual
    fields to define experiment presets. ``str(config)`` dumps every
    non-dunder attribute for logging.
    """

    # Optimization hyperparameters.
    LR = 0.001
    L1_REG = 0
    IS_INCREMENTAL = True
    ITERS = 1
    EPOCHS = 2
    # BATCH_SIZE = 64
    IS_CONVOLUTIONAL = False

    # Optional overrides applied from the second task onward.
    NEXT_TASK_LR = None
    NEXT_TASK_EPOCHS = None

    # EWC_SAMPLE_SIZE = 250
    # EWC_IMPORTANCE = 1000

    # Continual-learning technique and its parameters.
    USE_CL = True
    CL_TEC = embedding
    CL_PAR = {
        'penalty_importance': 8,
        'weights_type': 'distance',
        'sample_size': 20,
        'distance': 'euclidean',
        'supervised': False,
        'memorized_task_size': 300,
        'normalize': True,
        'online': False,
    }

    # Logging / bookkeeping.
    USE_TENSORBOARD = True
    DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    MODEL_NAME = ''
    SAVE_PATH = '.'
    RUN_NAME = 'default'
    LOSS = 'cross_entropy'
    OPTIMIZER = 'SGD'

    def __str__(self):
        """Return a readable dump of every non-dunder config attribute."""
        names = (a for a in dir(self) if not a.startswith('__'))
        body = '\n'.join(f + ': ' + str(getattr(self, f)) for f in names)
        return 'CONFIG PARAMETERS\n' + body + '\n'
class Embedding(DefaultConfig):
    """Preset selecting the embedding-based CL technique.

    Restates ``CL_TEC`` explicitly (DefaultConfig already uses
    ``embedding``); all other hyperparameters are inherited unchanged.
    NOTE(review): margin-override experiments were previously sketched
    here but are not active.
    """
    CL_TEC = embedding