def get_scheduler(optimizer):
    ...
    return scheduler

def get_dataset():
    ...
    return dataset

def get_transform(dataset):
    ...
    return transform

def get_loss_optim(model):
    ...
    return criterion, optimizer

def get_logger(save_dir):
    ...
    return logger
I refactored the code to be more concise without changing any of its behavior.
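For reference, a minimal sketch of what two of these helpers might look like. The real bodies are elided above; the specific choices here (CrossEntropyLoss, Adam, StepLR, and their hyperparameters) are assumptions for illustration only, not the project's actual settings.

import torch

def get_loss_optim(model):
    # assumed loss/optimizer pair -- the project may use different ones
    criterion = torch.nn.CrossEntropyLoss()
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
    return criterion, optimizer

def get_scheduler(optimizer):
    # assumed schedule: halve the learning rate every 10 epochs
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.5)
    return scheduler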
def train(data_dir, model_dir, args):
    # -- settings
    use_cuda = torch.cuda.is_available()
    device = torch.device("cuda" if use_cuda else "cpu")
    scaler = torch.cuda.amp.GradScaler()
    print("K fold CV :", args.KfoldCV)
    seed_everything(args.seed)
    save_dir = increment_path(os.path.join(model_dir, args.name))  # ./model/exp

    model = get_model(device)
    dataset = get_dataset()
    transform = get_transform(dataset)
    dataset.set_transform(transform)  # assign the transform to the dataset
    criterion, optimizer = get_loss_optim(model)
    scheduler = get_scheduler(optimizer)
    logger = get_logger(save_dir)

    val_ratio = args.val_ratio
    best_val_acc = 0
    best_val_loss = np.inf  # infinity
    best_val_f1 = 0
    early_stopping = EarlyStopping(patience=args.early_stop, verbose=True)  # early stopping

    # train start
    ...
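The elided loop presumably ties together the GradScaler, scheduler, and early stopping set up above. Here is a sketch of how those pieces typically fit together with torch.cuda.amp; train_loader, the validate() helper, and the EarlyStopping call signature (val_loss, model) with an early_stop flag are assumptions, not this repository's actual code.

# Hypothetical sketch of the elided training loop.
for epoch in range(args.epochs):
    model.train()
    for inputs, labels in train_loader:          # assumed DataLoader
        inputs, labels = inputs.to(device), labels.to(device)
        optimizer.zero_grad()
        with torch.cuda.amp.autocast():          # mixed-precision forward pass
            loss = criterion(model(inputs), labels)
        scaler.scale(loss).backward()            # scale loss to avoid fp16 underflow
        scaler.step(optimizer)
        scaler.update()
    scheduler.step()

    val_loss, val_acc, val_f1 = validate(model, criterion, device)  # assumed helper
    best_val_f1 = max(best_val_f1, val_f1)
    best_val_acc = max(best_val_acc, val_acc)
    best_val_loss = min(best_val_loss, val_loss)

    early_stopping(val_loss, model)              # stop when val loss stops improving
    if early_stopping.early_stop:
        break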
Changes:
dataset.py
inference.py
loss.py
train.py
best model config.json