-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathutils.py
More file actions
68 lines (55 loc) · 2.05 KB
/
utils.py
File metadata and controls
68 lines (55 loc) · 2.05 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
import os
import json
import torch
import logging
import shutil
import pickle
def read_json_file(file_path):
    """Load a JSON file and return the parsed object.

    @params file_path (string): path to the JSON file to read
    @returns the deserialized contents of the file
    """
    with open(file_path) as fp:
        return json.load(fp)
def write_json_file(output, file_path):
    """Serialize *output* as JSON and write it to *file_path*.

    @params output: any JSON-serializable object
    @params file_path (string): destination path (overwritten if it exists)
    """
    with open(file_path, 'w') as fp:
        json.dump(output, fp)
def set_logger(log_path):
    """Configure the root logger to log training info to file and console.

    All data is stored to the file at `log_path` (e.g. `model_dir/train.log`).

    @params log_path (string): path to save the train.log file
    """
    root = logging.getLogger()
    root.setLevel(logging.INFO)
    # Guard clause: attach handlers only once so repeated calls
    # do not duplicate every log line.
    if root.handlers:
        return
    # File handler with timestamps for the persistent training log.
    to_file = logging.FileHandler(log_path)
    to_file.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s: %(message)s'))
    root.addHandler(to_file)
    # Plain console handler for interactive monitoring.
    to_console = logging.StreamHandler()
    to_console.setFormatter(logging.Formatter('%(message)s'))
    root.addHandler(to_console)
def pad(idxs_to_pad, pad_idx):
    """Pad every index list in-place to the length of the longest one.

    @params idxs_to_pad (list[list[int]]): batch of index sequences; each
        inner list is extended (mutated) with `pad_idx` up to the max length
    @params pad_idx (int): the padding index value to append
    @returns the same `idxs_to_pad` list, now with equal-length inner lists
    """
    # Edge case: an empty batch has nothing to pad (the original
    # sorted(...)[0] would raise IndexError here).
    if not idxs_to_pad:
        return idxs_to_pad
    # max() over lengths is O(n); sorting the whole batch just to find
    # the longest sequence was O(n log n).
    max_len = max(len(idxs) for idxs in idxs_to_pad)
    for idxs in idxs_to_pad:
        idxs.extend([pad_idx] * (max_len - len(idxs)))
    return idxs_to_pad
def save_dict_to_pkl(d, path):
    """Pickle the object *d* to *path*.

    @params d: any picklable object (typically a dict)
    @params path (string): destination file path (overwritten if it exists)
    """
    # Use a context manager so the handle is closed and the data flushed
    # even if pickling raises; the original leaked the open file handle.
    with open(path, 'wb') as out_file:
        pickle.dump(d, out_file)
def save_checkpoint(state, checkpoint, is_best):
    """Save a training checkpoint to `checkpoint/last.pth.tar`.

    @params state (dict): model/optimizer state to persist via torch.save
    @params checkpoint (string): directory to write checkpoint files into
    @params is_best (bool): if True, also copy the checkpoint to
        `best.pth.tar` in the same directory
    """
    filepath = os.path.join(checkpoint, 'last.pth.tar')
    if not os.path.exists(checkpoint):
        print("checkpoint directory doesnt exist. Making directory {}".format(checkpoint))
        # makedirs (not mkdir) so missing intermediate directories are
        # created too; os.mkdir raised FileNotFoundError for nested paths.
        os.makedirs(checkpoint)
    else:
        print("Checkpoint directory exists")
    torch.save(state, filepath)
    if is_best:
        shutil.copyfile(filepath, os.path.join(checkpoint, 'best.pth.tar'))
def load_checkpoint(checkpoint, model, optimizer=None):
    """Load model (and optionally optimizer) state from a checkpoint file.

    @params checkpoint (string): path to a file written by torch.save,
        containing at least a 'state_dict' key (and 'optim_dict' if an
        optimizer is passed)
    @params model: torch.nn.Module to restore weights into
    @params optimizer: optional torch optimizer to restore state into
    @returns the loaded checkpoint dict
    @raises FileNotFoundError: if `checkpoint` does not exist
    """
    if not os.path.exists(checkpoint):
        # Bug fix: the original raised a plain string, which is itself a
        # TypeError in Python 3 ("exceptions must derive from BaseException").
        raise FileNotFoundError("File doesn't exist {}".format(checkpoint))
    checkpoint = torch.load(checkpoint)
    model.load_state_dict(checkpoint['state_dict'])
    if optimizer:
        optimizer.load_state_dict(checkpoint['optim_dict'])
    return checkpoint