@@ -57,7 +57,7 @@ def train(hyp):
         yaml.dump(hyp, f, sort_keys=False)
     with open(Path(log_dir) / 'opt.yaml', 'w') as f:
         yaml.dump(vars(opt), f, sort_keys=False)
 
     epochs = opt.epochs  # 300
     batch_size = opt.batch_size  # 64
     weights = opt.weights  # initial training weights
@@ -68,7 +68,8 @@ def train(hyp):
         data_dict = yaml.load(f, Loader=yaml.FullLoader)  # model dict
     train_path = data_dict['train']
     test_path = data_dict['val']
-    nc = 1 if opt.single_cls else int(data_dict['nc'])  # number of classes
+    nc, names = (1, ['item']) if opt.single_cls else (int(data_dict['nc']), data_dict['names'])  # number classes, names
+    assert len(names) == nc, '%g names found for nc=%g dataset in %s' % (len(names), nc, opt.data)  # check
 
     # Remove previous results
     for f in glob.glob('*_batch*.jpg') + glob.glob(results_file):