From f5da528d280af5d106e80eceddefd427cf9aed32 Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Fri, 14 Aug 2020 11:53:44 -0700
Subject: [PATCH] reformat code

---
 .github/workflows/rebase.yml | 16 ++++++++--------
 detect.py                    |  2 +-
 test.py                      |  2 +-
 utils/datasets.py            |  2 +-
 utils/torch_utils.py         |  6 ++++--
 5 files changed, 15 insertions(+), 13 deletions(-)

diff --git a/.github/workflows/rebase.yml b/.github/workflows/rebase.yml
index ae69fef..e86c577 100644
--- a/.github/workflows/rebase.yml
+++ b/.github/workflows/rebase.yml
@@ -11,11 +11,11 @@ jobs:
     if: github.event.issue.pull_request != '' && contains(github.event.comment.body, '/rebase')
     runs-on: ubuntu-latest
     steps:
-    - name: Checkout the latest code
-      uses: actions/checkout@v2
-      with:
-        fetch-depth: 0
-    - name: Automatic Rebase
-      uses: cirrus-actions/rebase@1.3.1
-      env:
-        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: Checkout the latest code
+        uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+      - name: Automatic Rebase
+        uses: cirrus-actions/rebase@1.3.1
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/detect.py b/detect.py
index 61e9a0a..60dd059 100644
--- a/detect.py
+++ b/detect.py
@@ -13,7 +13,7 @@ from numpy import random
 from models.experimental import attempt_load
 from utils.datasets import LoadStreams, LoadImages
 from utils.general import (
-    check_img_size, non_max_suppression, apply_classifier, scale_coords, 
+    check_img_size, non_max_suppression, apply_classifier, scale_coords,
     xyxy2xywh, plot_one_box, strip_optimizer, set_logging)
 from utils.torch_utils import select_device, load_classifier, time_synchronized
 
diff --git a/test.py b/test.py
index fc0ed62..d72a792 100644
--- a/test.py
+++ b/test.py
@@ -13,7 +13,7 @@ from tqdm import tqdm
 from models.experimental import attempt_load
 from utils.datasets import create_dataloader
 from utils.general import (
-    coco80_to_coco91_class, check_dataset, check_file, check_img_size, compute_loss, non_max_suppression, scale_coords, 
+    coco80_to_coco91_class, check_dataset, check_file, check_img_size, compute_loss, non_max_suppression, scale_coords,
     xyxy2xywh, clip_coords, plot_images, xywh2xyxy, box_iou, output_to_target, ap_per_class, set_logging)
 from utils.torch_utils import select_device, time_synchronized
 
diff --git a/utils/datasets.py b/utils/datasets.py
index be21a2e..252d14d 100755
--- a/utils/datasets.py
+++ b/utils/datasets.py
@@ -423,7 +423,7 @@ class LoadImagesAndLabels(Dataset):  # for training/testing
                 ne += 1  # print('empty labels for image %s' % self.img_files[i])  # file empty
                 # os.system("rm '%s' '%s'" % (self.img_files[i], self.label_files[i]))  # remove
 
-            if rank in [-1,0]:
+            if rank in [-1, 0]:
                 pbar.desc = 'Scanning labels %s (%g found, %g missing, %g empty, %g duplicate, for %g images)' % (
                     cache_path, nf, nm, ne, nd, n)
         if nf == 0:
diff --git a/utils/torch_utils.py b/utils/torch_utils.py
index 3489e5c..0906d58 100644
--- a/utils/torch_utils.py
+++ b/utils/torch_utils.py
@@ -12,6 +12,7 @@ import torchvision.models as models
 
 logger = logging.getLogger(__name__)
 
+
 def init_seeds(seed=0):
     torch.manual_seed(seed)
 
@@ -43,7 +44,7 @@ def select_device(device='', batch_size=None):
             if i == 1:
                 s = ' ' * len(s)
             logger.info("%sdevice%g _CudaDeviceProperties(name='%s', total_memory=%dMB)" %
-                  (s, i, x[i].name, x[i].total_memory / c))
+                        (s, i, x[i].name, x[i].total_memory / c))
     else:
         logger.info('Using CPU')
 
@@ -144,7 +145,8 @@ def model_info(model, verbose=False):
     except:
         fs = ''
 
-    logger.info('Model Summary: %g layers, %g parameters, %g gradients%s' % (len(list(model.parameters())), n_p, n_g, fs))
+    logger.info(
+        'Model Summary: %g layers, %g parameters, %g gradients%s' % (len(list(model.parameters())), n_p, n_g, fs))
 
 
 def load_classifier(name='resnet101', n=2):