Fix a GPU memory leak in detect: there is no need to calculate gradients during inference. (#900)

Co-authored-by: JKO095 <juho-pekka.koponen@wartsila.com>
Author: jpkoponen, 2022-10-08 00:50:04 +03:00 (committed by GitHub)
parent 8b616af63a
commit 072f76c72c


@@ -84,7 +84,8 @@ def detect(save_img=False):
 
         # Inference
         t1 = time_synchronized()
-        pred = model(img, augment=opt.augment)[0]
+        with torch.no_grad():  # Calculating gradients would cause a GPU memory leak
+            pred = model(img, augment=opt.augment)[0]
         t2 = time_synchronized()
 
         # Apply NMS
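
For context, here is a minimal sketch of the pattern this commit applies. The model, input shape, and loop below are hypothetical stand-ins rather than detect.py's actual code; they only illustrate why wrapping the forward pass in torch.no_grad() stops GPU memory from growing across inference iterations.

import torch

# Hypothetical stand-in for the detection model (not detect.py's model).
device = "cuda" if torch.cuda.is_available() else "cpu"
model = torch.nn.Linear(640, 80).to(device).eval()

detections = []
for _ in range(100):
    img = torch.randn(1, 640, device=device)
    with torch.no_grad():  # skip building the autograd graph for this forward pass
        pred = model(img)
    # Without no_grad(), `pred` would reference its autograd graph; retaining
    # predictions (as detect.py retains detections) would then keep every
    # iteration's intermediate activations alive on the GPU, growing memory
    # batch after batch.
    detections.append(pred)

Note that model.eval() alone does not disable autograd; it only switches layer behavior such as dropout and batch norm, which is why the no_grad() context is still required. On PyTorch 1.9 and later, torch.inference_mode() is a stricter alternative to torch.no_grad().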