Skip to content

Commit 4dd46c3

Browse files
committed
Comment out debug print statements, including the GPU-availability debug block
1 parent c9c7488 commit 4dd46c3

1 file changed

Lines changed: 21 additions & 21 deletions

File tree

evaluation_function/evaluation.py

Lines changed: 21 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -22,15 +22,15 @@ def evaluation_function(
2222
answer: Any,
2323
params: Params,
2424
) -> Result:
25-
25+
start_total = time.time()
2626

2727
global _model_cache
2828

29-
print("### Answer: ", answer)
30-
print("### Response: ", response)
31-
print("### Params: ", params)
29+
#print("### Answer: ", answer)
30+
#print("### Response: ", response)
31+
#print("### Params: ", params)
3232

33-
start_total = time.time()
33+
3434

3535
draw_images = params.get("draw_images", True)
3636
model_name = params.get("model_name", "model.pt")
@@ -48,7 +48,7 @@ def evaluation_function(
4848

4949
target_class = params.get("target", None)
5050

51-
print("Target class:", target_class)
51+
# print("Target class:", target_class)
5252

5353
feedback_items = []
5454

@@ -349,28 +349,28 @@ def analyze_images(images, draw_images=True):
349349
# print response structure for debugging purposes
350350
try:
351351
append_feedback("DEBUG Response Structure", f"**Response Structur:** {repr(response)}")
352-
print("DEBUG Response Structure:", repr(response))
352+
#print("DEBUG Response Structure:", repr(response))
353353
except Exception as e:
354354
append_feedback("Failed to print response structure", f"{e}")
355-
print("Failed to print response structure", e)
355+
#print("Failed to print response structure", e)
356356

357357
if params.get('debug', False):
358358

359359
# also check if YOLO can use GPU (torch.cuda availability)
360-
try:
361-
import torch
362-
gpu_available = torch.cuda.is_available()
363-
except ImportError:
364-
gpu_available = "Error checking GPU availability"
360+
#try:
361+
# import torch
362+
# gpu_available = torch.cuda.is_available()
363+
#except ImportError:
364+
# gpu_available = "Error checking GPU availability"
365365
# sometimes the model itself has a .device attribute
366-
try:
367-
model_device = getattr(model, 'device', None)
368-
if hasattr(model_device, 'type'):
369-
model_device = model_device.type
370-
except Exception:
371-
model_device = None
372-
print(f"DEBUG GPU Available: {gpu_available}, {model_device}")
373-
append_feedback("DEBUG GPU Available", f"- **GPU Available:** `{gpu_available}`\n- **Model Device:** `{model_device}`")
366+
#try:
367+
# model_device = getattr(model, 'device', None)
368+
# if hasattr(model_device, 'type'):
369+
# model_device = model_device.type
370+
#except Exception:
371+
# model_device = None
372+
#print(f"DEBUG GPU Available: {gpu_available}, {model_device}")
373+
#append_feedback("DEBUG GPU Available", f"- **GPU Available:** `{gpu_available}`\n- **Model Device:** `{model_device}`")
374374

375375
# include all annotated/uploaded images in debug output
376376
for idx, (img, _, _) in enumerate(annotated_images):

0 commit comments

Comments
 (0)