-
Notifications
You must be signed in to change notification settings - Fork 6
Expand file tree
/
Copy pathclient.py
More file actions
1858 lines (1571 loc) · 83.9 KB
/
client.py
File metadata and controls
1858 lines (1571 loc) · 83.9 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# pylint: disable=too-many-lines
import logging
import os
import time
import warnings
from functools import partial
from io import BufferedReader, BytesIO
from typing import Any, Callable, List, Optional, Tuple, Union
from groundlight_openapi_client import Configuration
from groundlight_openapi_client.api.detector_groups_api import DetectorGroupsApi
from groundlight_openapi_client.api.detectors_api import DetectorsApi
from groundlight_openapi_client.api.image_queries_api import ImageQueriesApi
from groundlight_openapi_client.api.labels_api import LabelsApi
from groundlight_openapi_client.api.month_to_date_account_info_api import MonthToDateAccountInfoApi
from groundlight_openapi_client.api.user_api import UserApi
from groundlight_openapi_client.exceptions import NotFoundException, UnauthorizedException
from groundlight_openapi_client.model.b_box_geometry_request import BBoxGeometryRequest
from groundlight_openapi_client.model.bounding_box_mode_configuration import BoundingBoxModeConfiguration
from groundlight_openapi_client.model.count_mode_configuration import CountModeConfiguration
from groundlight_openapi_client.model.detector_creation_input_request import DetectorCreationInputRequest
from groundlight_openapi_client.model.detector_group_request import DetectorGroupRequest
from groundlight_openapi_client.model.label_value_request import LabelValueRequest
from groundlight_openapi_client.model.multi_class_mode_configuration import MultiClassModeConfiguration
from groundlight_openapi_client.model.patched_detector_request import PatchedDetectorRequest
from groundlight_openapi_client.model.roi_request import ROIRequest
from groundlight_openapi_client.model.status_enum import StatusEnum
from model import (
ROI,
AccountMonthToDateInfo,
BBoxGeometry,
BinaryClassificationResult,
Detector,
DetectorGroup,
ImageQuery,
ModeEnum,
PaginatedDetectorList,
PaginatedImageQueryList,
)
from urllib3.exceptions import InsecureRequestWarning
from urllib3.util.retry import Retry
from groundlight.binary_labels import Label, convert_internal_label_to_display
from groundlight.config import API_TOKEN_MISSING_HELP_MESSAGE, API_TOKEN_VARIABLE_NAME, DISABLE_TLS_VARIABLE_NAME
from groundlight.encodings import url_encode_dict
from groundlight.images import ByteStreamWrapper, parse_supported_image_types
from groundlight.internalapi import (
GroundlightApiClient,
NotFoundError,
iq_is_answered,
iq_is_confident,
sanitize_endpoint_url,
)
from groundlight.optional_imports import Image, np
# Module-level logger for the SDK; consumers can configure "groundlight.sdk" handlers.
logger = logging.getLogger("groundlight.sdk")
# Set urllib3 request timeout to something modern and fast.
# The system defaults can be stupidly long
# It used to take >8 min to timeout to a bad IP address
DEFAULT_REQUEST_TIMEOUT = 10  # seconds
class GroundlightClientError(Exception):
    """Base class for all exceptions raised by the Groundlight SDK client."""
class ApiTokenError(GroundlightClientError):
    """Raised when the API token is missing, blank, or rejected by the service."""
class EdgeNotAvailableError(GroundlightClientError):
    """Raised when an edge-only method is called against a non-edge endpoint."""
class Groundlight: # pylint: disable=too-many-instance-attributes,too-many-public-methods
"""
Client for accessing the Groundlight cloud service. Provides methods to create visual detectors,
submit images for analysis, and retrieve predictions.
The API token (auth) is specified through the **GROUNDLIGHT_API_TOKEN** environment variable by
default.
If you are using a Groundlight Edge device, you can specify the endpoint through the
**GROUNDLIGHT_ENDPOINT** environment variable.
**Example usage**::
gl = Groundlight()
detector = gl.get_or_create_detector(
name="door_detector",
query="Is the door open?",
confidence_threshold=0.9
)
# Submit image and get prediction
image_query = gl.submit_image_query(
detector=detector,
image="path/to/image.jpg",
wait=30.0
)
print(f"Answer: {image_query.result.label}")
# Async submission with human review
image_query = gl.ask_async(
detector=detector,
image="path/to/image.jpg",
human_review="ALWAYS"
)
# Later, get the result
image_query = gl.wait_for_confident_result(
image_query=image_query,
confidence_threshold=0.95,
timeout_sec=60.0
)
:param endpoint: Optional custom API endpoint URL. If not specified, uses the default Groundlight endpoint.
:param api_token: Authentication token for API access. If not provided, will attempt to read from
the "GROUNDLIGHT_API_TOKEN" environment variable.
:param disable_tls_verification: If True, disables SSL/TLS certificate verification for API calls.
When not specified, checks the "DISABLE_TLS_VERIFY" environment variable (1=disable, 0=enable).
Certificate verification is enabled by default.
Warning: Only disable verification when connecting to a Groundlight Edge Endpoint using
self-signed certificates. For security, always keep verification enabled when using the
Groundlight cloud service.
:return: Groundlight client instance
"""
# Default client-side polling budget (seconds) when waiting for a confident answer.
DEFAULT_WAIT: float = 30.0
# Delay (seconds) before the first poll of a pending image query.
POLLING_INITIAL_DELAY = 0.25
POLLING_EXPONENTIAL_BACKOFF = 1.3  # This still has the nice backoff property that the max number of requests
# is O(log(time)), but with 1.3 the guarantee is that the call will return no more than 30% late
def __init__(
    self,
    endpoint: Optional[str] = None,
    api_token: Optional[str] = None,
    disable_tls_verification: Optional[bool] = None,
    http_transport_retries: Optional[Union[int, Retry]] = None,
):
    """
    Initialize a new Groundlight client instance.

    :param endpoint: Optional custom API endpoint URL. If not specified, uses the default Groundlight endpoint.
    :param api_token: Authentication token for API access.
        If not provided, will attempt to read from the "GROUNDLIGHT_API_TOKEN" environment variable.
    :param disable_tls_verification: If True, disables SSL/TLS certificate verification for API calls.
        When not specified, checks the "DISABLE_TLS_VERIFY" environment variable (1=disable, 0=enable).
        Certificate verification is enabled by default.
        Warning: Only disable verification when connecting to a Groundlight Edge Endpoint using self-signed
        certificates. For security, always keep verification enabled when using the Groundlight cloud service.
    :param http_transport_retries: Overrides urllib3 `PoolManager` retry policy for HTTP/HTTPS (forwarded to
        `Configuration.retries`). Not the same as SDK 5xx retries handled by `RequestsRetryDecorator`.
    :return: Groundlight client
    :raises ApiTokenError: if no API token can be found, or the env var is set but blank.
    """
    # Specify the endpoint
    self.endpoint = sanitize_endpoint_url(endpoint)
    self.configuration = Configuration(host=self.endpoint)
    if http_transport_retries is not None:
        # Once we upgrade openapitools to ^7.7.0, retries can be passed into the constructor of Configuration above.
        self.configuration.retries = http_transport_retries
    if not api_token:
        try:
            # Retrieve the API token from environment variable
            api_token = os.environ[API_TOKEN_VARIABLE_NAME]
        except KeyError as e:
            raise ApiTokenError(API_TOKEN_MISSING_HELP_MESSAGE) from e
        # The env var may exist but be empty -- treat that as missing too.
        if not api_token:
            raise ApiTokenError("No API token found. GROUNDLIGHT_API_TOKEN environment variable is set but blank")
    # Keep only a short, non-secret prefix of the token for use in error messages.
    self.api_token_prefix = api_token[:12]
    should_disable_tls_verification = disable_tls_verification
    if should_disable_tls_verification is None:
        # Explicit argument wins; otherwise fall back to the env-var switch (1=disable, 0=enable).
        should_disable_tls_verification = bool(int(os.environ.get(DISABLE_TLS_VARIABLE_NAME, 0)))
    if should_disable_tls_verification:
        logger.warning(
            "Disabling SSL/TLS certificate verification. This should only be used when connecting to an endpoint"
            " with a self-signed certificate."
        )
        # Suppress urllib3's per-request warnings since the user opted in knowingly.
        warnings.simplefilter("ignore", InsecureRequestWarning)
        self.configuration.verify_ssl = False
        self.configuration.assert_hostname = False
    self.configuration.api_key["ApiToken"] = api_token
    self.api_client = GroundlightApiClient(self.configuration)
    self.detectors_api = DetectorsApi(self.api_client)
    self.detector_group_api = DetectorGroupsApi(self.api_client)
    # NOTE(review): images_api and image_queries_api wrap the same generated API;
    # images_api looks like a legacy alias -- confirm before removing.
    self.images_api = ImageQueriesApi(self.api_client)
    self.image_queries_api = ImageQueriesApi(self.api_client)
    self.user_api = UserApi(self.api_client)
    self.labels_api = LabelsApi(self.api_client)
    self.month_to_date_api = MonthToDateAccountInfoApi(self.api_client)
    # Placeholder until _verify_connectivity() succeeds; __repr__ reads this.
    self.logged_in_user = "(not-logged-in)"
    # Fail fast with a helpful error if the endpoint or token are wrong.
    self._verify_connectivity()
def __repr__(self) -> str:
    """Return a short human-readable description of this client."""
    # Deliberately avoids any API call: repr() is evaluated while rendering
    # exception strings, and a network call here could loop.
    return f"Logged in as {self.logged_in_user} to Groundlight at {self.endpoint}"
def _verify_connectivity(self) -> None:
    """
    Confirm that the configured endpoint and API token actually work, raising a
    descriptive exception when they do not.
    """
    try:
        # A "who am I" round-trip proves both connectivity and authentication,
        # and records the authenticated user for __repr__.
        self.logged_in_user = self.whoami()
        if self._user_is_privileged():
            logger.warning(
                "WARNING: The current user has elevated permissions. Please verify such permissions are necessary"
                " for your current operation"
            )
    except UnauthorizedException as err:
        # The endpoint answered, so the token itself is the problem.
        raise ApiTokenError(
            f"Invalid API token '{self.api_token_prefix}...' connecting to endpoint "
            f"'{self.endpoint}'. Endpoint is responding, but API token is probably invalid."
        ) from err
    except Exception as err:
        # Anything else is most likely a bad or unreachable endpoint.
        raise GroundlightClientError(
            f"Error connecting to Groundlight using API token '{self.api_token_prefix}...'"
            f" at endpoint '{self.endpoint}'. Endpoint might be invalid or unreachable? "
            "Check https://status.groundlight.ai/ for service status."
            f"Original Error was: {str(err)}"
        ) from err
@staticmethod
def _fixup_image_query(iq: ImageQuery) -> ImageQuery:
    """
    Convert a wire-format image query into its user-facing form.

    Binary results arrive with internal label names; translate them to display
    labels before handing the query back to the caller.
    """
    # Note: This might go away once we clean up the mapping logic server-side.
    # The result can be None (the server returns None when want_async=True) and
    # other result types need no translation, so only binary results are touched.
    if isinstance(iq.result, BinaryClassificationResult):
        iq.result.label = convert_internal_label_to_display(iq, iq.result.label)
    return iq
def whoami(self) -> str:
    """
    Return the username (email address) associated with the current API token.

    Useful as a lightweight check that authentication is configured correctly.

    **Example usage**::

        gl = Groundlight()
        username = gl.whoami()
        print(f"Authenticated as {username}")

    :return: The email address of the authenticated user
    :raises ApiTokenError: If the API token is invalid
    :raises GroundlightClientError: If there are connectivity issues with the Groundlight service
    """
    user_info = self.user_api.who_am_i(_request_timeout=DEFAULT_REQUEST_TIMEOUT)
    return user_info["email"]
def _user_is_privileged(self) -> bool:
    """
    Return a boolean indicating whether the user is privileged.

    Privileged users have elevated permissions, so care should be taken when
    using a privileged account.

    :return: True if the authenticated user is a superuser.
    """
    # Use the same explicit timeout as whoami() and the other endpoints in this
    # file, so a bad network path fails fast instead of hanging on the (very
    # long) system default.
    obj = self.user_api.who_am_i(_request_timeout=DEFAULT_REQUEST_TIMEOUT)
    return obj["is_superuser"]
def get_detector(
    self,
    id: Union[str, Detector],  # pylint: disable=redefined-builtin
    request_timeout: Optional[Union[float, Tuple[float, float]]] = None,
) -> Detector:
    """
    Get a Detector by id.

    **Example usage**::

        gl = Groundlight()
        detector = gl.get_detector(id="det_12345")
        print(detector)

    :param id: the detector id, or an existing Detector object (returned unchanged)
    :param request_timeout: The request timeout for the image query submission API request. Most users will not need
        to modify this. If not set, the default value will be used.
    :return: Detector
    :raises NotFoundError: if no detector with the given id exists
    """
    # Already a full Detector object -- nothing to fetch.
    if isinstance(id, Detector):
        return id
    timeout = DEFAULT_REQUEST_TIMEOUT if request_timeout is None else request_timeout
    try:
        obj = self.detectors_api.get_detector(id=id, _request_timeout=timeout)
    except NotFoundException as e:
        raise NotFoundError(f"Detector with id '{id}' not found") from e
    return Detector.parse_obj(obj.to_dict())
def get_detector_by_name(self, name: str) -> Detector:
    """
    Look up a Detector by its name.

    **Example usage**::

        gl = Groundlight()
        detector = gl.get_detector_by_name(name="door_detector")
        print(detector)

    :param name: the detector name
    :return: Detector
    :raises NotFoundError: if no detector with the given name exists
    """
    # Delegates to the internal API client's name-lookup helper.
    return self.api_client._get_detector_by_name(name)  # pylint: disable=protected-access
def list_detectors(self, page: int = 1, page_size: int = 10) -> PaginatedDetectorList:
    """
    Fetch one page of the detectors belonging to your account.

    **Example usage**::

        gl = Groundlight()
        # Get first page of 5 detectors
        detectors = gl.list_detectors(page=1, page_size=5)
        for detector in detectors.items:
            print(detector)

    :param page: The page number to retrieve (1-based indexing). Use this parameter to navigate
        through multiple pages of detectors.
    :param page_size: The number of detectors to return per page.
    :return: PaginatedDetectorList containing the requested page of detectors and pagination metadata
    """
    response = self.detectors_api.list_detectors(
        page=page,
        page_size=page_size,
        _request_timeout=DEFAULT_REQUEST_TIMEOUT,
    )
    return PaginatedDetectorList.parse_obj(response.to_dict())
def _prep_create_detector(  # noqa: PLR0913 # pylint: disable=too-many-arguments, too-many-locals
    self,
    name: str,
    query: str,
    *,
    group_name: Optional[str] = None,
    confidence_threshold: Optional[float] = None,
    patience_time: Optional[float] = None,
    pipeline_config: Optional[str] = None,
    edge_pipeline_config: Optional[str] = None,
    metadata: Union[dict, str, None] = None,
    priming_group_id: Optional[str] = None,
) -> DetectorCreationInputRequest:
    """
    A helper function to prepare the input for creating a detector. Individual create_detector
    methods may add to the input before calling the API.

    :return: a DetectorCreationInputRequest populated with the provided fields.
        (The previous annotation of ``Detector`` was incorrect -- no API call is
        made here; this only builds the request object.)
    """
    detector_creation_input = DetectorCreationInputRequest(
        name=name,
        query=query,
        pipeline_config=pipeline_config,
        edge_pipeline_config=edge_pipeline_config,
    )
    if group_name is not None:
        detector_creation_input.group_name = group_name
    if metadata is not None:
        # Metadata is URL-encoded and capped at 1KB.
        detector_creation_input.metadata = str(url_encode_dict(metadata, name="metadata", size_limit_bytes=1024))
    # NOTE: truthiness (not `is not None`) is preserved here, so a 0/0.0 value is
    # silently dropped; valid confidence thresholds are between 0.5 and 1.
    if confidence_threshold:
        detector_creation_input.confidence_threshold = confidence_threshold
    # The API model expects a float; accept ints for caller convenience.
    if isinstance(patience_time, int):
        patience_time = float(patience_time)
    if patience_time:
        detector_creation_input.patience_time = patience_time
    if priming_group_id is not None:
        detector_creation_input.priming_group_id = priming_group_id
    return detector_creation_input
def create_detector(  # noqa: PLR0913
    self,
    name: str,
    query: str,
    *,
    mode: ModeEnum = ModeEnum.BINARY,
    group_name: Optional[str] = None,
    confidence_threshold: Optional[float] = None,
    patience_time: Optional[float] = None,
    pipeline_config: Optional[str] = None,
    edge_pipeline_config: Optional[str] = None,
    metadata: Union[dict, str, None] = None,
    class_names: Optional[Union[List[str], str]] = None,
    priming_group_id: Optional[str] = None,
) -> Detector:
    """
    Create a new Detector with a given name and query.

    A binary detector is created by default; pass ``mode`` to create counting or
    multi-class detectors instead. Text recognition detectors are in Beta, and can
    be created through the ExperimentalApi via the
    :meth:`ExperimentalApi.create_text_recognition_detector` method.

    **Example usage**::

        gl = Groundlight()

        # Create a basic binary detector
        detector = gl.create_detector(
            name="dog-on-couch-detector",
            query="Is there a dog on the couch?",
            confidence_threshold=0.9,
            patience_time=30.0,
        )

        # Create a detector with metadata
        detector = gl.create_detector(
            name="door-monitor",
            query="Is the door open?",
            metadata={"location": "front-entrance", "building": "HQ"},
            confidence_threshold=0.95,
        )

        # Create a detector in a specific group
        detector = gl.create_detector(
            name="vehicle-monitor",
            query="Are there vehicles are in the parking lot?",
            group_name="parking-monitoring",
            patience_time=60.0,
        )

    :param name: A short, descriptive name for the detector, unique within your account.
    :param query: The question the detector will answer about images. For binary
        classification, this should be a yes/no question (e.g. "Is there a person in the image?").
    :param mode: The mode of the detector. Defaults to ModeEnum.BINARY.
    :param group_name: Optional name of a group to organize related detectors together.
        If not specified, the detector is placed in the default group.
    :param confidence_threshold: A value between 0.5 and 1 that sets the minimum confidence
        level required for the ML model's predictions. If confidence is below this threshold,
        the query may be sent for human review.
    :param patience_time: The maximum time in seconds that Groundlight will attempt to generate
        a confident prediction before falling back to human review. Defaults to 30 seconds.
    :param pipeline_config: Advanced usage only. Configuration string needed to instantiate a
        specific prediction pipeline for this detector.
    :param edge_pipeline_config: Advanced usage only. Configuration for the edge inference
        pipeline. If not specified, the mode's default edge pipeline is used.
    :param metadata: A dictionary or JSON string of custom key/value pairs to associate with
        the detector (limited to 1KB). Retrieve it later via `get_detector()`.
    :param class_names: The class name(s) for the detector: a single string for counting
        detectors, a list of strings for multi-class detectors. Not allowed for binary mode.
    :param priming_group_id: Optional ID of an existing PrimingGroup to associate with this
        detector. PrimingGroup IDs are provided by Groundlight representatives.
    :return: The created Detector object
    :raises ValueError: if ``class_names`` is inconsistent with ``mode``, or the mode is
        not supported by this method.
    """
    # Every mode-specific creation method takes this same set of keyword arguments.
    shared_kwargs = {
        "name": name,
        "query": query,
        "group_name": group_name,
        "confidence_threshold": confidence_threshold,
        "patience_time": patience_time,
        "pipeline_config": pipeline_config,
        "edge_pipeline_config": edge_pipeline_config,
        "metadata": metadata,
        "priming_group_id": priming_group_id,
    }
    if mode == ModeEnum.BINARY:
        if class_names is not None:
            raise ValueError("class_names is not supported for binary detectors")
        return self.create_binary_detector(**shared_kwargs)
    if mode == ModeEnum.COUNT:
        if class_names is None:
            raise ValueError("class_names is required for counting detectors")
        if isinstance(class_names, list):
            raise ValueError("class_names must be a single string for counting detectors")
        return self.create_counting_detector(class_name=class_names, **shared_kwargs)
    if mode == ModeEnum.MULTI_CLASS:
        if class_names is None:
            raise ValueError("class_names is required for multi-class detectors")
        if isinstance(class_names, str):
            raise ValueError("class_names must be a list for multi-class detectors")
        return self.create_multiclass_detector(class_names=class_names, **shared_kwargs)
    raise ValueError(
        f"Unsupported mode: {mode}, check if your desired mode is only supported in the ExperimentalApi"
    )
def get_or_create_detector(  # noqa: PLR0913
    self,
    name: str,
    query: str,
    *,
    group_name: Optional[str] = None,
    confidence_threshold: Optional[float] = None,
    pipeline_config: Optional[str] = None,
    edge_pipeline_config: Optional[str] = None,
    metadata: Union[dict, str, None] = None,
) -> Detector:
    """
    Look up a Detector by name, creating it if it does not exist.

    If a detector with the given name already exists, its query, group name, and
    confidence threshold are checked against the requested values; a mismatch
    raises ValueError rather than modifying the existing detector.

    **Example usage**::

        gl = Groundlight()
        detector = gl.get_or_create_detector(
            name="service-counter-usage",
            query="Is there a customer at the service counter?",
            group_name="retail-analytics",
            confidence_threshold=0.95,
            metadata={"location": "store-123"}
        )

    :param name: A short, descriptive name for the detector, unique within your account.
    :param query: The question the detector will answer about images. For binary
        classification, this should be a yes/no question.
    :param group_name: Optional name of a group to organize related detectors together.
        If not specified, the detector is placed in the default group.
    :param confidence_threshold: A value between 0.5 and 1 that sets the minimum confidence
        level required for the ML model's predictions. If confidence is below this threshold,
        the query may be sent for human review.
    :param pipeline_config: Advanced usage only. Configuration string needed to instantiate
        a specific prediction pipeline for this detector.
    :param edge_pipeline_config: Advanced usage only. Configuration for the edge inference
        pipeline. If not specified, the mode's default edge pipeline is used.
    :param metadata: A dictionary or JSON string of custom key/value pairs to associate with
        the detector (limited to 1KB). Retrieve it later via `get_detector()`.
    :return: Detector with the specified configuration
    :raises ValueError: If an existing detector is found but has different configuration
    :raises ApiTokenError: If API token is invalid
    :raises GroundlightClientError: For other API errors
    """
    try:
        detector = self.get_detector_by_name(name)
    except NotFoundError:
        logger.debug(f"We could not find a detector with name='{name}'. So we will create a new detector ...")
        return self.create_detector(
            name=name,
            query=query,
            group_name=group_name,
            confidence_threshold=confidence_threshold,
            pipeline_config=pipeline_config,
            edge_pipeline_config=edge_pipeline_config,
            metadata=metadata,
        )
    # TODO: We may soon allow users to update the retrieved detector's fields.
    if detector.query != query:
        raise ValueError(
            f"Found existing detector with name={name} (id={detector.id}) but the queries don't match."
            f" The existing query is '{detector.query}'.",
        )
    if group_name is not None and detector.group_name != group_name:
        raise ValueError(
            f"Found existing detector with name={name} (id={detector.id}) but the group names don't"
            f" match. The existing group name is '{detector.group_name}'.",
        )
    if confidence_threshold is not None and detector.confidence_threshold != confidence_threshold:
        raise ValueError(
            f"Found existing detector with name={name} (id={detector.id}) but the confidence"
            " thresholds don't match. The existing confidence threshold is"
            f" {detector.confidence_threshold}.",
        )
    return detector
def get_image_query(self, id: str) -> ImageQuery:  # pylint: disable=redefined-builtin
    """
    Get an ImageQuery by its ID. This is useful for retrieving the status and results of a
    previously submitted query.

    **Example Usage:**

        gl = Groundlight()
        # Get an existing image query by ID
        image_query = gl.get_image_query("iq_abc123")
        # Get the result if available
        if image_query.result is not None:
            print(f"Answer: {image_query.result.label}")
            print(f"Source: {image_query.result.source}")
            print(f"Confidence: {image_query.result.confidence}")  # e.g. 0.98

    :param id: The ImageQuery ID to look up. This ID is returned when submitting a new ImageQuery.
    :return: ImageQuery object containing the query details and results
    """
    obj = self.image_queries_api.get_image_query(id=id, _request_timeout=DEFAULT_REQUEST_TIMEOUT)
    # Wire-format fixup: a counting result may arrive carrying a truthy "label"
    # field; strip it and null out "count" so the ImageQuery model parses.
    # NOTE(review): this discards the returned count when a label is present --
    # presumably the label-bearing form means no usable count yet; confirm
    # against the server's counting-result schema.
    if obj.result_type == "counting" and getattr(obj.result, "label", None):
        obj.result.pop("label")
        obj.result["count"] = None
    iq = ImageQuery.parse_obj(obj.to_dict())
    # Translate internal binary labels to display labels before returning.
    return self._fixup_image_query(iq)
def list_image_queries(
    self, page: int = 1, page_size: int = 10, detector_id: Union[str, None] = None
) -> PaginatedImageQueryList:
    """
    List the image queries associated with your account, one page at a time.

    **Example Usage**::

        gl = Groundlight()

        # Get first page of 10 image queries
        queries = gl.list_image_queries(page=1, page_size=10)

        # Access results
        for query in queries.results:
            print(f"Query ID: {query.id}")
            print(f"Result: {query.result.label if query.result else 'No result yet'}")

    :param page: The page number to retrieve (1-based indexing). Use this parameter to navigate
        through multiple pages of image queries.
    :param page_size: Number of image queries to return per page. Default is 10.
    :param detector_id: Optional detector ID; when given, only image queries submitted to that
        detector are returned.
    :return: PaginatedImageQueryList containing the requested page of image queries and pagination
        metadata like total count and links to next/previous pages.
    """
    request_kwargs: dict[str, Any] = {
        "page": page,
        "page_size": page_size,
        "_request_timeout": DEFAULT_REQUEST_TIMEOUT,
    }
    # Only include the filter when a detector was actually specified.
    if detector_id:
        request_kwargs["detector_id"] = detector_id
    response = self.image_queries_api.list_image_queries(**request_kwargs)
    queries = PaginatedImageQueryList.parse_obj(response.to_dict())
    if queries.results is not None:
        # Translate internal labels to display labels on every returned query.
        queries.results = [self._fixup_image_query(iq) for iq in queries.results]
    return queries
def get_month_to_date_usage(self) -> AccountMonthToDateInfo:
    """
    Get the account's month-to-date usage information including image queries (IQs),
    escalations, and active detectors with their respective limits.

    **Example Usage:**

        gl = Groundlight()
        usage = gl.get_month_to_date_usage()
        print(f"Image queries used: {usage.iqs} / {usage.iqs_limit}")
        print(f"Escalations used: {usage.escalations} / {usage.escalations_limit}")
        print(f"Active detectors: {usage.active_detectors} / {usage.active_detectors_limit}")

    :return: AccountMonthToDateInfo object containing usage metrics and limits
    """
    response = self.month_to_date_api.month_to_date_account_info(_request_timeout=DEFAULT_REQUEST_TIMEOUT)
    return AccountMonthToDateInfo.model_validate(response.to_dict())
def submit_image_query(  # noqa: PLR0913 # pylint: disable=too-many-arguments, too-many-locals
    self,
    detector: Union[Detector, str],
    image: Union[str, bytes, Image.Image, BytesIO, BufferedReader, np.ndarray],
    wait: Optional[float] = None,
    patience_time: Optional[float] = None,
    confidence_threshold: Optional[float] = None,
    human_review: Optional[str] = None,
    want_async: bool = False,
    inspection_id: Optional[str] = None,
    metadata: Union[dict, str, None] = None,
    image_query_id: Optional[str] = None,
    request_timeout: Optional[Union[float, Tuple[float, float]]] = None,
) -> ImageQuery:
    """
    Evaluates an image with Groundlight. This is the core method for getting predictions about images.

    **Example Usage**::

        from groundlight import Groundlight
        from PIL import Image

        gl = Groundlight()
        det = gl.get_or_create_detector(
            name="parking-space",
            query="Is there a car in the leftmost parking space?"
        )

        # Basic synchronous usage
        image = "path/to/image.jpg"
        image_query = gl.submit_image_query(detector=det, image=image)
        print(f"The answer is {image_query.result.label}")

        # Asynchronous usage with custom confidence
        image = Image.open("path/to/image.jpg")
        image_query = gl.submit_image_query(
            detector=det,
            image=image,
            wait=0,  # Don't wait for result
            confidence_threshold=0.95,
            want_async=True
        )
        print(f"Submitted image_query {image_query.id}")

        # With metadata and mandatory human review
        image_query = gl.submit_image_query(
            detector=det,
            image=image,
            metadata={"location": "entrance", "camera": "cam1"},
            human_review="ALWAYS"
        )

    .. note::
        This method supports both synchronous and asynchronous workflows, configurable confidence thresholds,
        and optional human review.

    .. seealso::
        :meth:`ask_confident` for a simpler synchronous workflow

        :meth:`ask_async` for a simpler asynchronous workflow

        :meth:`ask_ml` for faster ML predictions without waiting for a confident answer

    :param detector: the Detector object, or string id of a detector like `det_12345`
    :param image: The image, in several possible formats:

        - filename (string) of a jpeg file
        - byte array or BytesIO or BufferedReader with jpeg bytes
        - numpy array with values 0-255 and dimensions (H,W,3) in BGR order
          (Note OpenCV uses BGR not RGB. `img[:, :, ::-1]` will reverse the channels)
        - PIL Image: Any binary format must be JPEG-encoded already.
          Any pixel format will get converted to JPEG at high quality before sending to service.

    :param wait: How long to poll (in seconds) for a confident answer. This is a client-side timeout.
        Default is 30.0. Set to 0 for async operation.
    :param patience_time: How long to wait (in seconds) for a confident answer for this image query.
        The longer the patience_time, the more likely Groundlight will arrive at a confident answer.
        Within patience_time, Groundlight will update ML predictions based on stronger findings,
        and, additionally, Groundlight will prioritize human review of the image query if necessary.
        This is a soft server-side timeout. If not set, use the detector's patience_time.
    :param confidence_threshold: The confidence threshold to wait for.
        If not set, use the detector's confidence threshold.
    :param human_review: If `None` or `DEFAULT`, send the image query for human review
        only if the ML prediction is not confident.
        If set to `ALWAYS`, always send the image query for human review.
        If set to `NEVER`, never send the image query for human review.
    :param want_async: If True, return immediately without waiting for result.
        Must set `wait=0` when using this option.
    :param inspection_id: Most users will omit this. For accounts with Inspection Reports enabled,
        this is the ID of the inspection to associate with the image query.
    :param metadata: A dictionary or JSON string of custom key/value metadata to associate with
        the image query (limited to 1KB). You can retrieve this metadata later by calling
        `get_image_query()`.
    :param image_query_id: The ID for the image query. This is to enable specific functionality
        and is not intended for general external use. If not set, a random ID
        will be generated.
    :param request_timeout: The request timeout for the image query submission API request. Most users will not need
        to modify this. If not set, the default value will be used.
    :return: ImageQuery with query details and result (if wait > 0)
    :raises ValueError: If wait > 0 when want_async=True
    :raises ApiTokenError: If API token is invalid
    :raises GroundlightClientError: For other API errors
    """
    # Client-side default: poll up to DEFAULT_WAIT seconds unless the caller chose otherwise.
    if wait is None:
        wait = self.DEFAULT_WAIT

    # Accept either a Detector object or its string id.
    detector_id = detector.id if isinstance(detector, Detector) else detector

    # Normalize any of the supported image input types into a byte stream for the request body.
    image_bytesio: ByteStreamWrapper = parse_supported_image_types(image)

    params: dict[str, Any] = {
        "detector_id": detector_id,
        "body": image_bytesio,
        "_request_timeout": request_timeout if request_timeout is not None else DEFAULT_REQUEST_TIMEOUT,
    }

    # Only include optional query parameters that the caller actually provided, so the
    # server applies its own (or the detector's) defaults for anything omitted.
    if patience_time is not None:
        params["patience_time"] = patience_time

    if confidence_threshold is not None:
        params["confidence_threshold"] = confidence_threshold

    if human_review is not None:
        params["human_review"] = human_review

    if inspection_id:  # consider an empty string to mean there is no inspection
        params["inspection_id"] = inspection_id

    if want_async is True:
        # If want_async is True, we don't want to wait for a result. As a result wait must be set to 0 to use
        # want_async.
        if wait != 0:
            raise ValueError(
                "wait must be set to 0 to use want_async. Using wait and want_async at the same time is incompatible."  # noqa: E501
            )
        # The flag is sent as a string ("True") in the query string rather than a boolean.
        params["want_async"] = str(bool(want_async))

    if metadata is not None:
        # Currently, our backend server puts the image in the body data of the API request,
        # which means we need to put the metadata in the query string. To do that safely, we
        # url- and base64-encode the metadata.
        params["metadata"] = url_encode_dict(metadata, name="metadata", size_limit_bytes=1024)

    if image_query_id is not None:
        params["image_query_id"] = image_query_id

    raw_image_query = self.image_queries_api.submit_image_query(**params)
    # Re-parse the generated-client response into the SDK's ImageQuery model.
    image_query = ImageQuery.parse_obj(raw_image_query.to_dict())

    if wait > 0:
        if confidence_threshold is None:
            # No explicit threshold given: fall back to the detector's configured
            # confidence threshold (this performs an extra API lookup).
            threshold = self.get_detector(detector).confidence_threshold
        else:
            threshold = confidence_threshold
        # Poll until the result reaches the threshold or the client-side timeout expires.
        image_query = self.wait_for_confident_result(image_query, confidence_threshold=threshold, timeout_sec=wait)

    return self._fixup_image_query(image_query)
def ask_confident(  # noqa: PLR0913 # pylint: disable=too-many-arguments
    self,
    detector: Union[Detector, str],
    image: Union[str, bytes, Image.Image, BytesIO, BufferedReader, np.ndarray],
    confidence_threshold: Optional[float] = None,
    wait: Optional[float] = None,
    metadata: Union[dict, str, None] = None,
    inspection_id: Optional[str] = None,
) -> ImageQuery:
    """
    Evaluates an image with Groundlight, blocking until either an answer above the
    confidence threshold arrives or the wait period runs out.

    **Example usage**::

        gl = Groundlight()
        image_query = gl.ask_confident(
            detector="det_12345",
            image="path/to/image.jpg",
            confidence_threshold=0.9,
            wait=30.0
        )
        if image_query.result.confidence >= 0.9:
            print(f"Confident answer: {image_query.result.label}")
        else:
            print("Could not get confident answer within timeout")

    :param detector: the Detector object, or string id of a detector like `det_12345`
    :param image: The image, in several possible formats:

        - filename (string) of a jpeg file
        - byte array or BytesIO or BufferedReader with jpeg bytes
        - numpy array with values 0-255 and dimensions (H,W,3) in BGR order
          (Note OpenCV uses BGR not RGB. `img[:, :, ::-1]` will reverse the channels)
        - PIL Image

        Any binary format must be JPEG-encoded already. Any pixel format will get
        converted to JPEG at high quality before sending to service.
    :param confidence_threshold: The confidence threshold to wait for.
        If not set, use the detector's confidence threshold.
    :param wait: How long to wait (in seconds) for a confident answer.
    :param metadata: A dictionary or JSON string of custom key/value metadata to associate with
        the image query (limited to 1KB). You can retrieve this metadata later by calling
        `get_image_query()`.
    :param inspection_id: Most users will omit this. For accounts with Inspection Reports enabled,
        this is the ID of the inspection to associate with the image query.
    :return: ImageQuery containing the prediction result
    :raises ApiTokenError: If API token is invalid
    :raises GroundlightClientError: For other API errors

    .. seealso::
        :meth:`ask_ml` for getting the first ML prediction without waiting an answer above the confidence threshold

        :meth:`ask_async` for submitting queries asynchronously

        :meth:`submit_image_query` for submitting queries with more control over the process
    """
    # Delegate to submit_image_query; mirroring `wait` into `patience_time` keeps the
    # server-side patience window aligned with the client-side polling window.
    query_options = {
        "confidence_threshold": confidence_threshold,
        "wait": wait,
        "patience_time": wait,
        "human_review": None,
        "metadata": metadata,
        "inspection_id": inspection_id,
    }
    return self.submit_image_query(detector, image, **query_options)
def ask_ml(  # noqa: PLR0913 # pylint: disable=too-many-arguments, too-many-locals
    self,
    detector: Union[Detector, str],
    image: Union[str, bytes, Image.Image, BytesIO, BufferedReader, np.ndarray],
    wait: Optional[float] = None,
    metadata: Union[dict, str, None] = None,
    inspection_id: Optional[str] = None,
) -> ImageQuery:
    """
    Evaluates an image with Groundlight and returns as soon as the first ML prediction
    is available — no waiting for high confidence or human review.

    **Example usage**::

        gl = Groundlight()
        detector = gl.get_detector("det_12345")  # or create one with create_detector()

        # Get quick ML prediction for an image
        image_query = gl.ask_ml(detector, "path/to/image.jpg")

        # The image_query may have low confidence since we're never waiting for human review
        print(f"Quick ML prediction: {image_query.result.label}")
        print(f"Confidence: {image_query.result.confidence}")

        # You can also pass metadata to track additional information
        image_query = gl.ask_ml(
            detector=detector,
            image="path/to/image.jpg",
            metadata={"camera_id": "front_door", "timestamp": "2023-01-01T12:00:00Z"}
        )

    :param detector: the Detector object, or string id of a detector like `det_12345`
    :param image: The image, in several possible formats:

        - filename (string) of a jpeg file
        - byte array or BytesIO or BufferedReader with jpeg bytes
        - numpy array with values 0-255 and dimensions (H,W,3) in BGR order
          (Note OpenCV uses BGR not RGB. `img[:, :, ::-1]` will reverse the channels)
        - PIL Image

        Any binary format must be JPEG-encoded already. Any pixel format will get
        converted to JPEG at high quality before sending to service.
    :param wait: How long to wait (in seconds) for any ML prediction.
        Default is 30.0 seconds.
    :param metadata: A dictionary or JSON string of custom key/value metadata to associate with
        the image query (limited to 1KB). You can retrieve this metadata later by calling
        `get_image_query()`.
    :param inspection_id: Most users will omit this. For accounts with Inspection Reports enabled,
        this is the ID of the inspection to associate with the image query.
    :return: ImageQuery containing the ML prediction
    :raises ApiTokenError: If API token is invalid
    :raises GroundlightClientError: For other API errors

    .. note::
        This method returns the first available ML prediction, which may have low confidence.
        For answers above a configured confidence_threshold, use :meth:`ask_confident` instead.

    .. seealso::
        :meth:`ask_confident` for waiting until a high-confidence prediction is available

        :meth:`ask_async` for submitting queries asynchronously
    """
    # Submit without blocking (wait=0); polling for an ML answer is handled below.
    initial_iq = self.submit_image_query(
        detector,
        image,
        wait=0,
        metadata=metadata,
        inspection_id=inspection_id,
    )
    if iq_is_answered(initial_iq):
        # The submission response already carried an answer — return it immediately.
        return initial_iq
    if wait is None:
        wait = self.DEFAULT_WAIT
    return self.wait_for_ml_result(initial_iq, timeout_sec=wait)
def ask_async( # noqa: PLR0913 # pylint: disable=too-many-arguments
self,
detector: Union[Detector, str],
image: Union[str, bytes, Image.Image, BytesIO, BufferedReader, np.ndarray],
patience_time: Optional[float] = None,
confidence_threshold: Optional[float] = None,
human_review: Optional[str] = None,
metadata: Union[dict, str, None] = None,
inspection_id: Optional[str] = None,
) -> ImageQuery:
"""
Submit an image query asynchronously. This is equivalent to calling `submit_image_query`
with `want_async=True` and `wait=0`.