     UPDATE_RELATIONSHIPV2,
     DELETE_RELATIONSHIP,
     CURSOR_QUERY_V1,
+    DEFERRED_RESPONSE_QUERY,
     CREATE_INSTANCE,
     INTEGRATION_JOB_VALUES,
     INTEGRATION_INSTANCE_EVENT_VALUES,
@@ -265,6 +266,85 @@ def _limit_and_skip_query(
             page += 1

         return {"data": results}
+
+    def query_with_deferred_response(self, query, cursor=None):
+        """
+        Execute a J1QL query that returns a deferred response, which is
+        suited to large result sets.
+
+        Args:
+            query (str): The J1QL query to execute
+            cursor (str, optional): Pagination cursor for subsequent requests
+
+        Returns:
+            list: Combined results from all paginated responses
+        """
+        all_query_results = []
+        current_cursor = cursor
+
+        # Build one session up front; the adapter transparently retries
+        # transient gateway errors, while 429s are handled manually below
+        # so the Retry-After header can be honored.
+        max_retries = 5
+        backoff_factor = 2
+        session = requests.Session()
+        retries = Retry(total=max_retries, backoff_factor=1, status_forcelist=[502, 503, 504])
+        session.mount('https://', HTTPAdapter(max_retries=retries))
+
+        while True:
+            variables = {
+                "query": query,
+                "deferredResponse": "FORCE",
+                "cursor": current_cursor,
+                "flags": {"variableResultSize": True}
+            }
+
+            payload = {
+                "query": DEFERRED_RESPONSE_QUERY,
+                "variables": variables
+            }
+
+            # Request the download URL, backing off when rate limited
+            for attempt in range(1, max_retries + 1):
+                url_response = session.post(
+                    self.graphql_url,
+                    headers=self.headers,
+                    json=payload,
+                    timeout=60
+                )
+
+                if url_response.status_code == 429:
+                    retry_after = int(url_response.headers.get("Retry-After", backoff_factor ** attempt))
+                    print(f"Rate limited. Retrying in {retry_after} seconds...")
+                    time.sleep(retry_after)
+                else:
+                    break  # Exit on success or other non-retryable error
+
+            if not url_response.ok:
+                print(f"Request failed after {max_retries} attempts. Status: {url_response.status_code}")
+                break  # Stop instead of looping forever on a persistent failure
+
+            download_url = url_response.json()['data']['queryV1']['url']
+
+            # Poll the download URL until results are ready
+            while True:
+                download_response = session.get(download_url, timeout=60).json()
+                status = download_response['status']
+
+                if status != 'IN_PROGRESS':
+                    break
+
+                time.sleep(0.2)  # Sleep 200 milliseconds between checks
+
+            # Add this page of results to the collection
+            all_query_results.extend(download_response.get('data', []))
+
+            # Follow the cursor to the next page, if any
+            if 'cursor' in download_response:
+                current_cursor = download_response['cursor']
+            else:
+                break
+
+        return all_query_results

     def _execute_syncapi_request(self, endpoint: str, payload: Dict = None) -> Dict:
         """Executes POST request to SyncAPI endpoints"""
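
For reference, a minimal usage sketch of the new method, assuming the client class is `JupiterOneClient` as in this SDK's README (the account and token values here are placeholders):

```python
from jupiterone import JupiterOneClient

# Placeholder credentials; substitute real account/token values.
j1 = JupiterOneClient(account="your-account-id", token="your-api-token")

# The deferred-response flow pages through large result sets;
# the method follows cursors internally and returns the combined list.
results = j1.query_with_deferred_response("FIND jupiterone_account")
print(f"Fetched {len(results)} results")
```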