@@ -2,9 +2,9 @@
 
 import os
 import re
-import sys
 import json
 import time
+import exceptions
 import urllib.error
 import urllib.request
 from ldap3 import Server, Connection, ALL, ALL_ATTRIBUTES, SAFE_SYNC
@@ -27,9 +27,8 @@
 TEST_UNIX_CLUSTER_ID = 10
 TEST_LDAP_TARGET_ID = 9
 
-
-TIMEOUT_MIN = 5
-TIMEOUT_MULTIPLE = 5
+# Value for the base of the exponential backoff
+TIMEOUT_BASE = 5
 MAX_RETRIES = 5
 
 
@@ -83,28 +82,29 @@ def call_api2(method, target, endpoint, authstr, **kw):
 def call_api3(method, target, data, endpoint, authstr, **kw):
     req = mkrequest(method, target, data, endpoint, authstr, **kw)
     retries = 0
-    currentTimeout = TIMEOUT_MIN
-    requestingStart = time.time()
+    current_timeout = TIMEOUT_BASE
+    total_timeout = 0
     payload = None
-    while payload == None:
+    while retries <= MAX_RETRIES:
         try:
-            resp = urllib.request.urlopen(req, timeout=currentTimeout)
-            if retries > 0:
-                print(f"Succeeded for request {req.full_url} after {retries} retries.")
+            resp = urllib.request.urlopen(req, timeout=current_timeout)
             payload = resp.read()
-        except urllib.error.URLError as exception:
-            if retries < MAX_RETRIES:
-                print(f"Error: {exception} for request {req.full_url}, sleeping for {currentTimeout} seconds and retrying.")
-                time.sleep(currentTimeout)
-                currentTimeout *= TIMEOUT_MULTIPLE
-                retries += 1
-            else:
-                requestingStop = time.time()
-                sys.exit(
-                    f"Exception raised after maximum number of retries reached after {requestingStop - requestingStart} seconds. Retries: {retries}. "
+            break
+        # exception catching, mainly for request timeouts and "Service Temporarily Unavailable" (Rate limiting).
+        except urllib.error.HTTPError as exception:
+            if retries >= MAX_RETRIES:
+                raise exceptions.URLRequestError(
+                    "Exception raised after maximum number of retries reached after total backoff of " +
+                    f"{total_timeout} seconds. Retries: {retries}. "
                     f"Exception reason: {exception}.\n Request: {req.full_url}"
                 )
 
+            print("waiting for seconds: " + str(current_timeout))
+            time.sleep(current_timeout)
+            total_timeout += current_timeout
+            current_timeout *= TIMEOUT_BASE
+            retries += 1
+
     return json.loads(payload) if payload else None
 
 
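Note on the resulting schedule: with TIMEOUT_BASE = 5 and MAX_RETRIES = 5, the sleep before each retry grows as 5, 25, 125, 625 and 3125 seconds, i.e. a total backoff of 5 + 25 + 125 + 625 + 3125 = 3905 seconds (roughly 65 minutes) spread over six attempts before the error is raised. The same current_timeout value is also passed to urllib.request.urlopen as the per-attempt timeout, so later attempts both wait longer and are allowed to run longer.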
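The new error path raises exceptions.URLRequestError instead of calling sys.exit(). That exceptions module is not included in this commit, so the following is only an assumption about its shape, a minimal sketch of a custom exception that would satisfy the call above:

    # Hypothetical sketch only: the project's real exceptions module is not shown in this commit.
    class URLRequestError(Exception):
        """Raised when an API request still fails after exhausting all retries."""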