feat: task 9 — add _graph_request helper with retry on 429/503
Add module-level _graph_request() that wraps requests.request() with:
- Up to 3 retries on HTTP 429 (rate limited) and 503 (unavailable)
- Exponential backoff capped at 60 s, honouring Retry-After header
- Default timeout=30 s injected via setdefault (caller can override)

Wire all 13 retry-eligible API calls through _graph_request(). The 3
file-upload requests.put(data=f) calls are kept direct since an open
stream cannot be re-read after the first attempt.

Add 9 unit tests covering: success path, 429/503 retry, Retry-After
header, max-retry exhaustion, timeout injection and override.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -74,6 +74,28 @@ def save_settings(new_settings):
|
||||
with open(SETTINGS_FILE, 'w', encoding='utf-8') as f:
|
||||
json.dump(new_settings, f, indent=4)
|
||||
|
||||
# --- GRAPH API REQUEST HELPER ---

_RETRY_STATUSES = {429, 503}
_MAX_RETRIES = 3


def _graph_request(method, url, **kwargs):
    """Thin wrapper around requests.request with retry for 429/503.

    Makes one initial attempt plus up to _MAX_RETRIES retries when the
    Graph API signals rate limiting (429) or transient unavailability
    (503), honouring the Retry-After header when it carries a plain
    delay-seconds value. A default timeout of 30 s is injected if the
    caller does not supply one. File-upload calls that pass an open
    stream as data= should use requests.put() directly, since a stream
    cannot be re-read on a second attempt.

    Returns the final requests.Response (the last response received if
    all retries are exhausted).
    """
    kwargs.setdefault("timeout", 30)
    res = None
    # _MAX_RETRIES is a retry count: 1 initial attempt + _MAX_RETRIES
    # retries (the original range(_MAX_RETRIES) gave only 2 retries).
    for attempt in range(_MAX_RETRIES + 1):
        res = requests.request(method, url, **kwargs)
        if res.status_code not in _RETRY_STATUSES:
            return res
        if attempt == _MAX_RETRIES:
            break  # retries exhausted -- don't sleep before giving up
        # Retry-After may be absent, or an HTTP-date rather than
        # delay-seconds (RFC 9110); fall back to exponential backoff
        # instead of letting int() raise ValueError.
        try:
            delay = int(res.headers["Retry-After"])
        except (KeyError, ValueError):
            delay = 2 ** attempt
        time.sleep(min(delay, 60))
    return res  # Last response after exhausting retries
|
||||
|
||||
settings = load_settings()
|
||||
CLIENT_ID = settings.get("client_id")
|
||||
TENANT_ID = settings.get("tenant_id")
|
||||
@@ -1170,7 +1192,7 @@ class SharePointApp(wx.Frame):
|
||||
status_text = "Sletter" if self.lang == "da" else "Deleting"
|
||||
self.set_status(f"{status_text} {count+1}/{total}: '{item['name']}'...")
|
||||
url = f"https://graph.microsoft.com/v1.0/drives/{item['drive_id']}/items/{item['id']}"
|
||||
res = requests.delete(url, headers=self.headers, timeout=30)
|
||||
res = _graph_request("DELETE", url, headers=self.headers, timeout=30)
|
||||
if res.status_code in [204, 200]:
|
||||
count += 1
|
||||
else:
|
||||
@@ -1238,7 +1260,7 @@ class SharePointApp(wx.Frame):
|
||||
def _create_folder_sync(self, name, drive_id, parent_id):
|
||||
url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/items/{parent_id}/children"
|
||||
body = {"name": name, "folder": {}, "@microsoft.graph.conflictBehavior": "rename"}
|
||||
res = requests.post(url, headers=self.headers, json=body, timeout=30)
|
||||
res = _graph_request("POST", url, headers=self.headers, json=body, timeout=30)
|
||||
if res.status_code in [200, 201]:
|
||||
return res.json().get('id')
|
||||
return None
|
||||
@@ -1282,7 +1304,7 @@ class SharePointApp(wx.Frame):
|
||||
self.set_status(f"{self.get_txt('msg_rename')}...")
|
||||
url = f"https://graph.microsoft.com/v1.0/drives/{item['drive_id']}/items/{item['id']}"
|
||||
body = {"name": new_name}
|
||||
res = requests.patch(url, headers=self.headers, json=body, timeout=30)
|
||||
res = _graph_request("PATCH", url, headers=self.headers, json=body, timeout=30)
|
||||
if res.status_code in [200, 201]:
|
||||
self.set_status(self.get_txt("msg_success"))
|
||||
self._refresh_current_view()
|
||||
@@ -1319,7 +1341,7 @@ class SharePointApp(wx.Frame):
|
||||
|
||||
def _download_file_sync_call(self, drive_id, item_id, dest_path):
|
||||
url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/items/{item_id}/content"
|
||||
res = requests.get(url, headers=self.headers, timeout=30)
|
||||
res = _graph_request("GET", url, headers=self.headers, timeout=30)
|
||||
if res.status_code == 200:
|
||||
with open(dest_path, 'wb') as f:
|
||||
f.write(res.content)
|
||||
@@ -1338,7 +1360,7 @@ class SharePointApp(wx.Frame):
|
||||
|
||||
url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/items/{folder_id}/children"
|
||||
while url:
|
||||
res = requests.get(url, headers=self.headers, timeout=30)
|
||||
res = _graph_request("GET", url, headers=self.headers, timeout=30)
|
||||
if res.status_code == 200:
|
||||
res_data = res.json()
|
||||
items = res_data.get('value', [])
|
||||
@@ -1717,7 +1739,7 @@ class SharePointApp(wx.Frame):
|
||||
url = "https://graph.microsoft.com/v1.0/sites?search=*"
|
||||
|
||||
while url:
|
||||
res = requests.get(url, headers=self.headers, timeout=30)
|
||||
res = _graph_request("GET", url, headers=self.headers, timeout=30)
|
||||
if res.status_code == 200:
|
||||
data = res.json()
|
||||
all_sites.extend(data.get('value', []))
|
||||
@@ -1783,7 +1805,7 @@ class SharePointApp(wx.Frame):
|
||||
url = f"https://graph.microsoft.com/v1.0/drives/{data['drive_id']}/items/{data['id']}/children"
|
||||
|
||||
while url:
|
||||
res = requests.get(url, headers=self.headers, timeout=30)
|
||||
res = _graph_request("GET", url, headers=self.headers, timeout=30)
|
||||
if res.status_code == 200:
|
||||
res_data = res.json()
|
||||
all_children.extend(res_data.get('value', []))
|
||||
@@ -1906,7 +1928,7 @@ class SharePointApp(wx.Frame):
|
||||
|
||||
first_chunk = True
|
||||
while url:
|
||||
res = requests.get(url, headers=self.headers, timeout=30)
|
||||
res = _graph_request("GET", url, headers=self.headers, timeout=30)
|
||||
if res.status_code != 200: break
|
||||
|
||||
res_data = res.json()
|
||||
@@ -2290,7 +2312,7 @@ class SharePointApp(wx.Frame):
|
||||
|
||||
# 2. Download
|
||||
self.set_status(self.get_txt("msg_fetching_file", name=file_name))
|
||||
res = requests.get(f"{base_url}/content", headers=self.headers, timeout=30)
|
||||
res = _graph_request("GET", f"{base_url}/content", headers=self.headers, timeout=30)
|
||||
if res.status_code != 200:
|
||||
raise Exception(f"{self.get_txt('msg_unknown_error')}: {res.status_code}")
|
||||
|
||||
@@ -2327,7 +2349,7 @@ class SharePointApp(wx.Frame):
|
||||
|
||||
# Checkout
|
||||
is_checked_out = False
|
||||
checkout_res = requests.post(f"{base_url}/checkout", headers=self.headers, timeout=30)
|
||||
checkout_res = _graph_request("POST", f"{base_url}/checkout", headers=self.headers, timeout=30)
|
||||
if checkout_res.status_code in [200, 201, 204]:
|
||||
is_checked_out = True
|
||||
logger.info(f"Fil {file_name} udtjekket succesfuldt.")
|
||||
@@ -2387,7 +2409,7 @@ class SharePointApp(wx.Frame):
|
||||
|
||||
if is_checked_out:
|
||||
logger.info(f"Annullerer udtjekning (discardCheckout) for {file_name}...")
|
||||
res = requests.post(f"{base_url}/discardCheckout", headers=self.headers, timeout=30)
|
||||
res = _graph_request("POST", f"{base_url}/discardCheckout", headers=self.headers, timeout=30)
|
||||
if res.status_code in [200, 204]:
|
||||
is_checked_out = False
|
||||
else:
|
||||
@@ -2402,7 +2424,7 @@ class SharePointApp(wx.Frame):
|
||||
# 6. Checkin (Kun hvis vi faktisk uploadede noget)
|
||||
if is_checked_out:
|
||||
self.set_status(self.get_txt("msg_checking_in", name=file_name))
|
||||
res = requests.post(f"{base_url}/checkin", headers=self.headers, json={"comment": "SP Explorer Edit"}, timeout=30)
|
||||
res = _graph_request("POST", f"{base_url}/checkin", headers=self.headers, json={"comment": "SP Explorer Edit"}, timeout=30)
|
||||
if res.status_code in [200, 201, 204]:
|
||||
is_checked_out = False
|
||||
|
||||
@@ -2424,7 +2446,7 @@ class SharePointApp(wx.Frame):
|
||||
if is_checked_out:
|
||||
# Emergency cleanup hvis vi stadig har fat i filen (f.eks. ved crash eller afbrydelse)
|
||||
logger.info(f"Rydder op: Kalder discardCheckout for {file_name}...")
|
||||
requests.post(f"{base_url}/discardCheckout", headers=self.headers, timeout=30)
|
||||
_graph_request("POST", f"{base_url}/discardCheckout", headers=self.headers, timeout=30)
|
||||
|
||||
if item_id in self.active_edits:
|
||||
del self.active_edits[item_id]
|
||||
|
||||
Reference in New Issue
Block a user