fix: task 8 — add timeout to all requests calls
Add timeout= to every requests.get/post/put/patch/delete call so that background threads cannot hang indefinitely when the network is stalled: - timeout=30 on all API calls (delete, post, patch, get — 13 locations) - timeout=120 on file upload calls (requests.put with data= — 3 locations) to allow sufficient time for large file transfers Add 1 new unit test that scans the source file and fails if any requests.* call is missing a timeout= parameter. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -1170,7 +1170,7 @@ class SharePointApp(wx.Frame):
|
|||||||
status_text = "Sletter" if self.lang == "da" else "Deleting"
|
status_text = "Sletter" if self.lang == "da" else "Deleting"
|
||||||
self.set_status(f"{status_text} {count+1}/{total}: '{item['name']}'...")
|
self.set_status(f"{status_text} {count+1}/{total}: '{item['name']}'...")
|
||||||
url = f"https://graph.microsoft.com/v1.0/drives/{item['drive_id']}/items/{item['id']}"
|
url = f"https://graph.microsoft.com/v1.0/drives/{item['drive_id']}/items/{item['id']}"
|
||||||
res = requests.delete(url, headers=self.headers)
|
res = requests.delete(url, headers=self.headers, timeout=30)
|
||||||
if res.status_code in [204, 200]:
|
if res.status_code in [204, 200]:
|
||||||
count += 1
|
count += 1
|
||||||
else:
|
else:
|
||||||
@@ -1197,7 +1197,7 @@ class SharePointApp(wx.Frame):
|
|||||||
url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/items/{parent_id}:/{filename}:/content"
|
url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/items/{parent_id}:/{filename}:/content"
|
||||||
try:
|
try:
|
||||||
with open(local_path, 'rb') as f:
|
with open(local_path, 'rb') as f:
|
||||||
res = requests.put(url, headers=self.headers, data=f)
|
res = requests.put(url, headers=self.headers, data=f, timeout=120)
|
||||||
if res.status_code in [200, 201]:
|
if res.status_code in [200, 201]:
|
||||||
self.set_status(self.get_txt("msg_upload_success", name=filename))
|
self.set_status(self.get_txt("msg_upload_success", name=filename))
|
||||||
self._refresh_current_view()
|
self._refresh_current_view()
|
||||||
@@ -1238,7 +1238,7 @@ class SharePointApp(wx.Frame):
|
|||||||
def _create_folder_sync(self, name, drive_id, parent_id):
|
def _create_folder_sync(self, name, drive_id, parent_id):
|
||||||
url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/items/{parent_id}/children"
|
url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/items/{parent_id}/children"
|
||||||
body = {"name": name, "folder": {}, "@microsoft.graph.conflictBehavior": "rename"}
|
body = {"name": name, "folder": {}, "@microsoft.graph.conflictBehavior": "rename"}
|
||||||
res = requests.post(url, headers=self.headers, json=body)
|
res = requests.post(url, headers=self.headers, json=body, timeout=30)
|
||||||
if res.status_code in [200, 201]:
|
if res.status_code in [200, 201]:
|
||||||
return res.json().get('id')
|
return res.json().get('id')
|
||||||
return None
|
return None
|
||||||
@@ -1248,7 +1248,7 @@ class SharePointApp(wx.Frame):
|
|||||||
filename = os.path.basename(local_path)
|
filename = os.path.basename(local_path)
|
||||||
url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/items/{parent_id}:/{filename}:/content"
|
url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/items/{parent_id}:/{filename}:/content"
|
||||||
with open(local_path, 'rb') as f:
|
with open(local_path, 'rb') as f:
|
||||||
requests.put(url, headers=self.headers, data=f)
|
requests.put(url, headers=self.headers, data=f, timeout=120)
|
||||||
|
|
||||||
def on_new_folder_clicked(self, event):
|
def on_new_folder_clicked(self, event):
|
||||||
if not self.current_drive_id: return
|
if not self.current_drive_id: return
|
||||||
@@ -1282,7 +1282,7 @@ class SharePointApp(wx.Frame):
|
|||||||
self.set_status(f"{self.get_txt('msg_rename')}...")
|
self.set_status(f"{self.get_txt('msg_rename')}...")
|
||||||
url = f"https://graph.microsoft.com/v1.0/drives/{item['drive_id']}/items/{item['id']}"
|
url = f"https://graph.microsoft.com/v1.0/drives/{item['drive_id']}/items/{item['id']}"
|
||||||
body = {"name": new_name}
|
body = {"name": new_name}
|
||||||
res = requests.patch(url, headers=self.headers, json=body)
|
res = requests.patch(url, headers=self.headers, json=body, timeout=30)
|
||||||
if res.status_code in [200, 201]:
|
if res.status_code in [200, 201]:
|
||||||
self.set_status(self.get_txt("msg_success"))
|
self.set_status(self.get_txt("msg_success"))
|
||||||
self._refresh_current_view()
|
self._refresh_current_view()
|
||||||
@@ -1319,7 +1319,7 @@ class SharePointApp(wx.Frame):
|
|||||||
|
|
||||||
def _download_file_sync_call(self, drive_id, item_id, dest_path):
|
def _download_file_sync_call(self, drive_id, item_id, dest_path):
|
||||||
url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/items/{item_id}/content"
|
url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/items/{item_id}/content"
|
||||||
res = requests.get(url, headers=self.headers)
|
res = requests.get(url, headers=self.headers, timeout=30)
|
||||||
if res.status_code == 200:
|
if res.status_code == 200:
|
||||||
with open(dest_path, 'wb') as f:
|
with open(dest_path, 'wb') as f:
|
||||||
f.write(res.content)
|
f.write(res.content)
|
||||||
@@ -1338,7 +1338,7 @@ class SharePointApp(wx.Frame):
|
|||||||
|
|
||||||
url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/items/{folder_id}/children"
|
url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/items/{folder_id}/children"
|
||||||
while url:
|
while url:
|
||||||
res = requests.get(url, headers=self.headers)
|
res = requests.get(url, headers=self.headers, timeout=30)
|
||||||
if res.status_code == 200:
|
if res.status_code == 200:
|
||||||
res_data = res.json()
|
res_data = res.json()
|
||||||
items = res_data.get('value', [])
|
items = res_data.get('value', [])
|
||||||
@@ -1715,9 +1715,9 @@ class SharePointApp(wx.Frame):
|
|||||||
self.pulse_gauge(True)
|
self.pulse_gauge(True)
|
||||||
all_sites = []
|
all_sites = []
|
||||||
url = "https://graph.microsoft.com/v1.0/sites?search=*"
|
url = "https://graph.microsoft.com/v1.0/sites?search=*"
|
||||||
|
|
||||||
while url:
|
while url:
|
||||||
res = requests.get(url, headers=self.headers)
|
res = requests.get(url, headers=self.headers, timeout=30)
|
||||||
if res.status_code == 200:
|
if res.status_code == 200:
|
||||||
data = res.json()
|
data = res.json()
|
||||||
all_sites.extend(data.get('value', []))
|
all_sites.extend(data.get('value', []))
|
||||||
@@ -1783,7 +1783,7 @@ class SharePointApp(wx.Frame):
|
|||||||
url = f"https://graph.microsoft.com/v1.0/drives/{data['drive_id']}/items/{data['id']}/children"
|
url = f"https://graph.microsoft.com/v1.0/drives/{data['drive_id']}/items/{data['id']}/children"
|
||||||
|
|
||||||
while url:
|
while url:
|
||||||
res = requests.get(url, headers=self.headers)
|
res = requests.get(url, headers=self.headers, timeout=30)
|
||||||
if res.status_code == 200:
|
if res.status_code == 200:
|
||||||
res_data = res.json()
|
res_data = res.json()
|
||||||
all_children.extend(res_data.get('value', []))
|
all_children.extend(res_data.get('value', []))
|
||||||
@@ -1906,7 +1906,7 @@ class SharePointApp(wx.Frame):
|
|||||||
|
|
||||||
first_chunk = True
|
first_chunk = True
|
||||||
while url:
|
while url:
|
||||||
res = requests.get(url, headers=self.headers)
|
res = requests.get(url, headers=self.headers, timeout=30)
|
||||||
if res.status_code != 200: break
|
if res.status_code != 200: break
|
||||||
|
|
||||||
res_data = res.json()
|
res_data = res.json()
|
||||||
@@ -2290,7 +2290,7 @@ class SharePointApp(wx.Frame):
|
|||||||
|
|
||||||
# 2. Download
|
# 2. Download
|
||||||
self.set_status(self.get_txt("msg_fetching_file", name=file_name))
|
self.set_status(self.get_txt("msg_fetching_file", name=file_name))
|
||||||
res = requests.get(f"{base_url}/content", headers=self.headers)
|
res = requests.get(f"{base_url}/content", headers=self.headers, timeout=30)
|
||||||
if res.status_code != 200:
|
if res.status_code != 200:
|
||||||
raise Exception(f"{self.get_txt('msg_unknown_error')}: {res.status_code}")
|
raise Exception(f"{self.get_txt('msg_unknown_error')}: {res.status_code}")
|
||||||
|
|
||||||
@@ -2327,7 +2327,7 @@ class SharePointApp(wx.Frame):
|
|||||||
|
|
||||||
# Checkout
|
# Checkout
|
||||||
is_checked_out = False
|
is_checked_out = False
|
||||||
checkout_res = requests.post(f"{base_url}/checkout", headers=self.headers)
|
checkout_res = requests.post(f"{base_url}/checkout", headers=self.headers, timeout=30)
|
||||||
if checkout_res.status_code in [200, 201, 204]:
|
if checkout_res.status_code in [200, 201, 204]:
|
||||||
is_checked_out = True
|
is_checked_out = True
|
||||||
logger.info(f"Fil {file_name} udtjekket succesfuldt.")
|
logger.info(f"Fil {file_name} udtjekket succesfuldt.")
|
||||||
@@ -2387,7 +2387,7 @@ class SharePointApp(wx.Frame):
|
|||||||
|
|
||||||
if is_checked_out:
|
if is_checked_out:
|
||||||
logger.info(f"Annullerer udtjekning (discardCheckout) for {file_name}...")
|
logger.info(f"Annullerer udtjekning (discardCheckout) for {file_name}...")
|
||||||
res = requests.post(f"{base_url}/discardCheckout", headers=self.headers)
|
res = requests.post(f"{base_url}/discardCheckout", headers=self.headers, timeout=30)
|
||||||
if res.status_code in [200, 204]:
|
if res.status_code in [200, 204]:
|
||||||
is_checked_out = False
|
is_checked_out = False
|
||||||
else:
|
else:
|
||||||
@@ -2395,14 +2395,14 @@ class SharePointApp(wx.Frame):
|
|||||||
logger.info(f"Ændring fundet! Uploader {file_name}...")
|
logger.info(f"Ændring fundet! Uploader {file_name}...")
|
||||||
self.set_status(self.get_txt("msg_updating_changes"))
|
self.set_status(self.get_txt("msg_updating_changes"))
|
||||||
with open(local_path, 'rb') as f:
|
with open(local_path, 'rb') as f:
|
||||||
upload_res = requests.put(f"{base_url}/content", headers=self.headers, data=f)
|
upload_res = requests.put(f"{base_url}/content", headers=self.headers, data=f, timeout=120)
|
||||||
if upload_res.status_code not in [200, 201]:
|
if upload_res.status_code not in [200, 201]:
|
||||||
raise Exception(f"{self.get_txt('msg_update_failed_code', code=upload_res.status_code)}")
|
raise Exception(f"{self.get_txt('msg_update_failed_code', code=upload_res.status_code)}")
|
||||||
|
|
||||||
# 6. Checkin (Kun hvis vi faktisk uploadede noget)
|
# 6. Checkin (Kun hvis vi faktisk uploadede noget)
|
||||||
if is_checked_out:
|
if is_checked_out:
|
||||||
self.set_status(self.get_txt("msg_checking_in", name=file_name))
|
self.set_status(self.get_txt("msg_checking_in", name=file_name))
|
||||||
res = requests.post(f"{base_url}/checkin", headers=self.headers, json={"comment": "SP Explorer Edit"})
|
res = requests.post(f"{base_url}/checkin", headers=self.headers, json={"comment": "SP Explorer Edit"}, timeout=30)
|
||||||
if res.status_code in [200, 201, 204]:
|
if res.status_code in [200, 201, 204]:
|
||||||
is_checked_out = False
|
is_checked_out = False
|
||||||
|
|
||||||
@@ -2424,7 +2424,7 @@ class SharePointApp(wx.Frame):
|
|||||||
if is_checked_out:
|
if is_checked_out:
|
||||||
# Emergency cleanup hvis vi stadig har fat i filen (f.eks. ved crash eller afbrydelse)
|
# Emergency cleanup hvis vi stadig har fat i filen (f.eks. ved crash eller afbrydelse)
|
||||||
logger.info(f"Rydder op: Kalder discardCheckout for {file_name}...")
|
logger.info(f"Rydder op: Kalder discardCheckout for {file_name}...")
|
||||||
requests.post(f"{base_url}/discardCheckout", headers=self.headers)
|
requests.post(f"{base_url}/discardCheckout", headers=self.headers, timeout=30)
|
||||||
|
|
||||||
if item_id in self.active_edits:
|
if item_id in self.active_edits:
|
||||||
del self.active_edits[item_id]
|
del self.active_edits[item_id]
|
||||||
|
|||||||
@@ -280,5 +280,50 @@ class TestTask7BareExcept(unittest.TestCase):
|
|||||||
"on_download_clicked does not sanitize item['name'] with basename for folder downloads")
|
"on_download_clicked does not sanitize item['name'] with basename for folder downloads")
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Task 8: all requests.* calls must carry a timeout= parameter
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestNetworkTimeouts(unittest.TestCase):
|
||||||
|
"""
|
||||||
|
Every requests.get/post/put/patch/delete call must include timeout=
|
||||||
|
so that background threads cannot hang indefinitely on a stalled network.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _calls_missing_timeout(self):
|
||||||
|
"""
|
||||||
|
Return a list of (line_number, line_text) for every requests.*()
|
||||||
|
call in the source file that is missing a timeout= argument.
|
||||||
|
|
||||||
|
Strategy: single-line calls are the norm in this codebase. We look
|
||||||
|
for lines that contain 'requests.METHOD(' and do NOT also contain
|
||||||
|
'timeout='. Lines that contain 'timeout=' anywhere on the same line
|
||||||
|
are considered compliant.
|
||||||
|
"""
|
||||||
|
import re
|
||||||
|
src_path = os.path.join(
|
||||||
|
os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
|
||||||
|
"sharepoint_browser.py"
|
||||||
|
)
|
||||||
|
pattern = re.compile(
|
||||||
|
r'requests\.(get|post|put|patch|delete)\('
|
||||||
|
)
|
||||||
|
missing = []
|
||||||
|
with open(src_path, encoding='utf-8') as fh:
|
||||||
|
for lineno, line in enumerate(fh, 1):
|
||||||
|
if pattern.search(line) and 'timeout=' not in line:
|
||||||
|
missing.append((lineno, line.rstrip()))
|
||||||
|
return missing
|
||||||
|
|
||||||
|
def test_all_requests_calls_have_timeout(self):
|
||||||
|
"""Task 8: No requests.* call is missing a timeout= parameter."""
|
||||||
|
missing = self._calls_missing_timeout()
|
||||||
|
if missing:
|
||||||
|
details = "\n".join(f" line {n}: {txt}" for n, txt in missing)
|
||||||
|
self.fail(
|
||||||
|
f"{len(missing)} requests call(s) are missing timeout=:\n{details}"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
unittest.main(verbosity=2)
|
unittest.main(verbosity=2)
|
||||||
|
|||||||
Reference in New Issue
Block a user