From 483dc70ef85bc970502ec7920c7a941702454f4e Mon Sep 17 00:00:00 2001
From: Martin Tranberg
Date: Fri, 27 Mar 2026 09:15:57 +0100
Subject: [PATCH] =?UTF-8?q?H=C3=A5ndter=20401-fejl=20ved=20automatisk=20at?=
 =?UTF-8?q?=20forny=20download-links?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 download_sharepoint.py | 39 ++++++++++++++++++++++++++++++++-------
 1 file changed, 32 insertions(+), 7 deletions(-)

diff --git a/download_sharepoint.py b/download_sharepoint.py
index 4c7df1b..223a3f8 100644
--- a/download_sharepoint.py
+++ b/download_sharepoint.py
@@ -85,11 +85,19 @@ def verify_integrity(local_path, remote_hash):
     return True
 
 # --- Punkt 2: Resume / Chunked Download logic ---
-def download_single_file(download_url, local_path, expected_size, display_name, remote_hash=None):
+def get_fresh_download_url(app, drive_id, item_id):
+    """Fetches a fresh download URL for a specific item ID."""
+    url = f"https://graph.microsoft.com/v1.0/drives/{drive_id}/items/{item_id}?$select=id,@microsoft.graph.downloadUrl"
+    headers = get_headers(app)
+    response = safe_get(url, headers=headers)
+    return response.json().get('@microsoft.graph.downloadUrl')
+
+def download_single_file(app, drive_id, item_id, local_path, expected_size, display_name, remote_hash=None, initial_url=None):
     try:
         file_mode = 'wb'
         resume_header = {}
         existing_size = 0
+        download_url = initial_url
 
         if os.path.exists(local_path):
             existing_size = os.path.getsize(local_path)
@@ -107,7 +115,23 @@ def download_single_file(download_url, local_path, expected_size, display_name,
         logger.info(f"Starting: {display_name} ({format_size(expected_size)})")
         os.makedirs(os.path.dirname(local_path), exist_ok=True)
 
+        # Initial download attempt: the item listing may omit the pre-signed
+        # URL, so fall back to fetching one explicitly. Guard against a None
+        # URL here just like the 401-retry path below does, instead of letting
+        # requests.get(None) blow up with MissingSchema.
+        if not download_url:
+            download_url = get_fresh_download_url(app, drive_id, item_id)
+            if not download_url:
+                return False, "Failed to obtain download URL"
         response = requests.get(download_url, headers=resume_header, stream=True, timeout=120)
+
+        # Handle 401 Unauthorized by refreshing the URL
+        if response.status_code == 401:
+            logger.warning(f"URL expired for {display_name}. Fetching fresh URL...")
+            download_url = get_fresh_download_url(app, drive_id, item_id)
+            if not download_url:
+                return False, "Failed to refresh download URL (401)"
+            # Retry download with new URL
+            response = requests.get(download_url, headers=resume_header, stream=True, timeout=120)
+
         response.raise_for_status()
 
         with open(local_path, file_mode) as f:
@@ -153,15 +177,16 @@ def process_item_list(app, drive_id, item_path, local_root_path, report, executo
             if 'folder' in item:
                 process_item_list(app, drive_id, display_path, local_path, report, executor, futures)
             elif 'file' in item:
+                item_id = item['id']
                 download_url = item.get('@microsoft.graph.downloadUrl')
                 remote_hash = item.get('file', {}).get('hashes', {}).get('quickXorHash')
 
-                if not download_url:
-                    with report_lock:
-                        report.append({"Path": display_path, "Error": "No download URL", "Timestamp": datetime.now().isoformat()})
-                    continue
-
-                future = executor.submit(download_single_file, download_url, local_path, item['size'], display_path, remote_hash)
+                future = executor.submit(
+                    download_single_file,
+                    app, drive_id, item_id,
+                    local_path, item['size'], display_path,
+                    remote_hash, download_url
+                )
                 futures[future] = display_path
 
         url = data.get('@odata.nextLink')