Håndter 401-fejl ved automatisk at forny download-links
This commit is contained in:
@@ -85,11 +85,19 @@ def verify_integrity(local_path, remote_hash):
|
||||
return True
|
||||
|
||||
# --- Punkt 2: Resume / Chunked Download logic ---
|
||||
def download_single_file(download_url, local_path, expected_size, display_name, remote_hash=None):
|
||||
def get_fresh_download_url(app, drive_id, item_id):
    """Fetch a fresh pre-authenticated download URL for one drive item.

    Graph's '@microsoft.graph.downloadUrl' values are short-lived; callers
    use this to replace an expired URL (e.g. after a 401 on a cached one).

    Args:
        app: Auth context forwarded to get_headers() to build request headers.
        drive_id: Identifier of the target drive.
        item_id: Identifier of the file item within that drive.

    Returns:
        The download URL string, or None when the response JSON carries no
        '@microsoft.graph.downloadUrl' key.
    """
    # Ask only for the fields we need; the download URL is the payload of interest.
    item_endpoint = (
        "https://graph.microsoft.com/v1.0/drives/"
        f"{drive_id}/items/{item_id}"
        "?$select=id,@microsoft.graph.downloadUrl"
    )
    payload = safe_get(item_endpoint, headers=get_headers(app)).json()
    return payload.get('@microsoft.graph.downloadUrl')
|
||||
|
||||
def download_single_file(app, drive_id, item_id, local_path, expected_size, display_name, remote_hash=None, initial_url=None):
|
||||
try:
|
||||
file_mode = 'wb'
|
||||
resume_header = {}
|
||||
existing_size = 0
|
||||
download_url = initial_url
|
||||
|
||||
if os.path.exists(local_path):
|
||||
existing_size = os.path.getsize(local_path)
|
||||
@@ -107,7 +115,21 @@ def download_single_file(download_url, local_path, expected_size, display_name,
|
||||
logger.info(f"Starting: {display_name} ({format_size(expected_size)})")
|
||||
os.makedirs(os.path.dirname(local_path), exist_ok=True)
|
||||
|
||||
# Initial download attempt
|
||||
if not download_url:
|
||||
download_url = get_fresh_download_url(app, drive_id, item_id)
|
||||
|
||||
response = requests.get(download_url, headers=resume_header, stream=True, timeout=120)
|
||||
|
||||
# Handle 401 Unauthorized by refreshing the URL
|
||||
if response.status_code == 401:
|
||||
logger.warning(f"URL expired for {display_name}. Fetching fresh URL...")
|
||||
download_url = get_fresh_download_url(app, drive_id, item_id)
|
||||
if not download_url:
|
||||
return False, "Failed to refresh download URL (401)"
|
||||
# Retry download with new URL
|
||||
response = requests.get(download_url, headers=resume_header, stream=True, timeout=120)
|
||||
|
||||
response.raise_for_status()
|
||||
|
||||
with open(local_path, file_mode) as f:
|
||||
@@ -153,15 +175,16 @@ def process_item_list(app, drive_id, item_path, local_root_path, report, executo
|
||||
if 'folder' in item:
|
||||
process_item_list(app, drive_id, display_path, local_path, report, executor, futures)
|
||||
elif 'file' in item:
|
||||
item_id = item['id']
|
||||
download_url = item.get('@microsoft.graph.downloadUrl')
|
||||
remote_hash = item.get('file', {}).get('hashes', {}).get('quickXorHash')
|
||||
|
||||
if not download_url:
|
||||
with report_lock:
|
||||
report.append({"Path": display_path, "Error": "No download URL", "Timestamp": datetime.now().isoformat()})
|
||||
continue
|
||||
|
||||
future = executor.submit(download_single_file, download_url, local_path, item['size'], display_path, remote_hash)
|
||||
future = executor.submit(
|
||||
download_single_file,
|
||||
app, drive_id, item_id,
|
||||
local_path, item['size'], display_path,
|
||||
remote_hash, download_url
|
||||
)
|
||||
futures[future] = display_path
|
||||
|
||||
url = data.get('@odata.nextLink')
|
||||
|
||||
Reference in New Issue
Block a user