Enterprise-grade optimizations: Windows Long Path, high-performance hashing, and documentation

- Adds 'get_long_path' to support Windows paths longer than 260 characters (see the usage sketch after the second hunk below)
- Implements dual-mode hashing: uses the 'quickxorhash' C library when available, with a manual Python fallback otherwise (probed in the sketch right below)
- Updates requirements.txt with quickxorhash
- Updates README.md and GEMINI.md with the latest features and technical specifications
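A quick way to see which hashing mode a given environment will pick, using the same optional-import gate this commit introduces (a minimal sketch; only the module name quickxorhash is taken from the diff below):

try:
    import quickxorhash as qxh_lib
except ImportError:
    qxh_lib = None

print("hashing mode:", "C library" if qxh_lib else "pure-Python fallback")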
Martin Tranberg
2026-03-29 19:33:31 +02:00
parent 367d31671d
commit c5d4ddaab0
4 changed files with 74 additions and 74 deletions


@@ -6,6 +6,10 @@ import threading
 import logging
 import base64
 import struct
+try:
+    import quickxorhash as qxh_lib
+except ImportError:
+    qxh_lib = None
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from datetime import datetime
 from msal import ConfidentialClientApplication
@@ -41,6 +45,13 @@ def format_size(size_bytes):
         size_bytes /= 1024.0
     return f"{size_bytes:.2f} EB"
 
+def get_long_path(path):
+    """Handles Windows Long Path limitation by prefixing with \\?\\ for absolute paths."""
+    path = os.path.abspath(path)
+    if os.name == 'nt' and not path.startswith("\\\\?\\"):
+        return "\\\\?\\" + path
+    return path
+
 def load_config(file_path):
     config = {}
     if not os.path.exists(file_path):
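For illustration, a self-contained usage sketch of the helper added above; the deep folder name is hypothetical:

import os

def get_long_path(path):
    # Mirrors the helper added in this hunk: absolute path + Windows extended-length prefix.
    path = os.path.abspath(path)
    if os.name == 'nt' and not path.startswith("\\\\?\\"):
        return "\\\\?\\" + path
    return path

# Hypothetical path deeper than the 260-character MAX_PATH limit.
deep_path = os.path.join("C:\\data", *(["very_long_folder_name"] * 15), "report.xlsx")
print(len(deep_path))            # well past 260 characters
print(get_long_path(deep_path))  # on Windows: \\?\C:\data\very_long_folder_name\...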
@@ -84,29 +95,34 @@ def safe_get(url, headers, stream=False, timeout=60, params=None):
 # --- Point 4: Integrity Validation (QuickXorHash) ---
 def quickxorhash(file_path):
     """Compute Microsoft QuickXorHash for a file. Returns base64-encoded string.
-    Follows the official Microsoft implementation: 160-bit circular XOR
-    with the file length XORed into the LAST 64 bits."""
+    Uses the high-performance C library if available, otherwise falls back to
+    the manual 160-bit implementation."""
+    # 1. Try the fast C library if installed
+    if qxh_lib:
+        hasher = qxh_lib.quickxorhash()
+        with open(get_long_path(file_path), 'rb') as f:
+            while True:
+                chunk = f.read(CHUNK_SIZE)
+                if not chunk: break
+                hasher.update(chunk)
+        return base64.b64encode(hasher.digest()).decode('ascii')
+    # 2. Fall back to the manual Python implementation (exact but slower)
     h = 0
     length = 0
     mask = (1 << 160) - 1
-    with open(file_path, 'rb') as f:
+    with open(get_long_path(file_path), 'rb') as f:
         while True:
             chunk = f.read(CHUNK_SIZE)
-            if not chunk:
-                break
+            if not chunk: break
             for b in chunk:
                 shift = (length * 11) % 160
                 shifted = b << shift
                 wrapped = (shifted & mask) | (shifted >> 160)
                 h ^= wrapped
                 length += 1
     # Finalize: XOR the 64-bit length into the LAST 64 bits of the 160-bit state.
     # Bits 96 to 159.
     h ^= (length << (160 - 64))
     # Convert to 20 bytes (160 bits) in little-endian format
     result = h.to_bytes(20, byteorder='little')
     return base64.b64encode(result).decode('ascii')
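A sanity-check sketch for the two code paths above, hashing the same in-memory data with the fallback math and, when installed, the C library; it assumes quickxorhash exposes the quickxorhash()/update()/digest() API exactly as used in this hunk:

import base64

def manual_qxh(data):
    # Pure-Python QuickXorHash over a byte string, mirroring the fallback branch above.
    h = 0
    length = 0
    mask = (1 << 160) - 1
    for b in data:
        shift = (length * 11) % 160
        shifted = b << shift
        h ^= (shifted & mask) | (shifted >> 160)
        length += 1
    h ^= (length << 96)  # XOR the 64-bit length into bits 96..159
    return base64.b64encode(h.to_bytes(20, byteorder='little')).decode('ascii')

try:
    import quickxorhash as qxh_lib
except ImportError:
    qxh_lib = None

data = b"sample payload " * 1024
print("fallback :", manual_qxh(data))
if qxh_lib:
    hasher = qxh_lib.quickxorhash()
    hasher.update(data)
    print("C library:", base64.b64encode(hasher.digest()).decode('ascii'))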
@@ -115,7 +131,7 @@ def verify_integrity(local_path, remote_hash):
     if not remote_hash or not ENABLE_HASH_VALIDATION:
         return True
-    file_size = os.path.getsize(local_path)
+    file_size = os.path.getsize(get_long_path(local_path))
     threshold_bytes = HASH_THRESHOLD_MB * 1024 * 1024
     if file_size > threshold_bytes:
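The threshold arithmetic in concrete numbers, using an illustrative HASH_THRESHOLD_MB of 500 (the real value comes from config):

HASH_THRESHOLD_MB = 500  # illustrative; loaded from config in the script
threshold_bytes = HASH_THRESHOLD_MB * 1024 * 1024
print(threshold_bytes)  # 524288000 -> files larger than this skip hash validation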
@@ -217,13 +233,14 @@ def download_single_file(app, drive_id, item_id, local_path, expected_size, disp
     resume_header = {}
     existing_size = 0
     download_url = initial_url
+    long_local_path = get_long_path(local_path)
-    if os.path.exists(local_path):
-        existing_size = os.path.getsize(local_path)
-        local_mtime = os.path.getmtime(local_path)
+    if os.path.exists(long_local_path):
+        existing_size = os.path.getsize(long_local_path)
+        local_mtime = os.path.getmtime(long_local_path)
         # Convert the SharePoint ISO8601 UTC time (e.g. 2024-03-29T12:00:00Z) to a unix timestamp
         # We strip the 'Z' and use datetime.fromisoformat
         remote_mtime = datetime.fromisoformat(remote_mtime_str.replace('Z', '+00:00')).timestamp()
         # If the file exists, has the right size AND local is not older than remote -> SKIP
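The 'Z' replacement matters because datetime.fromisoformat only accepts a trailing 'Z' from Python 3.11 onward; substituting '+00:00' keeps the parse working on older interpreters. A minimal check, using the example timestamp from the comment above:

from datetime import datetime

remote_mtime_str = "2024-03-29T12:00:00Z"
ts = datetime.fromisoformat(remote_mtime_str.replace('Z', '+00:00')).timestamp()
print(ts)  # 1711713600.0 (UTC)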
@@ -233,7 +250,7 @@ def download_single_file(app, drive_id, item_id, local_path, expected_size, disp
logger.info(f"Skipped (up-to-date): {display_name}")
return True, None
else:
logger.warning(f"Hash mismatch on existing file: {display_name}. Re-downloading.")
logger.warning(f"Hash mismatch on existing file: {display_name}. Re-downloading.")
existing_size = 0
else:
logger.info(f"Update available: {display_name} (Remote is newer)")
@@ -252,7 +269,7 @@ def download_single_file(app, drive_id, item_id, local_path, expected_size, disp
         existing_size = 0
     logger.info(f"Starting: {display_name} ({format_size(expected_size)})")
-    os.makedirs(os.path.dirname(local_path), exist_ok=True)
+    os.makedirs(os.path.dirname(long_local_path), exist_ok=True)
     # Initial download attempt
     if not download_url:
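The hunk above only shows the makedirs change; for orientation, resuming a partial download is conventionally done with an HTTP Range header, roughly as follows (a generic sketch, not code taken from this commit):

existing_size = 1024  # hypothetical size of a partially downloaded file
resume_header = {'Range': f'bytes={existing_size}-'}  # ask the server to continue after the existing bytes
file_mode = 'ab' if existing_size else 'wb'           # append when resuming, otherwise write fresh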
@@ -273,13 +290,24 @@ def download_single_file(app, drive_id, item_id, local_path, expected_size, disp
         else:
             raise
-        with open(local_path, file_mode) as f:
+        with open(long_local_path, file_mode) as f:
             for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
                 if chunk:
                     f.write(chunk)
         # Post-download check
-        final_size = os.path.getsize(local_path)
+        final_size = os.path.getsize(long_local_path)
+        if final_size == expected_size:
+            if verify_integrity(local_path, remote_hash):
+                logger.info(f"DONE: {display_name}")
+                return True, None
+            else:
+                return False, "Integrity check failed (Hash mismatch)"
+        else:
+            return False, f"Size mismatch: Remote={expected_size}, Local={final_size}"
     except Exception as e:
         return False, str(e)