fix: using webdav in code

This commit is contained in:
  Author: administrateur
  Date:   2025-03-29 17:46:18 +01:00
  Parent: bc0501e223
  Commit: 53c5592bc8

View File

@@ -4,29 +4,23 @@ import subprocess
 import base64
 import requests
 import redis
-from threading import Thread, Lock
-import time
+import shutil
+from threading import Lock
+from webdav3.client import Client
 
 app = Flask(__name__)
 
 # Environment variables
-MINIO_BUCKET = os.getenv("MINIO_BUCKET", "nextcloud")
-MINIO_HOST = os.getenv("MINIO_HOST", "minio.minio.svc.cluster.local:9000")
-MINIO_REGION = os.getenv("MINIO_REGION", "us-east-1")
-MINIO_MOUNT_PATH = "/mnt"
+NEXTCLOUD_URL_DAV = os.getenv("NEXTCLOUD_URL_DAV")
+NEXTCLOUD_USER = os.getenv("NEXTCLOUD_USER")
+NEXTCLOUD_PASSWORD = os.getenv("NEXTCLOUD_PASSWORD")
 N8N_WEBHOOK_URL = os.getenv("N8N_WEBHOOK_URL", "https://n8n.n8n.svc.kube.ia86.cc/webhook/7950310f-e526-475a-82d1-63818da79339")
 DEBUG = bool(os.getenv("DEBUG", True))
-S3_ACCESS_KEY = os.getenv("S3_ACCESS_KEY")
-S3_SECRET_KEY = os.getenv("S3_SECRET_KEY")
-if not S3_ACCESS_KEY or not S3_SECRET_KEY:
-    raise ValueError("❌ ERROR: Missing S3_ACCESS_KEY or S3_SECRET_KEY")
 
 redis_host = os.getenv("REDIS_HOST", "redis.redis.svc.cluster.local")
 redis_port = int(os.getenv("REDIS_PORT", 6379))
 redis_client = redis.Redis(host=redis_host, port=redis_port, decode_responses=True)
-S3_UNMOUNT_CHECK_INTERVAL = int(os.getenv("S3_UNMOUNT_CHECK_INTERVAL", 300))
 
 lock_prefix = "lock:mkdocs:"
 local_lock = Lock()
@@ -34,58 +28,25 @@ def log_debug(msg):
     if DEBUG:
         print(f"🔍 DEBUG: {msg}", flush=True)
 
-def mount_s3():
-    log_debug("Attempting S3 mount...")
-    with local_lock:
-        if not os.path.ismount(MINIO_MOUNT_PATH):
-            log_debug(f"Mount path {MINIO_MOUNT_PATH} not mounted. Mounting now.")
-            os.makedirs(MINIO_MOUNT_PATH, exist_ok=True)
-            credentials_file = "/etc/passwd-s3fs"
-            log_debug(f"Creating credentials file: {credentials_file}")
-            with open(credentials_file, "w") as f:
-                f.write(f"{S3_ACCESS_KEY}:{S3_SECRET_KEY}\n")
-            os.chmod(credentials_file, 0o600)
-            cmd = (
-                f"s3fs {MINIO_BUCKET} {MINIO_MOUNT_PATH} "
-                f"-o passwd_file={credentials_file} "
-                f"-o url=https://{MINIO_HOST} "
-                "-o use_path_request_style "
-                "-o allow_other"
-            )
-            log_debug(f"Executing S3 mount command: {cmd}")
-            result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
-            if result.returncode != 0:
-                log_debug(f"S3 mount failed: {result.stderr}")
-                raise RuntimeError(f"S3 mount error: {result.stderr}")
-            log_debug("S3 mounted successfully.")
-        else:
-            log_debug("S3 already mounted, skipping.")
-
-def unmount_s3():
-    log_debug("Attempting S3 unmount...")
-    with local_lock:
-        if os.path.ismount(MINIO_MOUNT_PATH):
-            cmd = f"fusermount -u {MINIO_MOUNT_PATH}"
-            result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
-            if result.returncode == 0:
-                log_debug("S3 unmounted successfully.")
-            else:
-                log_debug(f"S3 unmount failed: {result.stderr}")
-        else:
-            log_debug("No active S3 mount to unmount.")
-
-def unmount_checker():
-    while True:
-        log_debug("Periodic check for S3 unmount...")
-        time.sleep(S3_UNMOUNT_CHECK_INTERVAL)
-        active_locks = redis_client.keys(f"{lock_prefix}*")
-        if not active_locks:
-            log_debug("No active builds detected, proceeding to unmount S3.")
-            unmount_s3()
-        else:
-            log_debug("Active builds detected, keeping S3 mounted.")
+def sync_from_nextcloud(website, tmp_path):
+    remote_path = f"/sites/@{website}/"
+    local_path = tmp_path
+
+    if os.path.exists(local_path):
+        shutil.rmtree(local_path)
+    os.makedirs(local_path, exist_ok=True)
+
+    options = {
+        'webdav_hostname': NEXTCLOUD_URL_DAV,
+        'webdav_login': NEXTCLOUD_USER,
+        'webdav_password': NEXTCLOUD_PASSWORD
+    }
+
+    client = Client(options)
+
+    log_debug(f"Starting WebDAV sync from '{remote_path}' to '{local_path}'")
+    client.download_sync(remote_path=remote_path, local_path=local_path)
+    log_debug("WebDAV sync completed successfully.")
@app.route("/build", methods=["POST"]) @app.route("/build", methods=["POST"])
def build_mkdocs(): def build_mkdocs():
@@ -110,24 +71,22 @@ def build_mkdocs():
         log_debug(f"Build already active for website: {website}")
         return jsonify({"status": "busy", "message": f"Build already active: {website}"}), 429
 
+    tmp_path = f"/tmp/{website}"
+    compile_path = f"{tmp_path}#compile"
+    final_path = f"/srv/{website}"
+
     try:
-        mount_s3()
-
-        src = f"{MINIO_MOUNT_PATH}/files/sites/@{website}/mkdocs.yml"
-        tmp = f"/srv/{website}"
+        sync_from_nextcloud(website, tmp_path)
+        src = os.path.join(tmp_path, "mkdocs.yml")
 
         log_debug(f"Checking if mkdocs.yml exists at {src}")
-        timeout = 10
-        start_time = time.time()
-        while not os.path.exists(src):
-            if time.time() - start_time > timeout:
-                log_debug(f"Timeout: {src} not found after {timeout}s")
-                return jsonify({"error": f"{src} not found after mount"}), 404
-            log_debug(f"Waiting for {src} to appear...")
-            time.sleep(0.5)
+        if not os.path.exists(src):
+            log_debug(f"{src} not found after sync")
+            return jsonify({"error": f"{src} not found after sync"}), 404
 
-        log_debug(f"Running MkDocs build: {src} -> {tmp}")
-        cmd = f"mkdocs build --quiet --no-strict --config-file {src} --site-dir {tmp}"
+        log_debug(f"Running MkDocs build: {src} -> {compile_path}")
+        cmd = f"mkdocs build --quiet --no-strict --config-file {src} --site-dir {compile_path}"
         result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
         if result.returncode != 0:
@@ -137,7 +96,10 @@ def build_mkdocs():
             requests.post(error_callback, json=json_payload, headers={"Content-Type": "application/json"})
             return jsonify({"status": "error", "message": "Build failed", "error": result.stderr}), 500
 
-        log_debug(f"MkDocs build successful for website: {website}")
+        log_debug(f"Performing differential copy from {compile_path} to {final_path}")
+        subprocess.run(f"rsync -a --delete {compile_path}/ {final_path}/", shell=True, check=True)
+
+        log_debug(f"MkDocs build and sync successful for website: {website}")
         return jsonify({"status": "success", "message": "Build successful"}), 200
     finally:
@@ -146,6 +108,4 @@ def build_mkdocs():
 if __name__ == "__main__":
     log_debug("Starting Flask server on 0.0.0.0:80")
-    checker_thread = Thread(target=unmount_checker, daemon=True)
-    checker_thread.start()
     app.run(host="0.0.0.0", port=80)