diff --git a/pyproject.toml b/pyproject.toml index f60b16d6..f4e51953 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,6 +23,7 @@ dependencies = [ "httpx>=0.27.2", "pyyaml>=6.0.2", "psutil>=6.0.0", + "loguru>=0.7.2", ] [project.optional-dependencies] diff --git a/syftbox/app/manager.py b/syftbox/app/manager.py index f29e86ff..5f73f83d 100644 --- a/syftbox/app/manager.py +++ b/syftbox/app/manager.py @@ -4,6 +4,8 @@ from collections import namedtuple from pathlib import Path +from loguru import logger + from ..lib import DEFAULT_CONFIG_PATH, ClientConfig from .install import install @@ -19,12 +21,12 @@ def list_app(client_config: ClientConfig, silent: bool = False) -> list[str]: if len(apps): if not silent: - print("\nInstalled apps:") + logger.info("\nInstalled apps:") for app in apps: - print(f"✅ {app}") + logger.info(f"✅ {app}") else: if not silent: - print( + logger.info( "\nYou have no apps installed.\n\n" f"Try:\nsyftbox app install OpenMined/github_app_updater\n\nor copy an app to: {apps_path}" ) @@ -32,15 +34,15 @@ def list_app(client_config: ClientConfig, silent: bool = False) -> list[str]: def uninstall_app(client_config: ClientConfig) -> None: - print("Uninstalling Apps") + logger.info("Uninstalling Apps") def update_app(client_config: ClientConfig) -> None: - print("Updating Apps") + logger.info("Updating Apps") def upgrade_app(client_config: ClientConfig) -> None: - print("Upgrading Apps") + logger.info("Upgrading Apps") Commands = namedtuple("Commands", ["description", "execute"]) @@ -105,6 +107,6 @@ def main(parser, args_list) -> None: # we should make this a type if isinstance(result, tuple): step, exception = result - print(f"Error during {step}: ", str(exception)) + logger.info(f"Error during {step}: ", str(exception)) else: parser.print_help() diff --git a/syftbox/client/client.py b/syftbox/client/client.py index 85c10d00..9683346a 100644 --- a/syftbox/client/client.py +++ b/syftbox/client/client.py @@ -5,9 +5,9 @@ import os import sys 
import time -import traceback import types from dataclasses import dataclass +from datetime import datetime from functools import partial from pathlib import Path from typing import Any @@ -24,6 +24,7 @@ from fastapi.responses import FileResponse, HTMLResponse, JSONResponse from fastapi.staticfiles import StaticFiles from fastapi.templating import Jinja2Templates +from loguru import logger from pydantic import BaseModel from syftbox.client.fsevents import ( @@ -31,12 +32,14 @@ FileSystemEvent, FSWatchdog, ) +from syftbox.client.utils.error_reporting import make_error_report from syftbox.lib import ( DEFAULT_CONFIG_PATH, ClientConfig, SharedState, load_or_create_config, ) +from syftbox.lib.logger import zip_logs class CustomFastAPI(FastAPI): @@ -116,7 +119,7 @@ def load_plugins(client_config: ClientConfig) -> dict[str, Plugin]: ) loaded_plugins[plugin_name] = plugin except Exception as e: - print(e) + logger.info(e) return loaded_plugins @@ -166,11 +169,10 @@ def run_plugin(plugin_name, *args, **kwargs): module = app.loaded_plugins[plugin_name].module module.run(app.shared_state, *args, **kwargs) except Exception as e: - traceback.print_exc() - print("error", e) + logger.exception(e) -def start_plugin(app: FastAPI, plugin_name: str): +def start_plugin(app: CustomFastAPI, plugin_name: str): if plugin_name not in app.loaded_plugins: raise HTTPException( status_code=400, @@ -202,7 +204,7 @@ def start_plugin(app: FastAPI, plugin_name: str): } return {"message": f"Plugin {plugin_name} started successfully"} else: - print(f"Job {existing_job}, already added") + logger.info(f"Job {existing_job}, already added") return {"message": f"Plugin {plugin_name} already started"} except Exception as e: raise HTTPException( @@ -228,6 +230,15 @@ def parse_args(): default="https://syftbox.openmined.org", help="Server", ) + subparsers = parser.add_subparsers(dest="command", help="Sub-command help") + start_parser = subparsers.add_parser("report", help="Generate an error report") + 
start_parser.add_argument( + "--path", + type=str, + help="Path to the error report file", + default=f"./syftbox_logs_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}", + ) + return parser.parse_args() @@ -250,7 +261,7 @@ def sync_on_event(event: FileSystemEvent): @contextlib.asynccontextmanager async def lifespan(app: CustomFastAPI, client_config: ClientConfig | None = None): # Startup - print("> Starting Client") + logger.info("> Starting Client") # client_config needs to be closed if it was created in this context # if it is passed as lifespan arg (eg for testing) it should be managed by the caller instead. @@ -266,7 +277,7 @@ async def lifespan(app: CustomFastAPI, client_config: ClientConfig | None = None app.job_file = job_file if os.path.exists(job_file): os.remove(job_file) - print(f"> Cleared existing job file: {job_file}") + logger.info(f"> Cleared existing job file: {job_file}") # Start the scheduler jobstores = {"default": SQLAlchemyJobStore(url=f"sqlite:///{job_file}")} @@ -277,16 +288,16 @@ async def lifespan(app: CustomFastAPI, client_config: ClientConfig | None = None app.scheduler = scheduler app.running_plugins = {} app.loaded_plugins = load_plugins(client_config) - print("> Loaded plugins:", sorted(list(app.loaded_plugins.keys()))) + logger.info("> Loaded plugins:", sorted(list(app.loaded_plugins.keys()))) app.watchdog = start_watchdog(app) - print("> Starting autorun plugins:", sorted(client_config.autorun_plugins)) + logger.info("> Starting autorun plugins:", sorted(client_config.autorun_plugins)) for plugin in client_config.autorun_plugins: start_plugin(app, plugin) yield # This yields control to run the application - print("> Shutting down...") + logger.info("> Shutting down...") scheduler.shutdown() app.watchdog.stop() if close_client_config: @@ -297,7 +308,7 @@ def stop_scheduler(app: FastAPI): # Remove the lock file if it exists if os.path.exists(app.job_file): os.remove(app.job_file) - print("> Scheduler stopped and lock file removed.") + 
logger.info("> Scheduler stopped and lock file removed.") app: CustomFastAPI = FastAPI(lifespan=lifespan) @@ -463,12 +474,22 @@ def get_syftbox_src_path(): def main() -> None: args = parse_args() client_config = load_or_create_config(args) + error_config = make_error_report(client_config) + + if args.command == "report": + output_path = Path(args.path).resolve() + output_path_with_extension = zip_logs(output_path) + logger.info(f"Logs saved to: {output_path_with_extension}.") + logger.info("Please share your bug report together with the zipped logs") + return + + logger.info(f"Client metadata: {error_config.model_dump_json(indent=2)}") os.environ["SYFTBOX_DATASITE"] = client_config.email os.environ["SYFTBOX_CLIENT_CONFIG_PATH"] = client_config.config_path - print("Dev Mode: ", os.environ.get("SYFTBOX_DEV")) - print("Wheel: ", os.environ.get("SYFTBOX_WHEEL")) + logger.info("Dev Mode: ", os.environ.get("SYFTBOX_DEV")) + logger.info("Wheel: ", os.environ.get("SYFTBOX_WHEEL")) debug = True port = client_config.port @@ -488,9 +509,9 @@ def main() -> None: except SystemExit as e: if e.code != 1: # If it's not the "Address already in use" error raise - print(f"Failed to start server on port {port}. Trying next port.") + logger.info(f"Failed to start server on port {port}. 
Trying next port.") port = 0 - print(f"Unable to find an available port after {max_attempts} attempts.") + logger.info(f"Unable to find an available port after {max_attempts} attempts.") sys.exit(1) diff --git a/syftbox/client/plugins/apps.py b/syftbox/client/plugins/apps.py index 014c2f57..a823174e 100644 --- a/syftbox/client/plugins/apps.py +++ b/syftbox/client/plugins/apps.py @@ -44,10 +44,10 @@ def find_and_run_script(task_path, extra_args): env=env, ) - # print("✅ Script run.sh executed successfully.") + # logger.info("✅ Script run.sh executed successfully.") return result except Exception as e: - print("Error running shell script", e) + logger.info("Error running shell script", e) else: raise FileNotFoundError(f"run.sh not found in {task_path}") @@ -64,7 +64,7 @@ def find_and_run_script(task_path, extra_args): # def copy_default_apps(apps_path): # if not os.path.exists(DEFAULT_APPS_PATH): -# print(f"Default apps directory not found: {DEFAULT_APPS_PATH}") +# logger.info(f"Default apps directory not found: {DEFAULT_APPS_PATH}") # return # for app in os.listdir(DEFAULT_APPS_PATH): @@ -73,11 +73,11 @@ def find_and_run_script(task_path, extra_args): # if os.path.isdir(src_app_path): # if os.path.exists(dst_app_path): -# print(f"App already installed at: {dst_app_path}") +# logger.info(f"App already installed at: {dst_app_path}") # # shutil.rmtree(dst_app_path) # else: # shutil.copytree(src_app_path, dst_app_path) -# print(f"Copied default app: {app}") +# logger.info(f"Copied default app: {app}") def dict_to_namespace(data) -> SimpleNamespace | list | Any: @@ -113,12 +113,13 @@ def run_apps(client_config): if os.path.exists(file_path): perm_file = SyftPermission.load(file_path) else: - print(f"> {client_config.email} Creating Apps Permfile") + logger.info(f"> {client_config.email} Creating Apps Permfile") try: perm_file = SyftPermission.datasite_default(client_config.email) perm_file.save(file_path) except Exception as e: - print("Failed to create perm file", e) + 
logger.error("Failed to create perm file") + logger.exception(e) apps = os.listdir(apps_path) for app in apps: @@ -128,7 +129,7 @@ def run_apps(client_config): if app_config is None: run_app(client_config, app_path) elif RUNNING_APPS.get(app, None) is None: - print("⏱ Scheduling a new app run.") + logger.info("⏱ Scheduling a new app run.") thread = threading.Thread( target=run_custom_app_config, args=(client_config, app_config, app_path), @@ -157,7 +158,7 @@ def run_custom_app_config(client_config, app_config, path): env.update(app_envs) while True: - print(f"👟 Running {app_name}") + logger.info(f"👟 Running {app_name}") _ = subprocess.run( app_config.app.run.command, cwd=path, @@ -174,22 +175,22 @@ def run_app(client_config, path): extra_args = [] try: - print(f"👟 Running {app_name} app", end="") + logger.info(f"👟 Running {app_name} app", end="") result = find_and_run_script(path, extra_args) if hasattr(result, "returncode"): if "Already generated" not in str(result.stdout): - print("\n") - print(result.stdout) + logger.info("\n") + logger.info(result.stdout) else: - print(" - no change") + logger.info(" - no change") exit_code = result.returncode if exit_code != 0: - print(f"Error running: {app_name}", result.stdout, result.stderr) + logger.info(f"Error running: {app_name}", result.stdout, result.stderr) except Exception as e: - print(f"Failed to run. {e}") + logger.info(f"Failed to run. 
{e}") def run(shared_state): - # print("> Running Apps") + # logger.info("> Running Apps") client_config = shared_state.client_config run_apps(client_config) diff --git a/syftbox/client/plugins/create_datasite.py b/syftbox/client/plugins/create_datasite.py index 4df42fdf..ecdac75a 100644 --- a/syftbox/client/plugins/create_datasite.py +++ b/syftbox/client/plugins/create_datasite.py @@ -18,12 +18,13 @@ def claim_datasite(client_config): if os.path.exists(file_path): perm_file = SyftPermission.load(file_path) else: - print(f"> {client_config.email} Creating Datasite + Permfile") + logger.info(f"> {client_config.email} Creating Datasite + Permfile") try: perm_file = SyftPermission.datasite_default(client_config.email) perm_file.save(file_path) except Exception as e: - print("Failed to create perm file", e) + logger.error("Failed to create perm file") + logger.exception(e) public_path = client_config.datasite_path + "/" + "public" os.makedirs(public_path, exist_ok=True) @@ -31,12 +32,13 @@ def claim_datasite(client_config): if os.path.exists(public_file_path): public_perm_file = SyftPermission.load(public_file_path) else: - print(f"> {client_config.email} Creating Public Permfile") + logger.info(f"> {client_config.email} Creating Public Permfile") try: public_perm_file = SyftPermission.mine_with_public_read(client_config.email) public_perm_file.save(public_file_path) except Exception as e: - print("Failed to create perm file", e) + logger.error("Failed to create perm file") + logger.exception(e) def run(shared_state): diff --git a/syftbox/client/plugins/init.py b/syftbox/client/plugins/init.py index 4fd23e17..8672af4b 100644 --- a/syftbox/client/plugins/init.py +++ b/syftbox/client/plugins/init.py @@ -1,5 +1,7 @@ from threading import Event +from loguru import logger + stop_event = Event() @@ -21,4 +23,4 @@ def run(shared_state): if not stop_event.is_set(): if not shared_state.client_config.token: register(shared_state.client_config) - print("> Register Complete") + 
logger.info("> Register Complete") diff --git a/syftbox/client/plugins/sync.py b/syftbox/client/plugins/sync.py index 92249032..5b7bb1f0 100644 --- a/syftbox/client/plugins/sync.py +++ b/syftbox/client/plugins/sync.py @@ -1,9 +1,9 @@ import os -import traceback from collections import defaultdict from datetime import datetime from threading import Event +from loguru import logger from watchdog.events import DirModifiedEvent from syftbox.lib import ( @@ -54,7 +54,7 @@ def filter_ignore_files(dir_state: DirState) -> DirState: for afile, file_info in dir_state.tree.items(): full_path = folder_path + "/" + afile if full_path.startswith(rule_prefix): - # print("> File ignored by .syftignore", afile, ignore_rule) + # logger.info("> File ignored by .syftignore", afile, ignore_rule) if afile in pruned_tree: del pruned_tree[afile] @@ -114,7 +114,7 @@ def diff_dirstate(old: DirState, new: DirState): kind = FileChangeKind.WRITE else: pass - # print( + # logger.info( # old_sub_path, # afile, # f"> 🔥 File hash eq=={old_file_info.file_hash == file_info.file_hash} " @@ -154,12 +154,12 @@ def diff_dirstate(old: DirState, new: DirState): ) changes.append(change) else: - print( + logger.info( f"🔥 Skipping delete {file_info}. 
File change is < 3 seconds ago" ) return changes except Exception as e: - print("Error in diff_dirstate", str(e)) + logger.info("Error in diff_dirstate", str(e)) raise e @@ -268,13 +268,13 @@ def push_changes( if "accepted" in write_response and write_response["accepted"]: written_changes.append(ok_change) else: - print("> 🔥 Rejected change", ok_change) + logger.info("> 🔥 Rejected change", ok_change) else: - print( + logger.info( f"> {client_config.email} FAILED /write {change.kind} {change.internal_path}", ) except Exception as e: - print( + logger.info( f"Failed to call /write on the server for {change.internal_path}", str(e), ) @@ -309,12 +309,12 @@ def pull_changes(client_config, changes: list[FileChange]): if response.status_code == 200: remote_changes.append((ok_change, data)) else: - print( + logger.info( f"> {client_config.email} FAILED /read {change.kind} {change.internal_path}", ) except Exception as e: - traceback.print_exc() - print("Failed to call /read on the server", str(e)) + logger.error("Failed to call /read on the server") + logger.exception(e) return remote_changes @@ -330,9 +330,10 @@ def list_datasites(client_config: ClientConfig): if response.status_code == 200: datasites = remote_datasites else: - print(f"> {client_config.email} FAILED /list_datasites") + logger.info(f"> {client_config.email} FAILED /list_datasites") except Exception as e: - print("Failed to call /list_datasites on the server", str(e)) + logger.error("Failed to call /list_datasites on the server") + logger.exception(e) return datasites @@ -357,15 +358,16 @@ def get_remote_state(client_config: ClientConfig, sub_path: str): dir_state.tree = fix_tree return dir_state else: - print( + logger.info( "/dir_state returned a bad result", type(state_response), state_response, ) - print(f"> {client_config.email} FAILED /dir_state: {sub_path}") + logger.info(f"> {client_config.email} FAILED /dir_state: {sub_path}") return None except Exception as e: - print("Failed to call /dir_state on 
the server", str(e)) + logger.error("Failed to call /dir_state on the server") + logger.exception(e) def create_datasites(client_config): @@ -554,7 +556,7 @@ def sync_down(client_config) -> int: remote_dir_state = get_remote_state(client_config, datasite) if not remote_dir_state: - # print(f"No remote state for dir: {datasite}") + # logger.info(f"No remote state for dir: {datasite}") continue pre_filter_changes = diff_dirstate(new_dir_state, remote_dir_state) @@ -630,7 +632,7 @@ def sync_down(client_config) -> int: change_text += ascii_for_change(deleted_files) if len(change_text) > 0: - print(change_text) + logger.info(change_text) synced_dir_state.save(dir_filename) n_changes += len(changed_files) + len(deleted_files) @@ -652,33 +654,34 @@ def do_sync(shared_state): try: create_datasites(shared_state.client_config) except Exception as e: - traceback.print_exc() - print("failed to get_datasites", e) + logger.error("failed to get_datasites", e) + logger.exception(e) try: if SYNC_UP_ENABLED: num_changes += sync_up(shared_state.client_config) else: - print("❌ Sync Up Disabled") + logger.info("❌ Sync Up Disabled") except Exception as e: - traceback.print_exc() - print("failed to sync up", e) + logger.error("failed to sync up", e) + logger.exception(e) try: if SYNC_DOWN_ENABLED: num_changes += sync_down(shared_state.client_config) else: - print("❌ Sync Down Disabled") + logger.info("❌ Sync Down Disabled") except Exception as e: - traceback.print_exc() - print("failed to sync down", e) + logger.error("failed to sync down", e) + logger.exception(e) if num_changes == 0: if event_length: - print(f"✅ Synced {event_length} File Events") + logger.info(f"✅ Synced {event_length} File Events") else: - print("✅ Synced due to Timer") + logger.info("✅ Synced due to Timer") except Exception as e: - print("Failed to run plugin", e) + logger.error("Failed to run plugin") + logger.exception(e) FLUSH_SYNC_TIMEOUT = 0.5 diff --git a/syftbox/client/utils/error_reporting.py 
b/syftbox/client/utils/error_reporting.py new file mode 100644 index 00000000..26c8e3bc --- /dev/null +++ b/syftbox/client/utils/error_reporting.py @@ -0,0 +1,40 @@ +import datetime +import sys +from platform import platform + +import requests +from pydantic import BaseModel, Field + +import syftbox +from syftbox.lib.lib import ClientConfig + + +class ErrorReport(BaseModel): + client_config: dict + server_syftbox_version: str | None = None + client_syftbox_version: str = syftbox.__version__ + python_version: str = sys.version + platform: str = platform() + timestamp: datetime.datetime = Field( + default_factory=lambda: datetime.datetime.now(datetime.timezone.utc) + ) + + @classmethod + def from_client_config(cls, client_config: ClientConfig): + client_config.token = None + return cls( + client_config=client_config.to_dict(), + server_syftbox_version=try_get_server_version(client_config.server_url), + ) + + +def make_error_report(client_config: ClientConfig): + return ErrorReport.from_client_config(client_config) + + +def try_get_server_version(server_url): + try: + # do not use the server_client here, as it may be in a bad state + return requests.get(f"{server_url}/info").json()["version"] + except Exception: + return None diff --git a/syftbox/lib/__init__.py b/syftbox/lib/__init__.py index fb1d65a3..8244e831 100644 --- a/syftbox/lib/__init__.py +++ b/syftbox/lib/__init__.py @@ -1,2 +1,4 @@ from syftbox import __version__ from syftbox.lib.lib import * # noqa: F403 + +from .logger import logger diff --git a/syftbox/lib/lib.py b/syftbox/lib/lib.py index 4466e6c2..335190f1 100644 --- a/syftbox/lib/lib.py +++ b/syftbox/lib/lib.py @@ -16,6 +16,7 @@ import httpx import requests +from loguru import logger from typing_extensions import Self from syftbox.client.utils import macos @@ -31,7 +32,9 @@ DEFAULT_PORT = 8082 ICON_FOLDER = ASSETS_FOLDER / "icon" DEFAULT_SYNC_FOLDER = os.path.expanduser("~/Desktop/SyftBox") -DEFAULT_CONFIG_PATH = os.path.expanduser("~/.syftbox/client_config.json") 
+DEFAULT_CONFIG_FOLDER = os.path.expanduser("~/.syftbox") +DEFAULT_CONFIG_PATH = os.path.join(DEFAULT_CONFIG_FOLDER, "client_config.json") +DEFAULT_LOGS_PATH = os.path.join(DEFAULT_CONFIG_FOLDER, "logs", "syftbox.log") USER_GROUP_GLOBAL = "GLOBAL" @@ -93,7 +96,7 @@ def load(cls, filepath: str) -> Self: return cls(**d) except Exception as e: raise e - print(f"Unable to load jsonable file: {filepath}. {e}") + logger.info(f"Unable to load jsonable file: {filepath}. {e}") return None def save(self, filepath: str) -> None: @@ -482,7 +485,7 @@ def autocache( return file_path return download_file(url, file_path) except Exception as e: - print(f"Failed to autocache: {url}. {e}") + logger.info(f"Failed to autocache: {url}. {e}") return None @@ -491,7 +494,7 @@ def download_file(url: str, full_path: str | Path) -> Path | None: if not full_path.exists(): r = requests.get(url, allow_redirects=True, verify=verify_tls()) # nosec if not r.ok: - print(f"Got {r.status_code} trying to download {url}") + logger.info(f"Got {r.status_code} trying to download {url}") return None full_path.parent.mkdir(parents=True, exist_ok=True) full_path.write_bytes(r.content) @@ -575,7 +578,7 @@ def get_datasites(self: str) -> list[str]: def use(self): os.environ["SYFTBOX_CURRENT_CLIENT"] = self.config_path os.environ["SYFTBOX_SYNC_DIR"] = self.sync_folder - print(f"> Setting Sync Dir to: {self.sync_folder}") + logger.info(f"> Setting Sync Dir to: {self.sync_folder}") def create_folder(self, path: str, permission: SyftPermission): os.makedirs(path, exist_ok=True) diff --git a/syftbox/lib/logger.py b/syftbox/lib/logger.py new file mode 100644 index 00000000..3c6ab16c --- /dev/null +++ b/syftbox/lib/logger.py @@ -0,0 +1,20 @@ +from pathlib import Path +from shutil import make_archive + +from loguru import logger + +from syftbox.lib.lib import DEFAULT_LOGS_PATH + +# Configure Loguru to write logs to a file with rotation +logger.add( + DEFAULT_LOGS_PATH, + rotation="100 MB", # Rotate after the log file 
reaches 100 MB + retention=2, # Keep only the last 2 log files + compression="zip", # Usually, 10x reduction in file size +) + + +def zip_logs(output_path): + logger.info("Compressing logs folder") + logs_folder = Path(DEFAULT_LOGS_PATH).parent + return make_archive(output_path, "zip", logs_folder) diff --git a/syftbox/main.py b/syftbox/main.py index 3c606db2..8b2de103 100644 --- a/syftbox/main.py +++ b/syftbox/main.py @@ -1,6 +1,8 @@ import argparse import sys +from loguru import logger + from syftbox import __version__ from syftbox.app.manager import list_app from syftbox.app.manager import main as app_manager_main @@ -61,9 +63,9 @@ def print_debug(): if key.startswith("SYFT") }, } - print(yaml.dump(debug_info, default_flow_style=False)) + logger.info(yaml.dump(debug_info, default_flow_style=False)) except Exception as e: - print(e) + logger.info(e) def main(): @@ -107,9 +109,9 @@ def main(): sys.argv = [sys.argv[0]] + remaining_args app_manager_main(app_parser, remaining_args) elif args.command == "version": - print(f"{__version__}") + logger.info(f"{__version__}") elif args.command == "debug": - print_debug() + print_debug() else: parser.print_help() diff --git a/syftbox/server/models.py b/syftbox/server/models.py index 8fc89726..ab0eff3b 100644 --- a/syftbox/server/models.py +++ b/syftbox/server/models.py @@ -3,6 +3,7 @@ from enum import Enum from typing import Optional +from loguru import logger from pydantic import BaseModel from typing_extensions import Self @@ -120,7 +121,7 @@ def delete(self) -> bool: except Exception as e: if "No such file" in str(e): return True - print(f"Failed to delete file at {self.full_path}. {e}") + logger.info(f"Failed to delete file at {self.full_path}. 
{e}") return False def write_to(self, data: bytes, path: str) -> bool: @@ -136,7 +137,8 @@ def write_to(self, data: bytes, path: str) -> bool: ) return True except Exception as e: - print("failed to write", path, e) + logger.error(f"failed to write to {path}.") + logger.exception(e) return False diff --git a/syftbox/server/server.py b/syftbox/server/server.py index 59956792..019d8853 100644 --- a/syftbox/server/server.py +++ b/syftbox/server/server.py @@ -4,7 +4,6 @@ import os import random import sys -import traceback from dataclasses import dataclass from datetime import datetime from pathlib import Path @@ -20,6 +19,7 @@ RedirectResponse, ) from jinja2 import Template +from loguru import logger from typing_extensions import Any from syftbox import __version__ @@ -56,7 +56,7 @@ def load_list(cls, filepath: str) -> list[Any]: ds.append(cls(**di)) return ds except Exception as e: - print(f"Unable to load list file: {filepath}. {e}") + logger.info(f"Unable to load list file: {filepath}. {e}") return None @@ -78,7 +78,7 @@ def load_dict(cls, filepath: str) -> list[Any]: dicts[key] = cls(**value) return dicts except Exception as e: - print(f"Unable to load dict file: {filepath}. {e}") + logger.info(f"Unable to load dict file: {filepath}. 
{e}") return None @@ -150,25 +150,25 @@ def create_folders(folders: list[str]) -> None: @contextlib.asynccontextmanager async def lifespan(app: FastAPI, settings: ServerSettings | None = None): # Startup - print("> Starting Server") + logger.info("> Starting Server") if settings is None: settings = ServerSettings() - print(settings) + logger.info(settings) - print("> Creating Folders") + logger.info("> Creating Folders") create_folders(settings.folders) users = Users(path=settings.user_file_path) - print("> Loading Users") - print(users) + logger.info("> Loading Users") + logger.info(users) yield { "server_settings": settings, "users": users, } - print("> Shutting down server") + logger.info("> Shutting down server") app = FastAPI(lifespan=lifespan) @@ -320,7 +320,7 @@ async def register(request: Request, users: Users = Depends(get_users)): data = await request.json() email = data["email"] token = users.create_user(email) - print(f"> {email} registering: {token}") + logger.info(f"> {email} registering: {token}") return JSONResponse({"status": "success", "token": token}, status_code=200) @@ -349,16 +349,18 @@ async def write( if change.hash_equal_or_none(): result = change.delete() else: - print(f"> 🔥 {change.kind} hash doesnt match so ignore {change}") + logger.info( + f"> 🔥 {change.kind} hash doesnt match so ignore {change}" + ) accepted = False else: raise Exception(f"Unknown type of change kind. 
{change.kind}") else: - print(f"> 🔥 {change.kind} is older so ignore {change}") + logger.info(f"> 🔥 {change.kind} is older so ignore {change}") accepted = False if result: - print(f"> {email} {change.kind}: {change.internal_path}") + logger.info(f"> {email} {change.kind}: {change.internal_path}") return WriteResponse( status="success", change=change, @@ -370,7 +372,7 @@ async def write( accepted=accepted, ), 400 except Exception as e: - print("Exception writing", e) + logger.info("Exception writing", e) raise HTTPException( status_code=400, detail=f"Exception writing {e}", @@ -384,12 +386,15 @@ async def read( email = request.email change = request.change change.sync_folder = os.path.abspath(str(server_settings.snapshot_folder)) - print(f"> {email} {change.kind}: {change.internal_path}") + logger.info(f"> {email} {change.kind}: {change.internal_path}") # TODO: handle permissions, create and delete + data = None + if change.kind_write and not change.is_directory(): + data = bintostr(change.read()) return ReadResponse( status="success", change=change.model_dump(mode="json"), - data=bintostr(change.read()) if change.kind_write else None, + data=data, is_directory=change.is_directory(), ) @@ -421,10 +426,7 @@ async def dir_state( ) raise HTTPException(status_code=400, detail={"status": "error"}) except Exception as e: - # TODO dir_state can fail in hash_dir os.path.join - # example: if sub_path is absolute, os.path.join will return sub_path and not snapshot_folder - traceback.print_exc() - print("Failed to run /dir_state", e) + logger.exception("Failed to run /dir_state", e) @app.get("/list_datasites", response_model=ListDatasitesResponse) diff --git a/tests/server/endpoint_test.py b/tests/server/endpoint_test.py index dbd68f29..bc86cd6e 100644 --- a/tests/server/endpoint_test.py +++ b/tests/server/endpoint_test.py @@ -98,6 +98,21 @@ def test_read_file(client: TestClient): response.raise_for_status() +def test_read_folder(client: TestClient): + change = { + "kind": 
"write", + "parent_path": TEST_DATASITE_NAME, + "sub_path": ".", + "file_hash": "some_hash", + "last_modified": time.time(), + } + response = client.post( + "/read", json={"email": TEST_DATASITE_NAME, "change": change} + ) + + response.raise_for_status() + + def test_dir_state(client: TestClient): response = client.post( "/dir_state", json={"email": TEST_DATASITE_NAME, "sub_path": "."} diff --git a/uv.lock b/uv.lock index 9f6c4918..a025223e 100644 --- a/uv.lock +++ b/uv.lock @@ -555,6 +555,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d", size = 133271 }, ] +[[package]] +name = "loguru" +version = "0.7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "win32-setctime", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/30/d87a423766b24db416a46e9335b9602b054a72b96a88a241f2b09b560fa8/loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac", size = 145103 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/0a/4f6fed21aa246c6b49b561ca55facacc2a44b87d65b8b92362a8e99ba202/loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb", size = 62549 }, +] + [[package]] name = "markdown" version = "3.7" @@ -812,8 +825,6 @@ version = "6.0.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/18/c7/8c6872f7372eb6a6b2e4708b88419fb46b857f7a2e1892966b851cc79fc9/psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2", size = 508067 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/c5/66/78c9c3020f573c58101dc43a44f6855d01bbbd747e24da2f0c4491200ea3/psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35", size = 249766 }, - { url = "https://files.pythonhosted.org/packages/e1/3f/2403aa9558bea4d3854b0e5e567bc3dd8e9fbc1fc4453c0aa9aafeb75467/psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1", size = 253024 }, { url = "https://files.pythonhosted.org/packages/0b/37/f8da2fbd29690b3557cca414c1949f92162981920699cd62095a984983bf/psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0", size = 250961 }, { url = "https://files.pythonhosted.org/packages/35/56/72f86175e81c656a01c4401cd3b1c923f891b31fbcebe98985894176d7c9/psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0", size = 287478 }, { url = "https://files.pythonhosted.org/packages/19/74/f59e7e0d392bc1070e9a70e2f9190d652487ac115bb16e2eff6b22ad1d24/psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd", size = 290455 }, @@ -1187,6 +1198,7 @@ dependencies = [ { name = "fastapi" }, { name = "httpx" }, { name = "jinja2" }, + { name = "loguru" }, { name = "markdown" }, { name = "pandas" }, { name = "postmarker" }, @@ -1219,6 +1231,7 @@ requires-dist = [ { name = "fastapi", specifier = ">=0.114.0" }, { name = "httpx", specifier = ">=0.27.2" }, { name = "jinja2", specifier = ">=3.1.4" }, + { name = "loguru", specifier = ">=0.7.2" }, { name = "markdown", specifier = ">=3.7" }, { name = "pandas", specifier = ">=2.2.2" }, { name = "postmarker", specifier = ">=1.0" }, @@ -1357,6 +1370,15 @@ wheels = [ { 
url = "https://files.pythonhosted.org/packages/38/b8/0aa69337651b3005f161f7f494e59188a1d8d94171666900d26d29d10f69/watchdog-5.0.3-py3-none-win_ia64.whl", hash = "sha256:49f4d36cb315c25ea0d946e018c01bb028048023b9e103d3d3943f58e109dd45", size = 79324 }, ] +[[package]] +name = "win32-setctime" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/dd/f95a13d2b235a28d613ba23ebad55191514550debb968b46aab99f2e3a30/win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2", size = 3676 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/e6/a7d828fef907843b2a5773ebff47fb79ac0c1c88d60c0ca9530ee941e248/win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad", size = 3604 }, +] + [[package]] name = "zipp" version = "3.20.2"