Skip to content

Commit

Permalink
Merge pull request #10 from madhavajay/madhava/fix_perm_tree_Bugs
Browse files — browse the repository at this point in the history
Added database for jobs
madhavajay authored Sep 16, 2024
2 parents 401a22b + 692d065 commit 8374a2c
Showing 7 changed files with 187 additions and 25 deletions.
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -169,4 +169,6 @@ data/**
users/**
dist
syftbox.egg-info
keys/**
keys/**
scheduler.lock
jobs.sqlite
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -12,6 +12,7 @@ dependencies = [
"apscheduler>=3.10.4",
"jinja2>=3.1.4",
"typing-extensions>=4.12.2",
"sqlalchemy>=2.0.34",
]

[build-system]
61 changes: 46 additions & 15 deletions syftbox/client/client.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,17 @@
import argparse
import atexit
import importlib
import os
import subprocess
import sys
import time
import traceback
import types
from dataclasses import dataclass
from pathlib import Path

import uvicorn
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.schedulers.background import BackgroundScheduler
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
@@ -260,6 +263,7 @@ def run_plugin(plugin_name):
module = app.loaded_plugins[plugin_name].module
module.run(app.shared_state)
except Exception as e:
traceback.print_exc()
print(e)


@@ -278,19 +282,25 @@ def start_plugin(plugin_name: str):

try:
plugin = app.loaded_plugins[plugin_name]
job = app.scheduler.add_job(
func=run_plugin,
trigger="interval",
seconds=plugin.schedule / 1000,
id=plugin_name,
args=[plugin_name],
)
app.running_plugins[plugin_name] = {
"job": job,
"start_time": time.time(),
"schedule": plugin.schedule,
}
return {"message": f"Plugin {plugin_name} started successfully"}

existing_job = app.scheduler.get_job(plugin_name)
if existing_job is None:
job = app.scheduler.add_job(
func=run_plugin,
trigger="interval",
seconds=plugin.schedule / 1000,
id=plugin_name,
args=[plugin_name],
)
app.running_plugins[plugin_name] = {
"job": job,
"start_time": time.time(),
"schedule": plugin.schedule,
}
return {"message": f"Plugin {plugin_name} started successfully"}
else:
print(f"Job {existing_job}, already added")
return {"message": f"Plugin {plugin_name} already started"}
except Exception as e:
raise HTTPException(
status_code=500,
@@ -316,20 +326,33 @@ def parse_args():
return parser.parse_args()


# On-disk SQLite file backing the APScheduler job store; removed at startup
# (in lifespan) and at shutdown (stop_scheduler) so stale jobs never persist.
JOB_FILE = "jobs.sqlite"


async def lifespan(app: FastAPI):
# Startup
print("> Starting Client")
args = parse_args()
client_config = load_or_create_config(args)
app.shared_state = initialize_shared_state(client_config)

scheduler = BackgroundScheduler()
# Clear the lock file on the first run if it exists
if os.path.exists(JOB_FILE):
os.remove(JOB_FILE)
print(f"> Cleared existing job file: {JOB_FILE}")

# Start the scheduler
jobstores = {"default": SQLAlchemyJobStore(url=f"sqlite:///{JOB_FILE}")}
scheduler = BackgroundScheduler(jobstores=jobstores)
scheduler.start()
app.scheduler = scheduler
atexit.register(stop_scheduler)

app.running_plugins = {}
app.loaded_plugins = load_plugins(client_config)

autorun_plugins = ["init", "sync", "create_datasite", "watch_and_run"]
# autorun_plugins = ["init", "sync", "create_datasite", "watch_and_run"]
autorun_plugins = ["init", "sync", "create_datasite"]
for plugin in autorun_plugins:
start_plugin(plugin)

@@ -339,6 +362,13 @@ async def lifespan(app: FastAPI):
print("Shutting down...")


def stop_scheduler():
    """Clean up the scheduler's on-disk job store at process exit.

    Registered via ``atexit`` in ``lifespan``. Deletes the SQLite job file
    (``JOB_FILE``) if present so the next run starts with a fresh store.
    """
    if os.path.exists(JOB_FILE):
        os.remove(JOB_FILE)
        # Only report removal when a file was actually deleted; the old
        # message also mislabelled the jobs database as a "lock file".
        print("Scheduler stopped and job file removed.")


app = FastAPI(lifespan=lifespan)

app.mount("/static", StaticFiles(directory=current_dir / "static"), name="static")
@@ -504,6 +534,7 @@ def main() -> None:
port=client_config.port,
log_level="debug" if debug else "info",
reload=debug, # Enable hot reloading only in debug mode
reload_dirs="./syftbox",
)


8 changes: 2 additions & 6 deletions syftbox/client/plugins/create_datasite.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import logging
import os

from syftbox.lib import USER_GROUP_GLOBAL, SyftPermission, perm_file_path
from syftbox.lib import SyftPermission, perm_file_path

logger = logging.getLogger(__name__)

@@ -20,11 +20,7 @@ def claim_datasite(client_config):
else:
print(f"> {client_config.email} Creating Datasite + Permfile")
try:
perm_file = SyftPermission(
admin=[client_config.email],
read=[client_config.email, USER_GROUP_GLOBAL],
write=[client_config.email],
)
perm_file = SyftPermission.datasite_default(client_config.email)
perm_file.save(file_path)
except Exception as e:
print("Failed to create perm file", e)
4 changes: 4 additions & 0 deletions syftbox/client/plugins/sync.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import os
import traceback
from threading import Event

import requests
@@ -432,16 +433,19 @@ def run(shared_state):
try:
create_datasites(shared_state.client_config)
except Exception as e:
traceback.print_exc()
print("failed to get_datasites", e)

try:
num_changes += sync_up(shared_state.client_config)
except Exception as e:
traceback.print_exc()
print("failed to sync up", e)

try:
num_changes += sync_down(shared_state.client_config)
except Exception as e:
traceback.print_exc()
print("failed to sync down", e)
if num_changes == 0:
print("✅ Synced")
51 changes: 48 additions & 3 deletions syftbox/lib/lib.py
Original file line number Diff line number Diff line change
@@ -79,6 +79,18 @@ class SyftPermission(Jsonable):
write: list[str]
filepath: str | None = None

@classmethod
def datasite_default(cls, email: str) -> Self:
    """Return the default permission set for *email*'s own datasite.

    The owner is granted admin, read, and write access; no other user
    receives any permission by default.
    """
    return SyftPermission(admin=[email], read=[email], write=[email])

@classmethod
def no_permission(cls) -> Self:
    """Return an empty permission set: no admins, readers, or writers."""
    # Fixed: the first parameter of a @classmethod is the class, so it is
    # named ``cls`` (was ``self``); construct via ``cls`` so subclasses work.
    return cls(admin=[], read=[], write=[])

def __repr__(self) -> str:
string = "SyftPermission:\n"
string += f"{self.filepath}\n"
@@ -244,10 +256,28 @@ def get_datasites(sync_folder: str) -> list[str]:
return datasites


def build_tree_string(paths_dict, prefix=""):
    """Render a nested dict as an ASCII tree, one entry per line.

    Keys are shown via ``repr``; dict values are rendered as indented
    subtrees. Returns the whole tree as a single newline-joined string.
    """
    lines = []
    items = list(paths_dict.items())

    for index, (key, value) in enumerate(items):
        # Last entry at this level gets the closing elbow connector.
        is_last = index == len(items) - 1
        connector = "└── " if is_last else "├── "
        lines.append(f"{prefix}{connector}{repr(key)}")

        # Recurse into dict values; skip empty subtrees so their empty
        # recursion result doesn't inject a stray blank line on join.
        if isinstance(value, dict) and value:
            extension = " " if is_last else "│ "
            lines.append(build_tree_string(value, prefix + extension))

    return "\n".join(lines)


@dataclass
class PermissionTree(Jsonable):
tree: dict[str, SyftPermission]
parent_path: str
root_perm: SyftPermission | None

@classmethod
def from_path(cls, parent_path) -> Self:
@@ -258,12 +288,24 @@ def from_path(cls, parent_path) -> Self:
path = os.path.join(root, file)
perm_dict[path] = SyftPermission.load(path)

return PermissionTree(tree=perm_dict, parent_path=parent_path)
root_perm = None
root_perm_path = perm_file_path(parent_path)
if root_perm_path in perm_dict:
root_perm = perm_dict[root_perm_path]

return PermissionTree(
root_perm=root_perm, tree=perm_dict, parent_path=parent_path
)

@property
def root_or_default(self) -> SyftPermission:
    """The root-level permission if one was loaded, else an empty set."""
    return self.root_perm if self.root_perm else SyftPermission.no_permission()

def permission_for_path(self, path: str) -> SyftPermission:
parent_path = os.path.normpath(self.parent_path)
top_perm_file = perm_file_path(parent_path)
current_perm = self.tree[top_perm_file]
current_perm = self.root_or_default

# default
if parent_path not in path:
@@ -285,6 +327,9 @@ def permission_for_path(self, path: str) -> SyftPermission:

return current_perm

def __repr__(self) -> str:
    """Readable dump: parent path header followed by the rendered perm tree."""
    header = f"PermissionTree: {self.parent_path}\n"
    return header + build_tree_string(self.tree)


def filter_read_state(user_email: str, dir_state: DirState, perm_tree: PermissionTree):
filtered_tree = {}
83 changes: 83 additions & 0 deletions uv.lock

Large diffs are not rendered by default.

0 comments on commit 8374a2c

Please sign in to comment.