Update dependencies config, ops improvements, fixes #21 #36

Merged · 6 commits · Nov 26, 2024
Changes from all commits
11 changes: 10 additions & 1 deletion .github/dependabot.yml
@@ -6,7 +6,16 @@ updates:
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
-      interval: "monthly"
+      interval: "weekly"
     target-branch: "develop"
     assignees:
       - "kerberizer"
+
+  # Maintain dependencies for Python
+  - package-ecosystem: "pip"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    target-branch: "develop"
+    assignees:
+      - "kerberizer"
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
@@ -17,7 +17,7 @@ on: # yamllint disable-line rule:truthy
   workflow_dispatch:
 
 env:
-  POETRY_VERSION: 1.8.3
+  POETRY_VERSION: 1.8.4
   REGISTRY: ghcr.io
   IMAGE_NAME: ${{ github.repository }}
 
@@ -113,7 +113,7 @@ jobs:
         if: github.event_name != 'pull_request'
         uses: sigstore/cosign-installer@…
         with:
-          cosign-release: 'v2.4.0'
+          cosign-release: 'v2.4.1'
 
       # Workaround: https://github.com/docker/build-push-action/issues/461
       - name: Setup Docker buildx
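
For context on the second hunk: cosign-release is an input of the sigstore/cosign-installer GitHub Action and pins the cosign CLI version it installs. A generic sketch of such a step follows; the step name and the @v3 action tag are assumptions, not values taken from this workflow.

# Generic cosign installation step (sketch; step name and action tag assumed)
- name: Install cosign
  if: github.event_name != 'pull_request'
  uses: sigstore/cosign-installer@v3
  with:
    cosign-release: 'v2.4.1'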
6 changes: 0 additions & 6 deletions .gitmodules
@@ -1,6 +0,0 @@
-[submodule "pynanomapper"]
-	path = extern/pynanomapper
-	url = https://github.com/ideaconsult/pynanomapper.git
-[submodule "ramanchada2"]
-	path = extern/ramanchada2
-	url = https://github.com/h2020charisma/ramanchada2.git
13 changes: 1 addition & 12 deletions Dockerfile
@@ -2,12 +2,9 @@ FROM python:3.11-slim AS requirements-stage
 
 WORKDIR /tmp
 
-RUN pip install poetry
-
 COPY ./pyproject.toml ./poetry.lock* /tmp/
-COPY ./extern/pynanomapper /tmp/extern/pynanomapper
-COPY ./extern/ramanchada2 /tmp/extern/ramanchada2
 
+RUN pip install poetry
 RUN poetry export -f requirements.txt --output requirements.txt --without=dev --without-hashes
 
 
@@ -18,14 +15,6 @@ RUN apt-get update && apt-get install -y \
     && rm -rf /var/lib/apt/lists/*
 
 COPY --from=requirements-stage /tmp/requirements.txt /app/requirements.txt
-COPY ./extern/pynanomapper /tmp/extern/pynanomapper
-COPY ./extern/ramanchada2 /tmp/extern/ramanchada2
-
-RUN sed -i 's/^-e //' /app/requirements.txt
-
-# FIXME: 809de9f workaround introduced discrepancy between poetry.lock and this installation.
-# This is another "fix" that'll come back to bite us until we fix the whole dependency thing.
-RUN sed -i '/^pyambit/d' /tmp/extern/pynanomapper/pyproject.toml
 
 RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
 
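
Reading the two hunks together, the simplified multi-stage build after this PR would look roughly like the sketch below. Everything the diff does not show, namely the runtime base image, WORKDIR, the apt package list, the application COPY and the start command, is an assumption and marked as such.

# Sketch of the post-merge Dockerfile; lines flagged "assumed" are not in the diff.
FROM python:3.11-slim AS requirements-stage

WORKDIR /tmp

COPY ./pyproject.toml ./poetry.lock* /tmp/

RUN pip install poetry
RUN poetry export -f requirements.txt --output requirements.txt --without=dev --without-hashes

# --- runtime stage: base image, workdir and package list below are assumed ---
FROM python:3.11-slim

WORKDIR /app

RUN apt-get update && apt-get install -y \
    curl \
    && rm -rf /var/lib/apt/lists/*

COPY --from=requirements-stage /tmp/requirements.txt /app/requirements.txt

RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt

# Application copy and start command are assumed as well.
COPY ./src /app/src

CMD ["uvicorn", "rcapi.main:app", "--host", "0.0.0.0", "--port", "8000"]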
1 change: 0 additions & 1 deletion extern/pynanomapper
Submodule pynanomapper deleted from f9e9e3
1 change: 0 additions & 1 deletion extern/ramanchada2
Submodule ramanchada2 deleted from d7b4e2
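
Removing vendored submodules like these typically amounts to the following sequence; this is a generic sketch, not a record of the commands actually run for this PR.

# Generic sketch: drop a vendored submodule (repeat for extern/ramanchada2)
git submodule deinit -f extern/pynanomapper
git rm -f extern/pynanomapper
rm -rf .git/modules/extern/pynanomapper
git commit -m "Remove pynanomapper submodule"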
2,114 changes: 1,066 additions & 1,048 deletions poetry.lock

Large diffs are not rendered by default.

7 changes: 4 additions & 3 deletions pyproject.toml
@@ -24,14 +24,15 @@ python = ">=3.10,<3.13"
 apscheduler = "^3.10.1"
 fastapi = "^0.112.1"
 h5grove = "^2.0.0"
-h5pyd = { git = "https://github.com/HDFGroup/h5pyd.git" }
+h5pyd = { git = "https://github.com/HDFGroup/h5pyd.git", rev = "8d1c87ea9b6d4b7691b75ef6e372bc0ece141a76" }
 numcompress = "^0.1.2"
 openpyxl = "^3.1.2"
+pyambit = "^0.0.1"
 pydantic-settings = "^2.4.0"
-pynanomapper = { path = "extern/pynanomapper", develop = true }
+pynanomapper = "^2.0.1"
 python-keycloak = "^4.3.0"
 python-multipart = "^0.0.6"
-ramanchada2 = { path = "extern/ramanchada2", develop = true }
+ramanchada2 = "^1.2.0"
 uvicorn = "^0.27.1"
 
 [tool.poetry.urls]
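
The switch from path-based develop dependencies to released packages, plus the pinned h5pyd revision, can be reproduced with Poetry roughly as follows. This is a sketch of equivalent commands, not a record of what was actually run for this PR.

# Sketch: move to released packages and pin h5pyd to a specific commit
poetry remove pynanomapper ramanchada2
poetry add "pynanomapper@^2.0.1" "ramanchada2@^1.2.0" "pyambit@^0.0.1"
poetry add git+https://github.com/HDFGroup/h5pyd.git#8d1c87ea9b6d4b7691b75ef6e372bc0ece141a76
poetry lock   # regenerates poetry.lock, which accounts for the large diff above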
46 changes: 11 additions & 35 deletions src/rcapi/services/process_service.py
@@ -1,45 +1,21 @@
 import os
-import uuid
 import time
-import shutil
 from rcapi.models.models import Task  # Import your data models
-import h5py
 from ..config.app_config import load_config
-from pyambit.nexus_parser import SpectrumParser
 
-config = load_config()
-async def process(task : Task,process_config : dict, nexus_dataset_url: str,base_url: str):
-    try:
-        process_class = process_config["class"]
-        process_class.process(task,nexus_dataset_url,base_url)
-
-        task.status = "Completed"
-    except (ImportError, AttributeError) as e:
-        task.status = "Error"
-        task.error = f"Failed to load plugin or class: {e}"
-    except Exception as e:
-        task.status = "Error"
-        task.error = f"{e}"
-    task.completed=int(time.time() * 1000)
 config = load_config()
 UPLOAD_DIR = config.upload_dir
 os.makedirs(UPLOAD_DIR, exist_ok=True)
 
-async def process_new(task : Task,nexus_dataset_url: str,base_url: str):
-    open_dataset(nexus_dataset_url,base_url)
-    task.status = "Completed"
-    task.completed=int(time.time() * 1000)
-
-
-def open_dataset(nexus_dataset_url: str,base_url: str):
-    if nexus_dataset_url.startswith(base_url):
-        uuid = nexus_dataset_url.split("/")[-1]
-        spectrum_parser = SpectrumParser()
-        spectrum_parser.parse(os.path.join(UPLOAD_DIR,f"{uuid}.nxs"))
-        # Access the spectrum data
-        for key in spectrum_parser.parsed_objects:
-            spe = spectrum_parser.parsed_objects[key]
-            print("Spectrum data", key, spe)
-            #spe.plot()
-
-    else:
-        pass
+async def process(task: Task, process_config: dict,
+                  nexus_dataset_url: str, base_url: str):
+    task.status = "Error"
+    task.error = "Not implemented"
+    task.completed = int(time.time() * 1000)
+
+
+async def process_new(task: Task, nexus_dataset_url: str, base_url: str):
+    task.status = "Error"
+    task.completed = int(time.time() * 1000)
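
For context, the deleted implementation looked up a plugin class in process_config and recorded the outcome on the task. A self-contained sketch of that dispatch pattern follows; DemoTask and DemoProcessor are hypothetical stand-ins for rcapi.models.models.Task and the Process* classes, while the control flow mirrors the removed code.

# Sketch of the plugin dispatch the removed process() implemented.
# DemoTask and DemoProcessor are hypothetical stand-ins, not project classes.
import asyncio
import time


class DemoTask:
    status = "Running"
    error = None
    completed = None


class DemoProcessor:
    @staticmethod
    def process(task, nexus_dataset_url, base_url):
        print("processing", nexus_dataset_url)


async def process(task, process_config, nexus_dataset_url, base_url):
    try:
        # Look up the plugin class and delegate to it, as the old code did.
        process_class = process_config["class"]
        process_class.process(task, nexus_dataset_url, base_url)
        task.status = "Completed"
    except (ImportError, AttributeError) as e:
        task.status = "Error"
        task.error = f"Failed to load plugin or class: {e}"
    except Exception as e:
        task.status = "Error"
        task.error = f"{e}"
    task.completed = int(time.time() * 1000)


task = DemoTask()
asyncio.run(process(task, {"class": DemoProcessor}, "http://host/dataset/abc", "http://host"))
print(task.status)  # -> Completed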
37 changes: 6 additions & 31 deletions src/rcapi/services/processing_spectra.py
@@ -1,45 +1,20 @@
 import os
 from rcapi.config.app_config import initialize_dirs
 from rcapi.models.models import Task
-from pyambit.nexus_parser import SpectrumParser
-from pyambit.nexus_spectra import peaks2nxdata
 
-#from ramanchada2.protocols.calibration import CalibrationModel
 
 config, UPLOAD_DIR, NEXUS_DIR, TEMPLATE_DIR = initialize_dirs()
 
-def open_dataset(nexus_dataset_url: str,base_url: str):
-    print(nexus_dataset_url,base_url)
-    if nexus_dataset_url.startswith(base_url):
-        uuid = nexus_dataset_url.split("/")[-1]
-        spectrum_parser = SpectrumParser()
-        spectrum_parser.parse(os.path.join(NEXUS_DIR,f"{uuid}.nxs"))
-        return spectrum_parser
-    else:
-        return None
 
 class ProcessMock:
-    def process(task : Task,nexus_dataset_url: str,base_url: str):
-        spectrum_parser : SpectrumParser = open_dataset(nexus_dataset_url,base_url)
-        for key in spectrum_parser.parsed_objects:
-            spe = spectrum_parser.parsed_objects[key]
-            print("Spectrum data", key, spe)
-            #spe.plot()
+    def process(task: Task, nexus_dataset_url: str, base_url: str):
+        pass
 
 
 class ProcessCalibrate:
-    def process(task : Task,nexus_dataset_url: str,base_url: str):
-        #calmodel = CalibrationModel(laser_wl)
-        #calmodel.derive_model_x(spe_neon,spe_neon_units="cm-1",ref_neon=None,ref_neon_units="nm",spe_sil=None,spe_sil_units="cm-1",ref_sil=None,ref_sil_units="cm-1")
+    def process(task: Task, nexus_dataset_url: str, base_url: str):
+        pass
 
 
 class ProcessFindPeak:
-    def process(task : Task,nexus_dataset_url: str,base_url: str):
-        spectrum_parser : SpectrumParser = open_dataset(nexus_dataset_url,base_url)
-        for key in spectrum_parser.parsed_objects:
-            spe = spectrum_parser.parsed_objects[key]
-            print("Spectrum data", key, spe)
-            peak_candidates = spe.find_peak_multipeak(sharpening='hht', strategy='topo')
-            fitres = spe.fit_peak_multimodel(profile='Moffat', candidates=peak_candidates, no_fit=True)
-            print(fitres.to_dataframe_peaks())
+    def process(task: Task, nexus_dataset_url: str, base_url: str):
+        pass
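
The three classes are now empty stubs. For reference, a ProcessFindPeak-style implementation built directly on ramanchada2 could look roughly like the sketch below; the peak-finding calls are taken from the deleted code, while the from_local_file loader and the file name are assumptions about how a spectrum might be obtained.

# Sketch only: a ramanchada2-based ProcessFindPeak in the shape of the stubbed classes.
import ramanchada2 as rc2

from rcapi.models.models import Task


class ProcessFindPeakSketch:
    def process(task: Task, nexus_dataset_url: str, base_url: str):
        # Assumed loader; the deleted code parsed the stored NeXus file via
        # pyambit's SpectrumParser instead of reading a local spectrum file.
        spe = rc2.spectrum.from_local_file("example_spectrum.txt")
        # These calls are verbatim from the removed ProcessFindPeak.process()
        peak_candidates = spe.find_peak_multipeak(sharpening='hht', strategy='topo')
        fitres = spe.fit_peak_multimodel(profile='Moffat',
                                         candidates=peak_candidates, no_fit=True)
        print(fitres.to_dataframe_peaks())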