From 4094cc9e94b72a95b41bb3ec262eb135773dc4a0 Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Wed, 21 Aug 2024 16:42:25 -0400 Subject: [PATCH 01/29] Add ILAB / Crucible support to CPT backend GET localhost:8000/api/v1/ilab/runs?benchmark=ilab will query the ilab.crucible OpenSearch instance and return a list of ilab benchmark runs. --- README.md | 7 +- backend/Pipfile.lock | 20 + backend/app/api/api.py | 5 + backend/app/api/v1/endpoints/ilab/ilab.py | 716 ++++++++ backend/app/main.py | 74 +- backend/app/services/crucible_svc.py | 1608 +++++++++++++++++ backend/poetry.lock | 1141 ++++++------ backend/pyproject.toml | 3 +- backend/scripts/start-reload.sh | 4 +- backend/skeleton.toml | 5 + frontend/README.md | 2 +- frontend/src/App.js | 58 - frontend/src/App.jsx | 2 + frontend/src/actions/filterActions.js | 3 + frontend/src/actions/ilabActions.js | 98 + frontend/src/actions/types.js | 4 + .../src/assets/constants/SidemenuConstants.js | 1 + .../src/components/atoms/PlotGraph/index.jsx | 1 - .../molecules/ExpandedRow/index.jsx | 2 +- .../molecules/SideMenuOptions/index.jsx | 5 + .../organisms/TableFilters/index.jsx | 3 +- .../src/components/templates/ILab/index.jsx | 202 +++ frontend/src/reducers/ilabReducer.js | 30 + frontend/src/reducers/index.js | 2 + frontend/src/store/reducers/InitialData.js | 181 -- frontend/src/store/reducers/index.js | 18 - frontend/src/utils/apiConstants.js | 5 +- frontend/src/utils/routeConstants.js | 1 + local-compose.sh | 4 +- 29 files changed, 3397 insertions(+), 808 deletions(-) create mode 100644 backend/Pipfile.lock create mode 100644 backend/app/api/v1/endpoints/ilab/ilab.py create mode 100644 backend/app/services/crucible_svc.py delete mode 100644 frontend/src/App.js create mode 100644 frontend/src/actions/ilabActions.js create mode 100644 frontend/src/components/templates/ILab/index.jsx create mode 100644 frontend/src/reducers/ilabReducer.js delete mode 100644 frontend/src/store/reducers/InitialData.js delete mode 100644 frontend/src/store/reducers/index.js diff --git a/README.md b/README.md index 540ebdda..43b3ca21 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,11 @@ indice= username= password= +[.crucible] +url= +username= +password= + [ocp-server] port=8000 @@ -53,7 +58,7 @@ indice= username= password= ``` -**Note: The below applies only for the elastic search at the moment** +**Note: The below applies only for the elastic search at the moment** If you also have an archived internal instance that keeps track of older data, it can be specified with '.internal' suffix. Example of our `OCP` internal archived instance's configuration. 
```toml [ocp.elasticsearch.internal] diff --git a/backend/Pipfile.lock b/backend/Pipfile.lock new file mode 100644 index 00000000..eb6410cb --- /dev/null +++ b/backend/Pipfile.lock @@ -0,0 +1,20 @@ +{ + "_meta": { + "hash": { + "sha256": "fedbd2ab7afd84cf16f128af0619749267b62277b4cb6989ef16d4bef6e4eef2" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.10" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": {}, + "develop": {} +} diff --git a/backend/app/api/api.py b/backend/app/api/api.py index b74b8ad4..c37735dc 100644 --- a/backend/app/api/api.py +++ b/backend/app/api/api.py @@ -1,3 +1,4 @@ +import sys from fastapi import APIRouter from app.api.v1.endpoints.ocp import results @@ -11,6 +12,7 @@ from app.api.v1.endpoints.telco import telcoJobs from app.api.v1.endpoints.telco import telcoGraphs from app.api.v1.endpoints.ocm import ocmJobs +from app.api.v1.endpoints.ilab import ilab router = APIRouter() @@ -39,3 +41,6 @@ # OCM endpoint router.include_router(ocmJobs.router, tags=['ocm']) + +# InstructLab endpoint +router.include_router(router=ilab.router, tags=['ilab']) diff --git a/backend/app/api/v1/endpoints/ilab/ilab.py b/backend/app/api/v1/endpoints/ilab/ilab.py new file mode 100644 index 00000000..e20849dc --- /dev/null +++ b/backend/app/api/v1/endpoints/ilab/ilab.py @@ -0,0 +1,716 @@ +from datetime import datetime, timedelta, timezone +from typing import Annotated, Any, Optional + +from app.services.crucible_svc import CrucibleService, Graph, GraphList +from fastapi import APIRouter, Query + +router = APIRouter() + + +CONFIGPATH = "ilab.crucible" + + +def example_response(response) -> dict[str, Any]: + return {"content": {"application/json": {"example": response}}} + + +def example_error(message: str) -> dict[str, Any]: + return example_response({"message": message}) + + +@router.get( + "/api/v1/ilab/runs/filters", + summary="Returns possible filters", + description=( + "Returns a nested JSON object with all parameter and tag filter terms" + ), + responses={ + 200: example_response( + { + "param": { + "model": { + "/home/models/granite-7b-lab/": 26, + "/home/models/Mixtral-8x7B-Instruct-v0.1": 20, + "/home/models/granite-7b-redhat-lab": 11, + }, + "gpus": {"4": 53}, + "workflow": {"sdg": 22, "train": 16, "train+eval": 5}, + "data-path": { + "/home/data/training/jul19-knowledge-26k.jsonl": 16, + "/home/data/training/knowledge_data.jsonl": 13, + "/home/data/jun12-phase05.jsonl": 4, + "/home/data/training/jun12-phase05.jsonl": 4, + }, + "nnodes": {"1": 37}, + "train-until": {"checkpoint:1": 17, "complete": 16}, + "save-samples": {"2500": 11, "10000": 5, "5000": 1}, + "deepspeed-cpu-offload-optimizer": {"1": 13, "0": 2}, + "deepspeed-cpu-offload-optimizer-pin-memory": {"1": 13, "0": 2}, + "batch-size": {"0": 2, "12": 2, "16": 2, "4": 2, "8": 2}, + "cpu-offload-optimizer": {"1": 6}, + "cpu-offload-pin-memory": {"1": 6}, + "nproc-per-node": {"4": 4}, + "num-runavg-samples": {"2": 2, "6": 2}, + "num-cpus": {"30": 2}, + }, + "tag": {"topology": {"none": 21}}, + } + ) + }, +) +async def run_filters(): + crucible = CrucibleService(CONFIGPATH) + return crucible.run_filters() + + +@router.get( + "/api/v1/ilab/runs", + summary="Returns a list of InstructLab runs", + description="Returns a list of runs summary documents.", + responses={ + 200: example_response( + { + "results": [ + { + "benchmark": "ilab", + "email": "rhel-ai-user@example.com", + "id": "bd72561c-cc20-400b-b6f6-d9534a60033a", + "name": '"RHEL-AI 
User"', + "source": "n42-h01-b01-mx750c.example.com//var/lib/crucible/run/ilab--2024-09-11_19:43:53_UTC--bd72561c-cc20-400b-b6f6-d9534a60033a", + "status": "pass", + "begin_date": "1970-01-01 00:00:00+00:00", + "end_date": "1970-01-01 00:00:00+00:00", + "params": { + "gpus": "4", + "model": "/home/models/Mixtral-8x7B-Instruct-v0.1", + "workflow": "sdg", + }, + "iterations": [ + { + "iteration": 1, + "primary_metric": "ilab::sdg-samples-sec", + "primary_period": "measurement", + "status": "pass", + "params": { + "gpus": "4", + "model": "/home/models/Mixtral-8x7B-Instruct-v0.1", + "workflow": "sdg", + }, + } + ], + "primary_metrics": ["ilab::sdg-samples-sec"], + "tags": {"topology": "none"}, + } + ], + "count": 5, + "total": 21, + "startDate": "2024-08-19 20:42:52.239000+00:00", + "endDate": "2024-09-18 20:42:52.239000+00:00", + } + ), + 400: example_error( + "sort key 'bad' must be one of begin,benchmark,email,end,id,name,source,status" + ), + 422: example_error( + "invalid date format, start_date must be less than end_date" + ), + }, +) +async def runs( + start_date: Annotated[ + Optional[str], + Query(description="Start time for search", examples=["2020-11-10"]), + ] = None, + end_date: Annotated[ + Optional[str], + Query(description="End time for search", examples=["2020-11-10"]), + ] = None, + filter: Annotated[ + Optional[list[str]], + Query( + description="Filter terms", examples=["tag:name=value", "param:name=value"] + ), + ] = None, + sort: Annotated[ + Optional[list[str]], + Query(description="Sort terms", examples=["start:asc", "status:desc"]), + ] = None, + size: Annotated[ + Optional[int], Query(description="Number of runs in a page", examples=[10]) + ] = None, + offset: Annotated[ + Optional[int], + Query(description="Page offset to start", examples=[10]), + ] = None, +): + crucible = CrucibleService(CONFIGPATH) + if start_date is None or end_date is None: + now = datetime.now(timezone.utc) + start = now - timedelta(days=30) if start_date is None else start_date + end = now if end_date is None else end_date + results: dict[str, Any] = crucible.runs( + start=start, end=end, filter=filter, sort=sort, size=size, offset=offset + ) + return results + + +@router.get( + "/api/v1/ilab/runs/{run}/tags", + summary="Returns the Crucible tags for a run", + description="Returns tags for a specified Run ID.", + responses={ + 200: example_response({"topology": "none"}), + 400: example_error("Parameter error"), + }, +) +async def tags(run: str): + crucible = CrucibleService(CONFIGPATH) + return crucible.tags(run) + + +@router.get( + "/api/v1/ilab/runs/{run}/params", + summary="Returns the InstructLab parameters for a run", + description="Returns params for a specified Run ID by iteration plus common params.", + responses={ + 200: example_response( + { + "9D5AB7D6-510A-11EF-84ED-CCA69E6B5B5B": { + "num-runavg-samples": "2", + "cpu-offload-pin-memory": "1", + "nnodes": "1", + "cpu-offload-optimizer": "1", + "data-path": "/home/data/training/knowledge_data.jsonl", + "model": "/home/models/granite-7b-lab/", + "nproc-per-node": "4", + }, + "common": { + "num-runavg-samples": "2", + "cpu-offload-pin-memory": "1", + "nnodes": "1", + "cpu-offload-optimizer": "1", + "data-path": "/home/data/training/knowledge_data.jsonl", + "model": "/home/models/granite-7b-lab/", + "nproc-per-node": "4", + }, + } + ), + 400: example_error("Parameter error"), + }, +) +async def params(run: str): + crucible = CrucibleService(CONFIGPATH) + return crucible.params(run) + + +@router.get( + 
"/api/v1/ilab/runs/{run}/iterations", + summary="Returns a list of InstructLab run iterations", + description="Returns a list of iterations for a specified Run ID.", + responses={ + 200: example_response( + [ + { + "id": "6B98F650-7139-11EF-BB69-98B53E962BD1", + "num": 2, + "path": None, + "primary-metric": "ilab::sdg-samples-sec", + "primary-period": "measurement", + "status": "pass", + }, + { + "id": "6B99173E-7139-11EF-9434-F8BB3B1B9CFC", + "num": 5, + "path": None, + "primary-metric": "ilab::sdg-samples-sec", + "primary-period": "measurement", + "status": "pass", + }, + ] + ), + 400: example_error("Parameter error"), + }, +) +async def iterations(run: str): + crucible = CrucibleService(CONFIGPATH) + return crucible.iterations(run) + + +@router.get( + "/api/v1/ilab/runs/{run}/samples", + summary="Returns a list of InstructLab run samples", + description="Returns a list of samples for a specified Run ID.", + responses={ + 200: example_response( + [ + { + "id": "6BA5071A-7139-11EF-9864-EA6BC0BEFE10", + "num": "1", + "path": None, + "status": "pass", + }, + { + "id": "6BBE6872-7139-11EF-BFAA-8569A9399D61", + "num": "1", + "path": None, + "status": "pass", + }, + ] + ), + 400: example_error("Parameter error"), + }, +) +async def run_samples(run: str): + crucible = CrucibleService(CONFIGPATH) + return crucible.samples(run) + + +@router.get( + "/api/v1/ilab/runs/{run}/periods", + summary="Returns a list of InstructLab run periods", + description="Returns a list of periods for a specified Run ID.", + responses={ + 200: example_response( + [ + { + "begin": "2024-09-12 17:40:27.982000+00:00", + "end": "2024-09-12 18:03:23.132000+00:00", + "id": "6BA57EF2-7139-11EF-A80B-E5037504B9B1", + "name": "measurement", + }, + { + "begin": "2024-09-12 16:50:19.305000+00:00", + "end": "2024-09-12 17:14:04.475000+00:00", + "id": "6BAD466E-7139-11EF-8E60-927A210BA97E", + "name": "measurement", + }, + ] + ), + 400: example_error("Parameter error"), + }, +) +async def run_periods(run: str): + crucible = CrucibleService(CONFIGPATH) + return crucible.periods(run) + + +@router.get( + "/api/v1/ilab/iterations/{iteration}/samples", + summary="Returns a list of InstructLab iteration samples", + description="Returns a list of iterations for a specified iteration ID.", + responses={ + 200: example_response( + [ + { + "id": "6BB8BD00-7139-11EF-B2B2-942D604C0B7B", + "num": "1", + "path": None, + "status": "pass", + } + ] + ), + 400: example_error("Parameter error"), + }, +) +async def iteration_samples(iteration: str): + crucible = CrucibleService(CONFIGPATH) + return crucible.samples(iteration=iteration) + + +@router.get( + "/api/v1/ilab/runs/{run}/timeline", + summary="Returns the 'timeline' of a run", + description="Describes the sequence of iterations, samples, and periods.", + responses={ + 200: example_response( + { + "run": { + "id": "70d3b53f-c588-49a3-91c2-7fcf3927be7e", + "iterations": [ + { + "id": "BFC16DA6-60C8-11EF-AB10-CF940109872B", + "num": 1, + "path": None, + "primary-metric": "ilab::train-samples-sec", + "primary-period": "measurement", + "status": "pass", + "samples": [ + { + "id": "C021BECC-60C8-11EF-A619-E0BC70D6C320", + "num": "1", + "path": None, + "status": "pass", + "periods": [ + { + "begin": "2024-08-22 19:09:08.642000+00:00", + "end": "2024-08-22 20:04:32.889000+00:00", + "id": "C022CDC6-60C8-11EF-BA80-AFE7B4B2692B", + "name": "measurement", + } + ], + } + ], + } + ], + "begin": "2024-08-22 19:09:08.642000+00:00", + "end": "2024-08-22 20:04:32.889000+00:00", + } + } + ), + 400: 
example_error("Parameter error"), + }, +) +async def timeline(run: str): + crucible = CrucibleService(CONFIGPATH) + return crucible.timeline(run) + + +@router.get( + "/api/v1/ilab/runs/{run}/metrics", + summary="Describe the metrics collected for a run", + description="Returns metric labels along with breakout names and values.", + responses={ + 200: example_response( + { + "ilab::train-samples-sec": { + "periods": ["C022CDC6-60C8-11EF-BA80-AFE7B4B2692B"], + "breakdowns": { + "benchmark-group": ["unknown"], + "benchmark-name": ["unknown"], + "benchmark-role": ["client"], + "csid": ["1"], + "cstype": ["client"], + "endpoint-label": ["remotehosts-1"], + "engine-id": ["1"], + "engine-role": ["benchmarker"], + "engine-type": ["client"], + "hosted-by": ["nvd-srv-29.nvidia.eng.rdu2.dc.redhat.com"], + "hostname": ["nvd-srv-29.nvidia.eng.rdu2.dc.redhat.com"], + "hypervisor-host": ["none"], + "osruntime": ["podman"], + "tool-name": ["unknown"], + "userenv": ["rhel-ai"], + }, + } + }, + ), + 400: example_error("Parameter error"), + }, +) +async def metrics(run: str): + crucible = CrucibleService(CONFIGPATH) + return crucible.metrics_list(run) + + +@router.get( + "/api/v1/ilab/runs/{run}/breakouts/{metric}", + summary="Returns breakout options for a metric", + description="Describes the breakout names and available values for a run.", + responses={ + 200: example_response( + { + "label": "mpstat::Busy-CPU", + "class": ["throughput"], + "type": "Busy-CPU", + "source": "mpstat", + "breakdowns": {"num": ["8", "72"], "thread": [0, 1]}, + } + ), + 400: example_error("Metric name not found for run "), + }, +) +async def metric_breakouts( + run: str, + metric: str, + name: Annotated[ + Optional[list[str]], + Query( + description="List of name[=key] to match", + examples=["cpu=10", "cpu=10,cpu=110"], + ), + ] = None, + period: Annotated[ + Optional[list[str]], + Query( + description="List of periods to match", + examples=["", ","], + ), + ] = None, +): + crucible = CrucibleService(CONFIGPATH) + return crucible.metric_breakouts(run, metric, names=name, periods=period) + + +@router.get( + "/api/v1/ilab/runs/{run}/data/{metric}", + summary="Returns metric data collected for a run", + description="Returns data collected for a specified Run ID metric.", + responses={ + 200: example_response( + [ + { + "begin": "2024-08-22 20:04:05.072000+00:00", + "end": "2024-08-22 20:04:19.126000+00:00", + "duration": 14.055, + "value": 9.389257233311497, + }, + { + "begin": "2024-08-22 20:04:19.127000+00:00", + "end": "2024-08-22 20:04:32.889000+00:00", + "duration": 13.763, + "value": 9.552584444155011, + }, + ] + ), + 400: example_error("No matches for ilab::train-samples-sc+cpu=10"), + 422: example_response( + response={ + "detail": [ + { + "message": "More than one metric (2) probably means you should add filters", + "names": {"dev": ["sdb", "sdb3"]}, + "periods": [], + } + ] + } + ), + }, +) +async def metric_data( + run: str, + metric: str, + name: Annotated[ + Optional[list[str]], + Query( + description="List of name[=key] to match", + examples=["cpu=10", "cpu=10,cpu=110"], + ), + ] = None, + period: Annotated[ + Optional[list[str]], + Query( + description="List of periods to match", + examples=["", ","], + ), + ] = None, + aggregate: Annotated[ + bool, Query(description="Allow aggregation of metrics") + ] = False, +): + crucible = CrucibleService(CONFIGPATH) + return crucible.metrics_data( + run, metric, names=name, periods=period, aggregate=aggregate + ) + + +@router.get( + "/api/v1/ilab/runs/{run}/summary/{metric}", 
+ summary="Returns metric data collected for a run", + description="Returns data collected for a specified Run ID metric.", + responses={ + 200: example_response( + { + "count": 234, + "min": 7.905045031896648, + "max": 9.666444615077308, + "avg": 9.38298722585416, + "sum": 2195.6190108498736, + } + ), + 400: example_error("No matches for ilab::train-samples-sc+cpu=10"), + 422: example_response( + response={ + "detail": [ + { + "message": "More than one metric (2) probably means you should add filters", + "names": {"dev": ["sdb", "sdb3"]}, + "periods": [], + } + ] + } + ), + }, +) +async def metric_summary( + run: str, + metric: str, + name: Annotated[ + Optional[list[str]], + Query( + description="List of name[=key] to match", + examples=["cpu=10", "cpu=10,cpu=110"], + ), + ] = None, + period: Annotated[ + Optional[list[str]], + Query( + description="List of periods to match", + examples=["", ","], + ), + ] = None, +): + crucible = CrucibleService(CONFIGPATH) + return crucible.metrics_summary(run, metric, names=name, periods=period) + + +@router.post( + "/api/v1/ilab/runs/multigraph", + summary="Returns overlaid Plotly graph objects for a run", + description="Returns metric data in a form usable by the Plot React component.", + responses={ + 200: example_response( + response={ + "iostat::operations-merged-sec": [ + { + "x": [ + "2024-09-05 22:01:52+00:00", + "2024-09-05 21:56:37+00:00", + "2024-09-05 21:56:52+00:00", + ], + "y": [0.0, 0.0, 0.33], + "name": "Metric iostat::operations-merged-sec cmd=read,dev=sdb", + "type": "scatter", + "mode": "markers", + "orientation": "h", + "labels": {"x": "sample timestamp", "y": "samples / second"}, + } + ] + } + ), + 400: example_error("No matches for ilab::train-samples-sec"), + 422: example_response( + response={ + "detail": [ + { + "message": "More than one metric (2) probably means you should add filters", + "names": {"dev": ["sdb", "sdb3"]}, + "periods": [], + } + ] + } + ), + }, +) +async def metric_graph_body(graphs: GraphList): + crucible = CrucibleService(CONFIGPATH) + return crucible.metrics_graph(graphs) + + +@router.get( + "/api/v1/ilab/runs/{run}/graph/{metric}", + summary="Returns a single Plotly graph object for a run", + description="Returns metric data in a form usable by the Plot React component.", + responses={ + 200: example_response( + response={ + "iostat::operations-merged-sec": [ + { + "x": [ + "2024-09-05 22:01:52+00:00", + "2024-09-05 21:56:37+00:00", + "2024-09-05 21:56:52+00:00", + ], + "y": [0.0, 0.0, 0.33], + "name": "Metric iostat::operations-merged-sec cmd=read,dev=sdb", + "type": "scatter", + "mode": "markers", + "orientation": "h", + "labels": {"x": "sample timestamp", "y": "samples / second"}, + } + ] + } + ), + 400: example_error("No matches for ilab::train-samples-sec"), + 422: example_response( + response={ + "detail": [ + { + "message": "More than one metric (2) probably means you should add filters", + "names": {"dev": ["sdb", "sdb3"]}, + "periods": [], + } + ] + } + ), + }, +) +async def metric_graph_param( + run: str, + metric: str, + name: Annotated[ + Optional[list[str]], + Query( + description="List of name[=key] to match", + examples=["cpu=10", "cpu=10,cpu=110"], + ), + ] = None, + period: Annotated[ + Optional[list[str]], + Query( + description="List of periods to match", + examples=["", ","], + ), + ] = None, + aggregate: Annotated[ + bool, Query(description="Allow aggregation of metrics") + ] = False, +): + crucible = CrucibleService(CONFIGPATH) + return crucible.metrics_graph( + GraphList( + run=run, 
+ name=metric, + graphs=[ + Graph(metric=metric, aggregate=aggregate, names=name, periods=period) + ], + ) + ) + + +@router.get( + "/api/v1/ilab/info", + summary="Returns info about the Crucible OpenSearch instance", + description="Returns info about the Crucible OpenSearch instance", + responses={ + 200: example_response( + { + "name": "node.example.com", + "cluster_name": "opensearch", + "cluster_uuid": "YYaHMEjMT9G8z31-R7tJDA", + "version": { + "distribution": "opensearch", + "number": "2.15.0", + "build_type": "rpm", + "build_hash": "61dbcd0795c9bfe9b81e5762175414bc38bbcadf", + "build_date": "2024-06-20T03:27:31.591886152Z", + "build_snapshot": False, + "lucene_version": "9.10.0", + "minimum_wire_compatibility_version": "7.10.0", + "minimum_index_compatibility_version": "7.0.0", + }, + } + ), + }, +) +async def info(): + crucible = CrucibleService(CONFIGPATH) + return crucible.info + + +@router.get( + "/api/v1/ilab/{index}/fields", + summary="Returns a list of Crucible index fields", + description="Returns a list of Crucible index fields.", + responses={ + 200: example_response( + { + "cdm": ["ver"], + "run": ["name", "end", "begin", "email", "benchmark", "source", "id"], + } + ), + 400: example_error("Index name 'foo' doesn't exist"), + }, +) +async def fields(index: str): + crucible = CrucibleService(CONFIGPATH) + return crucible.fields(index=index) diff --git a/backend/app/main.py b/backend/app/main.py index e25f9d10..c68d4c5b 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -1,6 +1,8 @@ +import sys +import traceback import typing -from fastapi import FastAPI, Request +from fastapi import FastAPI, HTTPException, Request from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import JSONResponse import orjson @@ -15,24 +17,44 @@ def render(self, content: typing.Any) -> bytes: return orjson.dumps(content) -origins = [ - "http://localhost:3000", - "localhost:3000" -] - -app = FastAPI(default_response_class=ORJSONResponse, - docs_url="/docs", - redoc_url=None, - title="CPT-Dashboard API Documentation", - version="0.0.1", - contact={ - "name": "OCP PerfScale Jedi", - "url": "https://redhat.enterprise.slack.com/archives/C05CDC19ZKJ", - }, - license_info={ - "name": "Apache 2.0", - "url": "https://www.apache.org/licenses/LICENSE-2.0", - }) +origins = ["http://localhost:3000", "localhost:3000"] + +app = FastAPI( + default_response_class=ORJSONResponse, + docs_url="/docs", + redoc_url=None, + title="CPT-Dashboard API Documentation", + version="0.0.1", + contact={ + "name": "OCP PerfScale Jedi", + "url": "https://redhat.enterprise.slack.com/archives/C05CDC19ZKJ", + }, + license_info={ + "name": "Apache 2.0", + "url": "https://www.apache.org/licenses/LICENSE-2.0", + }, +) + + +@app.middleware("http") +async def report_exceptions(request: Request, call_next): + try: + return await call_next(request) + except Exception as e: + if isinstance(e, HTTPException): + raise + tb = e.__traceback__ + print(f"Unhandled exception {e.__class__.__name__}: {str(e)}") + where = "unknown" + while tb is not None: + where = f"{tb.tb_frame.f_code.co_filename}:{tb.tb_lineno}" + print( + f" {where} {tb.tb_frame.f_code.co_name}", + file=sys.stderr, + ) + tb = tb.tb_next + return JSONResponse(status_code=500, content={"message": f"Unhandled server error at {where}: {str(e)}"}) + app.add_middleware( CORSMiddleware, @@ -42,15 +64,17 @@ def render(self, content: typing.Any) -> bytes: allow_headers=["*"], ) -routes_to_reroute = ['/'] +routes_to_reroute = ["/"] + -@app.middleware('http') 
+@app.middleware("http") async def some_middleware(request: Request, call_next): if request.url.path in routes_to_reroute: - request.scope['path'] = '/docs' - headers = dict(request.scope['headers']) - headers[b'custom-header'] = b'my custom header' - request.scope['headers'] = [(k, v) for k, v in headers.items()] + request.scope["path"] = "/docs" + headers = dict(request.scope["headers"]) + headers[b"custom-header"] = b"my custom header" + request.scope["headers"] = [(k, v) for k, v in headers.items()] return await call_next(request) + app.include_router(router) diff --git a/backend/app/services/crucible_svc.py b/backend/app/services/crucible_svc.py new file mode 100644 index 00000000..6a61dadc --- /dev/null +++ b/backend/app/services/crucible_svc.py @@ -0,0 +1,1608 @@ +from dataclasses import dataclass +import sys +from collections import defaultdict +from datetime import datetime, timezone +import time +from typing import Any, Iterator, Optional, Tuple, Union + +from pydantic import BaseModel + +from app import config +from elasticsearch import Elasticsearch, NotFoundError +from fastapi import HTTPException, status + + +class Graph(BaseModel): + metric: str + aggregate: bool = False + names: Optional[list[str]] = None + periods: Optional[list[str]] = None + + +class GraphList(BaseModel): + run: str + name: str + graphs: list[Graph] + + +colors = [ + "black", + "aqua", + "blue", + "fuschia", + "gray", + "green", + "maroon", + "navy", + "olive", + "teal", + "silver", + "lightskyblue", + "mediumspringgreen", + "mistyrose", + "darkgoldenrod", + "cadetblue", + "chocolate", + "coral", + "brown", + "bisque", + "deeppink", + "sienna", +] + + +@dataclass +class Term: + namespace: str + key: str + value: str + + +class Parser: + """Help parsing filter expressions.""" + + def __init__(self, term: str): + """Construct an instance to help parse query parameter expressions + + These consist of a sequence of tokens separated by delimiters. Each + token may be quoted to allow matching against strings with spaces. + + For example, `param:name="A string"` + + Args: + term: A filter expression to parse + """ + self.buffer = term + self.context = term + self.offset = 0 + + def _next_token( + self, delimiters: list[str] = [], optional: bool = False + ) -> Tuple[str, Union[str, None]]: + """Extract the next token from an expression + + Tokens may be quoted; the quotes are removed. for example, the two + expressions `'param':"workflow"='"sdg"'` and `param:workflow:sdg` are + identical. 
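+
+        An illustrative trace (not part of the original patch) showing how a
+        filter expression is consumed, mirroring the calls made by
+        _build_filter_options:
+
+            p = Parser("param:workflow=sdg")
+            p._next_token([":"])  # -> ("param", ":")
+            p._next_token(["="])  # -> ("workflow", "=")
+            p._next_token()       # -> ("sdg", None)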
+ + Args: + delimiters: a list of delimiter characters + optional: whether the terminating delimiter is optional + + Returns: + A tuple consisting of the token and the delimiter (or None if + parsing reached the end of the expression and the delimiter was + optional) + """ + + @dataclass + class Quote: + open: int + quote: str + + quoted: list[Quote] = [] + next_char = None + token = "" + first_quote = None + for o in range(len(self.buffer)): + next_char = self.buffer[o] + if next_char in delimiters and not quoted: + self.buffer = self.buffer[o + 1 :] + self.offset += o + 1 + break + elif next_char in ('"', "'"): + if o == 0: + first_quote = next_char + if quoted and quoted[-1].quote == next_char: + quoted.pop() + else: + quoted.append(Quote(o, next_char)) + token += next_char + else: + next_char = None + if quoted: + q = quoted[-1] + c = self.context + i = q.open + self.offset + annotated = c[:i] + "[" + c[i] + "]" + c[i + 1 :] + raise HTTPException( + status.HTTP_400_BAD_REQUEST, f"Unterminated quote at {annotated!r}" + ) + + # If delimiters are specified, and not optional, then we didn't + # find one, and that's an error. + if not optional and delimiters: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + f"Missing delimiter from {','.join(delimiters)} after {token!r}", + ) + self.buffer = "" + self.offset = len(self.context) + return (token, next_char) if not first_quote else (token[1:-1], next_char) + + +class CrucibleService: + + def __init__(self, configpath="crucible"): + """Initialize a Crucible CDM (OpenSearch) connection. + + This includes making an "info" call to confirm and record the server + response. + + Args: + configpath: The Vyper config path (e.g., "ilab.crucible") + """ + self.cfg = config.get_config() + self.user = self.cfg.get(configpath + ".username") + self.password = self.cfg.get(configpath + ".password") + self.auth = (self.user, self.password) if self.user or self.password else None + self.url = self.cfg.get(configpath + ".url") + self.elastic = Elasticsearch(self.url, basic_auth=self.auth) + self.info = None + try: + self.info = self.elastic.info() # Test the connection + except Exception as e: + print(f"Failed to connect: {e}", file=sys.stderr) + raise HTTPException( + status.HTTP_502_BAD_GATEWAY, + f"The configured Crucible search instance ({self.url}) does not respond", + ) + + @staticmethod + def _split_list(alist: Optional[list[str]] = None) -> list[str]: + """Split a list of parameters + + For simplicity, the APIs supporting "list" query parameters allow + each element in the list to be a comma-separated list of strings. + For example, ["a", "b", "c"] is logically the same as ["a,b,c"]. + + This method normalizes the second form into first to simplify life for + consumers. + + Args: + alist: list of names or name lists + + Returns: + A simple list of options + """ + l: list[str] = [] + if alist: + for n in alist: + l.extend(n.split(",")) + return l + + @staticmethod + def normalize_date(value: Optional[Union[int, str, datetime]]) -> int: + """Normalize date parameters + + The Crucible data model stores dates as string representations of an + integer "millseconds-from-epoch" value. To allow flexibility, this + Crucible service allows incoming dates to be specified as ISO-format + strings, as integers, or as the stringified integer. + + That is, "2024-09-12 18:29:35.123000+00:00", "1726165775123", and + 1726165775123 are identical. 
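+
+        Illustrative calls (assumed values, consistent with the example above):
+
+            normalize_date(1726165775123)       # -> 1726165775123
+            normalize_date("1726165775123")     # -> 1726165775123
+            normalize_date("2024-09-12 18:29:35.123000+00:00")  # -> same value in ms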
+ + Args: + value: Representation of a date-time value + + Returns: + The integer milliseconds-from-epoch equivalent + """ + try: + if isinstance(value, int): + return value + elif isinstance(value, datetime): + return int(value.timestamp() * 1000.0) + elif isinstance(value, str): + try: + return int(value) + except ValueError: + pass + try: + d = datetime.fromisoformat(value) + return int(d.timestamp() * 1000.0) + except ValueError: + pass + except Exception as e: + print(f"normalizing {type(value).__name__} {value} failed with {str(e)}") + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + f"Date representation {value} is not a date string or timestamp", + ) + + @staticmethod + def _hits( + payload: dict[str, Any], fields: Optional[list[str]] = None + ) -> Iterator[dict[str, Any]]: + """Helper to iterate through OpenSearch query matches + + Iteratively yields the "_source" of each hit. As a convenience, can + yield a sub-object of "_source" ... for example, specifying the + optional "fields" as ["metric_desc"] will yield the equivalent of + hit["_source"]["metric_desc"] + + Args: + payload: OpenSearch reponse payload + fields: Optional sub-fields of "_source" + + Returns: + Yields each object from the "greatest hits" list + """ + if "hits" not in payload: + raise HTTPException( + status_code=500, detail=f"Attempt to iterate hits for {payload}" + ) + hits = payload.get("hits", {}).get("hits", []) + for h in hits: + source = h["_source"] + if fields: + for f in fields: + source = source[f] + yield source + + @staticmethod + def _aggs(payload: dict[str, Any], aggregation: str) -> Iterator[dict[str, Any]]: + """Helper to access OpenSearch aggregations + + Iteratively yields the name and value of each aggregation returned + by an OpenSearch query. This can also be used for nested aggregations + by specifying an aggregation object. + + Args: + payload: A JSON dict containing an "aggregations" field + + Returns: + Yields each aggregation from an aggregations object + """ + if "aggregations" not in payload: + raise HTTPException( + status_code=500, + detail=f"Attempt to iterate missing aggregations for {payload}", + ) + aggs = payload["aggregations"] + if aggregation not in aggs: + raise HTTPException( + status_code=500, + detail=f"Attempt to iterate missing aggregation {aggregation} for {payload}", + ) + for agg in aggs[aggregation]["buckets"]: + yield agg + + @staticmethod + def _date(timestamp: str) -> str: + """Convert stringified integer milliseconds-from-epoch to ISO date""" + return str(datetime.fromtimestamp(int(timestamp) / 1000, timezone.utc)) + + @classmethod + def _format_data(cls, data: dict[str, Any]) -> dict[str, Any]: + """Helper to format a "metric_data" object + + Crucible stores the date, duration, and value as strings, so this + converts them to more useful values. The end timestamp is converted + to an ISO date-time string; the duration and value to floating point + numbers. + + Args: + data: a "metric_data" object + + Returns: + A neatly formatted "metric_data" object + """ + return { + "begin": cls._date(data["begin"]), + "end": cls._date(data["end"]), + "duration": int(data["duration"]) / 1000, + "value": float(data["value"]), + } + + @classmethod + def _format_period(cls, period: dict[str, Any]) -> dict[str, Any]: + """Helper to format a "period" object + + Crucible stores the date values as stringified integers, so this + converts the begin and end timestamps to ISO date-time strings. 
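+
+        Illustrative input/output (invented values):
+
+            {"begin": "1726165775000", "end": "1726165790000",
+             "id": "...", "name": "measurement"}
+              -> {"begin": "2024-09-12 18:29:35+00:00",
+                  "end": "2024-09-12 18:29:50+00:00",
+                  "id": "...", "name": "measurement"}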
+ + Args: + period: a "period" object + + Returns: + A neatly formatted "period" object + """ + return { + "begin": cls._date(timestamp=period["begin"]), + "end": cls._date(period["end"]), + "id": period["id"], + "name": period["name"], + } + + @classmethod + def _build_filter_options( + cls, filter: Optional[list[str]] = None + ) -> Tuple[Optional[list[dict[str, Any]]], Optional[list[dict[str, Any]]]]: + """Build filter terms for tag and parameter filter terms + + Args: + filter: list of filter terms like "param:key=value" + + Returns: + An OpenSearch filter list to apply the filters + """ + terms = defaultdict(list) + for term in cls._split_list(filter): + p = Parser(term) + namespace, _ = p._next_token([":"]) + key, operation = p._next_token(["="]) + value, _ = p._next_token() + print(f"FILTER: {namespace}:{key}{operation}{value}") + if namespace == "param": + key_field = "param.arg" + value_field = "param.val" + else: + key_field = "tag.name" + value_field = "tag.val" + terms[namespace].append( + { + "bool": { + "must": [ + {"term": {key_field: key}}, + {"term": {value_field: value}}, + ] + } + } + ) + param_filter = None + tag_filter = None + if "param" in terms: + param_filter = [{"dis_max": {"queries": terms["param"]}}] + if "tag" in terms: + tag_filter = [{"dis_max": {"queries": terms["tag"]}}] + return param_filter, tag_filter + + @classmethod + def _name_filters( + cls, namelist: Optional[list[str]] = None + ) -> list[dict[str, Any]]: + """Build filter terms for metric breakdown names + + for example, "cpu=10" filters for metric data descriptors where the + breakdown name "cpu" exists and has a value of 10. + + Args: + namelist: list of possibly comma-separated list values + + Returns: + A list of filters to match breakout terms + """ + names: list[str] = cls._split_list(namelist) + filters = [] + for e in names: + try: + n, v = e.split("=", maxsplit=1) + except ValueError: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, f"Filter item {e} must be '='" + ) + filters.append({"term": {f"metric_desc.names.{n}": v}}) + return filters + + @classmethod + def _period_filters( + cls, periodlist: Optional[list[str]] = None + ) -> list[dict[str, Any]]: + """Build period filters + + Generate filter terms to match against a list of period IDs. + + Args: + period: list of possibly comma-separated period IDs + + Returns: + A filter term that requires a period.id match only for metric_desc + documents with a period. + """ + pl: list[str] = cls._split_list(periodlist) + if pl: + return [ + { + "dis_max": { + "queries": [ + {"bool": {"must_not": {"exists": {"field": "period"}}}}, + {"terms": {"period.id": pl}}, + ] + } + } + ] + else: + return [] + + @classmethod + def _filter_metric_desc( + cls, + run: str, + metric: str, + names: Optional[list[str]] = None, + periods: Optional[list[str]] = None, + ) -> list[dict[str, Any]]: + """Helper for filtering metric descriptions + + We normally filter by run, metric "label", and optionally by breakout + names and periods. This encapsulates the filter contruction. 
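+
+        For example, run="abc" and metric="ilab::sdg-samples-sec" with no name
+        or period filters produce (sketch):
+
+            [
+                {"term": {"run.id": "abc"}},
+                {"term": {"metric_desc.source": "ilab"}},
+                {"term": {"metric_desc.type": "sdg-samples-sec"}},
+            ]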
+ + Args: + run: run ID + metric: metric label (ilab::sdg-samples-sec) + names: list of "name=value" filters + periods: list of period IDs + + Returns: + A list of OpenSearch filter expressions + """ + source, type = metric.split("::") + return ( + [ + {"term": {"run.id": run}}, + {"term": {"metric_desc.source": source}}, + {"term": {"metric_desc.type": type}}, + ] + + cls._name_filters(names) + + cls._period_filters(periods) + ) + + @staticmethod + def _get_index(root: str) -> str: + return "cdmv7dev-" + root + + def _search( + self, index: str, query: Optional[dict[str, Any]] = None, **kwargs + ) -> dict[str, Any]: + """Issue an OpenSearch query + + Args: + index: The "base" CDM index name, e.g., "run", "metric_desc" + query: An OpenSearch query object + kwargs: Additional OpenSearch parameters + + Returns: + The OpenSearch response payload (JSON dict) + """ + idx = self._get_index(index) + start = time.time() + value = self.elastic.search(index=idx, body=query, **kwargs) + print( + f"QUERY on {idx} took {time.time() - start} seconds, " + f"hits: {value.get('hits', {}).get('total')}" + ) + return value + + def search( + self, + index: str, + filters: Optional[list[dict[str, Any]]] = None, + aggregations: Optional[dict[str, Any]] = None, + sort: Optional[list[dict[str, str]]] = None, + source: Optional[str] = None, + size: Optional[int] = None, + offset: Optional[int] = None, + **kwargs, + ) -> dict[str, Any]: + """OpenSearch query helper + + Combine index, filters, aggregations, sort, and pagination options + into an OpenSearch query. + + Args: + index: "root" CDM index name ("run", "metric_desc", ...) + filters: list of JSON dict filter terms {"term": {"name": "value}} + aggregations: list of JSON dict aggregations {"name": {"term": "name"}} + sort: list of JSON dict sort terms ("name": "asc") + size: The number of hits to return; defaults to "very large" + offset: The number of hits to skip, for pagination + kwargs: Additional OpenSearch options + + Returns: + The OpenSearch response + """ + f = filters if filters else [] + query = { + "size": 250000 if size is None else size, + "query": {"bool": {"filter": f}}, + } + if sort: + query.update({"sort": sort}) + if source: + query.update({"_source": source}) + if offset: + query.update({"from": offset}) + if aggregations: + query.update({"aggs": aggregations}) + return self._search(index, query, **kwargs) + + def _get_metric_ids( + self, + run: str, + metric: str, + namelist: Optional[list[str]] = None, + periodlist: Optional[list[str]] = None, + highlander: bool = True, + ) -> list[str]: + """Generate a list of matching metric_desc IDs + + Given a specific run and metric name, and a set of breakout filters, + returns a list of metric desc IDs that match. + + Generally, breakout data isn't useful unless the set of filters + produces a single metric desc ID, however this can be overridden. + + If a single ID is required to produce a consistent metric, and the + supplied filters produce more than one, raise a 422 HTTP error + (UNPROCESSABLE CONTENT) with a response body showing the unsatisfied + breakouts (name and available values). 
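+
+        The 422 detail body has the shape shown in the endpoint examples,
+        e.g. (illustrative values):
+
+            [{"message": "More than one metric (2) probably means you should add filters",
+              "names": {"dev": ["sdb", "sdb3"]},
+              "periods": []}]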
+ + Args: + run: run ID + metric: combined metric name (e.g., sar-net::packets-sec) + namelist: a list of breakout filters like "type=physical" + periodlist: a list of period IDs + highlander: if True, there can be only one (metric ID) + + Returns: + A list of matching metric_desc ID value(s) + """ + filters = self._filter_metric_desc(run, metric, namelist, periodlist) + metrics = self.search( + "metric_desc", + filters=filters, + ignore_unavailable=True, + ) + if len(metrics["hits"]["hits"]) < 1: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + ( + f"No matches for {metric}" + f"{('+' + ','.join(namelist) if namelist else '')}" + ), + ) + ids = [h["metric_desc"]["id"] for h in self._hits(metrics)] + if len(ids) < 2 or not highlander: + return ids + + # This probably means we're not filtering well enouch for a useful + # summary. Diagnose how to improve it. + names = defaultdict(set) + periods = set() + response = { + "message": f"More than one metric ({len(ids)}) probably means " + "you should add filters" + } + for m in self._hits(metrics): + if "period" in m: + periods.add(m["period"]["id"]) + for n, v in m["metric_desc"]["names"].items(): + names[n].add(v) + + # We want to help filter a consistent summary, so only show those + # names with more than one value. + response["names"] = {n: sorted(v) for n, v in names.items() if len(v) > 1} + response["periods"] = list(periods) + raise HTTPException( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=[response] + ) + + def _data_range(self, periods: Optional[list[str]] = None) -> list[dict[str, Any]]: + """Create a timestamp range filter + + Args: + periods: a list of CDM period IDs + + Returns: + Constructs a range filter for the earliest begin timestamp and the + latest end timestamp among the specified periods. + """ + if periods: + ps = self._split_list(periods) + matches = self.search("period", filters=[{"terms": {"period.id": ps}}]) + start = min([int(h["begin"]) for h in self._hits(matches, ["period"])]) + end = max([int(h["end"]) for h in self._hits(matches, ["period"])]) + return [ + {"range": {"metric_data.begin": {"gte": str(start)}}}, + {"range": {"metric_data.end": {"lte": str(end)}}}, + ] + else: + return [] + + def _get_run_ids( + self, index: str, filters: Optional[list[dict[str, Any]]] = None + ) -> set[str]: + """Return a set of run IDs matching a filter + + Documents in the specified index must have "run.id" fields. Returns + a set of unique run IDs matched by the filter in the specified index. + + Args: + index: root CDM index name + filters: a list of OpenSearch filter terms + + Returns: + a set of unique run ID values + """ + filtered = self.search( + index, source="run.id", filters=filters, ignore_unavailable=True + ) + return set([x["id"] for x in self._hits(filtered, ["run"])]) + + def run_filters(self) -> dict[str, dict[str, int]]: + """Return possible tag and filter terms + + Return a description of tag and param filter terms meaningful + across all datasets. TODO: we should support date-range and benchmark + filtering. Consider supporting all `run` API filtering, which would + allow adjusting the filter popups to drop options no longer relevant + to a given set. + + Returns: + A three-level JSON dict; the first level is the namespace (param or + tag), the second level is the parameter or tag name, the third + level key is each value present in the index, and the value is the + number of times that value appears. 
+ + { + "param": { + {"gpus": { + "4": 22, + "8": 2 + } + } + } + """ + tags = self.search( + "tag", + size=0, + aggregations={ + "key": { + "terms": {"field": "tag.name", "size": 10000}, + "aggs": {"values": {"terms": {"field": "tag.val", "size": 10000}}}, + } + }, + ignore_unavailable=True, + ) + params = self.search( + "param", + size=0, + aggregations={ + "key": { + "terms": {"field": "param.arg", "size": 10000}, + "aggs": { + "values": {"terms": {"field": "param.val", "size": 10000}} + }, + } + }, + ignore_unavailable=True, + ) + result = defaultdict(lambda: defaultdict(lambda: defaultdict(int))) + for t in self._aggs(params, "key"): + for v in t["values"]["buckets"]: + result["param"][t["key"]][v["key"]] += v["doc_count"] + for t in self._aggs(tags, "key"): + for v in t["values"]["buckets"]: + result["tag"][t["key"]][v["key"]] += v["doc_count"] + return result + + def runs( + self, + benchmark: Optional[str] = None, + filter: Optional[list[str]] = None, + name: Optional[str] = None, + start: Optional[Union[int, str, datetime]] = None, + end: Optional[Union[int, str, datetime]] = None, + offset: Optional[int] = None, + sort: Optional[list[str]] = None, + size: Optional[int] = None, + **kwargs, + ) -> dict[str, Any]: + """Return matching Crucible runs + + Filtered list of runs + + Args: + benchmark: Include runs with specified benchmark name + name: Include runs by owner name + start: Include runs starting at timestamp + end: Include runs ending no later than timestamp + filter: List of tag/param filter terms (parm:key=value) + sort: List of sort terms (column:) + size: Include up to runs in output + offset: Use size/from pagination instead of search_after + + Returns: + JSON object with "runs" list, "size", "next", and "total" fields. + """ + + # We need to remove runs which don't match against 'tag' or 'param' + # filter terms. The CDM schema doesn't make it possible to do this in + # one shot. Instead, we run queries against the param and tag indices + # separately, producing a list of run IDs which we'll exclude from the + # final collection. + # + # If there are no matches, we can exit early. (TODO: should this be an + # error, or just a success with an empty list?) 
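+        # Illustrative example (not from the original patch): a query such as
+        #   filter=["param:model=/home/models/granite-7b-lab/", "tag:topology=none"]
+        # yields one dis_max filter against the "param" index and one against
+        # the "tag" index; the run IDs matched by each are used below to drop
+        # iteration hits whose run doesn't satisfy both filters.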
+ param_filters, tag_filters = self._build_filter_options(filter) + results = {} + filters = [] + if benchmark: + filters.append({"term": {"run.benchmark": benchmark}}) + if name: + filters.append({"term": {"run.name": name}}) + if start or end: + s = None + e = None + if start: + s = self.normalize_date(start) + results["startDate"] = datetime.fromtimestamp( + s / 1000.0, tz=timezone.utc + ) + if end: + e = self.normalize_date(end) + results["endDate"] = datetime.fromtimestamp(e / 1000.0, tz=timezone.utc) + + if s and e and s > e: + raise HTTPException( + status_code=422, + detail={ + "error": "Invalid date format, start_date must be less than end_date" + }, + ) + cond = {} + if s: + cond["gte"] = str(s) + if e: + cond["lte"] = str(e) + filters.append({"range": {"run.begin": cond}}) + if sort: + sorters = self._split_list(sort) + results["sort"] = sorters + sort_terms = [] + for s in sorters: + DIRECTIONS = ("asc", "desc") + FIELDS = ( + "begin", + "benchmark", + "email", + "end", + "id", + "name", + "source", + "status", + ) + key, dir = s.split(":", maxsplit=1) + if dir not in DIRECTIONS: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + f"Sort direction {dir!r} must be one of {','.join(DIRECTIONS)}", + ) + if key not in FIELDS: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + f"Sort key {key!r} must be one of {','.join(FIELDS)}", + ) + sort_terms.append({f"run.{key}": dir}) + else: + sort_terms = [{"run.begin": "asc"}] + + if size: + results["size"] = size + if offset: + results["offset"] = offset + + # In order to filter by param or tag values, we need to produce a list + # of matching RUN IDs from each index. We'll then drop any RUN ID that's + # not on both lists. + if tag_filters: + tagids = self._get_run_ids("tag", tag_filters) + if param_filters: + paramids = self._get_run_ids("param", param_filters) + + # If it's obvious we can't produce any matches at this point, exit. + if (tag_filters and len(tagids) == 0) or (param_filters and len(paramids) == 0): + results.update({"results": [], "count": 0, "total": 0}) + return results + + hits = self.search( + "iteration", + size=size, + offset=offset, + sort=sort_terms, + filters=filters, + **kwargs, + ignore_unavailable=True, + ) + rawtags = self.search("tag", ignore_unavailable=True) + rawparams = self.search("param", ignore_unavailable=True) + + tags = defaultdict(defaultdict) + params = defaultdict(defaultdict) + run_params = defaultdict(list) + + # Organize tags by run ID + for t in self._hits(rawtags): + tags[t["run"]["id"]][t["tag"]["name"]] = t["tag"]["val"] + + # Organize params by iteration ID + for p in self._hits(rawparams): + run_params[p["run"]["id"]].append(p) + params[p["iteration"]["id"]][p["param"]["arg"]] = p["param"]["val"] + + runs = {} + for h in self._hits(hits): + run = h["run"] + iteration = h["iteration"] + iid = iteration["id"] + rid = run["id"] + iparams = params.get(iid, {}) + + # Filter the runs by our tag and param queries + if param_filters and rid not in paramids: + continue + + if tag_filters and rid not in tagids: + continue + + # Collect unique runs: the status is "fail" if any iteration for + # that run ID failed. 
+ if rid not in runs: + runs[rid] = run + run["status"] = iteration["status"] + try: + run["begin_date"] = self._date(run["begin"]) + run["end_date"] = self._date(run["end"]) + except KeyError as e: + print(f"Missing 'run' key {str(e)} in {run}") + run["begin_date"] = self._date("0") + run["end_date"] = self._date("0") + run["params"] = iparams.copy() + run["iterations"] = [ + { + "iteration": iteration["num"], + "primary_metric": iteration["primary-metric"], + "primary_period": iteration["primary-period"], + "status": iteration["status"], + "params": iparams, + } + ] + run["primary_metrics"] = set([iteration["primary-metric"]]) + run["tags"] = tags.get(rid, {}) + else: + r = runs[rid] + r["iterations"].append( + { + "iteration": iteration["num"], + "metric": iteration["primary-metric"], + "status": iteration["status"], + "params": iparams, + } + ) + + # Iteration-specific parameter names or values are factored out + # of the run summary. (NOTE: listify the keys first so Python + # doesn't complain about deletion during the traversal.) + p = r["params"] + for k in list(p.keys()): + if k not in iparams or p[k] != iparams[k]: + del p[k] + r["primary_metrics"].add(iteration["primary-metric"]) + if iteration["status"] != "pass": + r["status"] = iteration["status"] + results.update( + { + "results": list(runs.values()), + "count": len(runs), + "total": hits["hits"]["total"]["value"], + } + ) + if offset: + results["next_offset"] = offset + size if size else len(runs) + return results + + def tags(self, run: str, **kwargs) -> dict[str, str]: + """Return the set of tags associated with a run + + Args: + run: run ID + + Returns: + JSON dict with "tag" keys showing each value + """ + tags = self.search( + index="tag", + filters=[{"term": {"run.id": run}}], + **kwargs, + ignore_unavailable=True, + ) + return {t["name"]: t["val"] for t in self._hits(tags, ["tag"])} + + def params( + self, run: Optional[str] = None, iteration: Optional[str] = None, **kwargs + ) -> dict[str, dict[str, str]]: + """Return the set of parameters for a run or iteration + + Parameters are technically associated with an iteration, but can be + aggregated for a run. (Note that, technically, values might vary across + iterations, and only one will be returned. This is OK if a run has a + single iteration, or if you know they're consistent.) + + Args: + run: run ID + iteration: iteration ID + kwargs: additional OpenSearch keywords + + Returns: + JSON dict of param key: value + """ + if not run and not iteration: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + "A params query requires either a run or iteration ID", + ) + match = {"run.id" if run else "iteration.id": run if run else iteration} + params = self.search( + index="param", + filters=[{"term": match}], + **kwargs, + ignore_unavailable=True, + ) + response = defaultdict(defaultdict) + for param in self._hits(params): + iter = param["iteration"]["id"] + arg = param["param"]["arg"] + val = param["param"]["val"] + if response.get(iter) and response.get(iter).get(arg): + print(f"Duplicate param {arg} for iteration {iter}") + response[iter][arg] = val + + # Filter out all parameter values that don't exist in all or which have + # different values. + if run: + common = {} + for iter, params in response.items(): + if not common: + common = dict(params) + else: + # We can't change a dict during iteration, so iterate over + # a list of the param keys. 
+ for param in list(common.keys()): + if param not in params or params[param] != common[param]: + del common[param] + response["common"] = common + return response + + def iterations(self, run: str, **kwargs) -> list[dict[str, Any]]: + """Return a list of iterations for a run + + Args: + run: run ID + kwargs: additional OpenSearch keywords + + Returns: + A list of iteration documents + """ + iterations = self.search( + index="iteration", + filters=[{"term": {"run.id": run}}], + **kwargs, + ignore_unavailable=True, + ) + return [i["iteration"] for i in self._hits(iterations)] + + def samples( + self, run: Optional[str] = None, iteration: Optional[str] = None, **kwargs + ): + """Return a list of samples for a run or iteration + + Args: + run: run ID + iteration: iteration ID + kwargs: additional OpenSearch keywords + + Returns: + A list of sample documents. + """ + if not run and not iteration: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + "A sample query requires either a run or iteration ID", + ) + match = {"run.id" if run else "iteration.id": run if run else iteration} + samples = self.search( + index="sample", + filters=[{"term": match}], + **kwargs, + ignore_unavailable=True, + ) + return [i["sample"] for i in self._hits(samples)] + + def periods( + self, + run: Optional[str] = None, + iteration: Optional[str] = None, + sample: Optional[str] = None, + **kwargs, + ): + """Return a list of periods associated with a run, an iteration, or a + sample + + The "period" document is normalized to represent timestamps using ISO + strings. + + Args: + run: run ID + iteration: iteration ID + sample: sample ID + kwargs: additional OpenSearch parameters + + Returns: + a list of normalized period documents + """ + if not any((run, iteration, sample)): + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + "A period query requires a run, iteration, or sample ID", + ) + match = None + if sample: + match = {"sample.id": sample} + elif iteration: + match = {"iteration.id": iteration} + else: + match = {"run.id": run} + periods = self.search( + index="period", + filters=[{"term": match}], + **kwargs, + ignore_unavailable=True, + ) + body = [] + for h in self._hits(periods): + p = h["period"] + body.append(self._format_period(p)) + return body + + def timeline(self, run: str, **kwargs) -> dict[str, Any]: + """Report the relative timeline of a run + + With nested object lists, show runs to iterations to samples to + periods. 
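+
+        Sketch of the response shape (see the /timeline endpoint example for
+        fully populated values):
+
+            {"run": {"id": "...", "begin": "...", "end": "...",
+                     "iterations": [{"id": "...", "samples": [
+                         {"id": "...", "periods": [{"id": "...", "name": "measurement"}]}]}]}}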
+ + Args: + run: run ID + kwargs: additional OpenSearch parameters + """ + itr = self.search( + index="iteration", + filters=[{"term": {"run.id": run}}], + **kwargs, + ignore_unavailable=True, + ) + sam = self.search( + index="sample", + filters=[{"term": {"run.id": run}}], + **kwargs, + ignore_unavailable=True, + ) + per = self.search( + index="period", + filters=[{"term": {"run.id": run}}], + **kwargs, + ignore_unavailable=True, + ) + samples = defaultdict(list) + periods = defaultdict(list) + + for s in self._hits(sam): + samples[s["iteration"]["id"]].append(s) + for p in self._hits(per): + periods[p["sample"]["id"]].append(p) + + iterations = [] + robj = {"id": run, "iterations": iterations} + body = {"run": robj} + for i in self._hits(itr): + if "begin" not in robj: + robj["begin"] = self._date(i["run"]["begin"]) + robj["end"] = self._date(i["run"]["end"]) + iteration = i["iteration"] + iterations.append(iteration) + iteration["samples"] = [] + for s in samples.get(iteration["id"], []): + sample = s["sample"] + sample["periods"] = [] + for pr in periods.get(sample["id"], []): + period = self._format_period(pr["period"]) + sample["periods"].append(period) + iteration["samples"].append(sample) + return body + + def metrics_list(self, run: str, **kwargs) -> dict[str, Any]: + """Return a list of metrics available for a run + + Each run may have multiple performance metrics stored. This API allows + retrieving a sorted list of the metrics available for a given run, with + the "names" selection criteria available for each and, for "periodic" + (benchmark) metrics, the defined periods for which data was gathered. + + { + "ilab::train-samples-sec": { + "periods": [{"id": , "name": "measurement"}], + "names": {"benchmark-group" ["unknown"], ...} + }, + "iostat::avg-queue-length": { + "periods": [], + "names": {"benchmark-group": ["unknown"], ...}, + }, + ... + } + + Args: + run: run ID + + Returns: + List of metrics available for the run + """ + hits = self.search( + index="metric_desc", + filters=[{"term": {"run.id": run}}], + ignore_unavailable=True, + **kwargs, + ) + met = {} + for h in self._hits(hits): + desc = h["metric_desc"] + name = desc["source"] + "::" + desc["type"] + if name in met: + record = met[name] + else: + record = {"periods": [], "breakdowns": defaultdict(set)} + met[name] = record + if "period" in h: + record["periods"].append(h["period"]["id"]) + for n, v in desc["names"].items(): + record["breakdowns"][n].add(v) + return met + + def metric_breakouts( + self, + run: str, + metric: str, + names: Optional[list[str]] = None, + periods: Optional[list[str]] = None, + ) -> dict[str, Any]: + """Help explore available metric breakdowns + + Args: + run: run ID + metric: metric label (e.g., "mpstat::Busy-CPU") + names: list of name filters ("cpu=3") + periods: list of period IDs + + Returns: + A description of all breakdown names and values, which can be + specified to narrow down metrics returns by the data, summary, and + graph APIs. 
+ + { + "label": "mpstat::Busy-CPU", + "class": [ + "throughput" + ], + "type": "Busy-CPU", + "source": "mpstat", + "breakdowns": { + "num": [ + "8", + "72" + ], + "thread": [ + 0, + 1 + ] + } + } + """ + start = time.time() + filters = self._filter_metric_desc(run, metric, names, periods) + metric_name = metric + ("" if not names else ("+" + ",".join(names))) + metrics = self.search( + "metric_desc", + filters=filters, + ignore_unavailable=True, + ) + if len(metrics["hits"]["hits"]) < 1: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + f"Metric name {metric_name} not found for run {run}", + ) + classes = set() + response = {"label": metric, "class": classes} + breakouts = defaultdict(set) + pl = set() + for m in self._hits(metrics): + desc = m["metric_desc"] + response["type"] = desc["type"] + response["source"] = desc["source"] + if desc.get("class"): + classes.add(desc["class"]) + if "period" in m: + pl.add(m["period"]["id"]) + for n, v in desc["names"].items(): + breakouts[n].add(v) + # We want to help filter a consistent summary, so only show those + # names with more than one value. + if len(pl) > 1: + response["periods"] = pl + response["breakouts"] = {n: v for n, v in breakouts.items() if len(v) > 1} + duration = time.time() - start + print(f"Processing took {duration} seconds") + return response + + def metrics_data( + self, + run: str, + metric: str, + names: Optional[list[str]] = None, + periods: Optional[list[str]] = None, + aggregate: bool = False, + ) -> list[Any]: + """Return a list of metric data + + The "aggregate" option allows aggregating various metrics across + breakout streams and periods: be careful, as this is meaningful only if + the breakout streams are sufficiently related. + + Args: + run: run ID + metric: metric label (e.g., "mpstat::Busy-CPU") + names: list of name filters ("cpu=3") + periods: list of period IDs + aggregate: aggregate multiple metric data streams + + Returns: + A sequence of data samples, showing the aggregate sample along with + the duration and end timestamp of each sample interval. + + [ + { + "end": "2024-09-12 18:27:15+00:00", + "value": 0.0, + "duration": 15.0 + }, + { + "end": "2024-09-12 18:27:30+00:00", + "value": 0.0007, + "duration": 15.0 + }, + { + "end": "2024-09-12 18:27:45+00:00", + "value": 0.0033, + "duration": 15.0 + } + ] + """ + start = time.time() + ids = self._get_metric_ids( + run, metric, names, periodlist=periods, highlander=(not aggregate) + ) + + # If we're searching by periods, filter metric data by the period + # timestamp range rather than just relying on the metric desc IDs as + # we also want to filter non-periodic tool data. 
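+ #
+ # Illustrative sketch only: assuming _data_range() yields OpenSearch
+ # range clauses over the metric_data timestamps of the selected
+ # periods, the combined filter list looks roughly like
+ #
+ # [
+ # {"terms": {"metric_desc.id": ["<id>", ...]}},
+ # {"range": {"metric_data.end": {"gte": "<period begin>"}}},
+ # {"range": {"metric_data.begin": {"lte": "<period end>"}}},
+ # ]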
+ filters = [{"terms": {"metric_desc.id": ids}}] + filters.extend(self._data_range(periods)) + + response = [] + if len(ids) > 1: + # Find the minimum sample interval of the selected metrics + aggdur = self.search( + "metric_data", + size=0, + filters=filters, + aggregations={"duration": {"stats": {"field": "metric_data.duration"}}}, + ) + interval = int(aggdur["aggregations"]["duration"]["min"]) + data = self.search( + index="metric_data", + size=0, + filters=filters, + aggregations={ + "interval": { + "histogram": {"field": "metric_data.end", "interval": interval}, + "aggs": {"value": {"sum": {"field": "metric_data.value"}}}, + } + }, + ) + for h in self._aggs(data, "interval"): + response.append( + { + "begin": self._date(h["key"] - interval), + "end": self._date(h["key"]), + "value": h["value"]["value"], + "duration": interval / 1000.0, + } + ) + else: + data = self.search("metric_data", filters=filters) + for h in self._hits(data, ["metric_data"]): + response.append(self._format_data(h)) + response.sort(key=lambda a: a["end"]) + duration = time.time() - start + print(f"Processing took {duration} seconds") + return response + + def metrics_summary( + self, + run: str, + metric: str, + names: Optional[list[str]] = None, + periods: Optional[list[str]] = None, + ) -> dict[str, Any]: + """Return a statistical summary of metric data + + Provides a statistical summary of selected data samples. + + Args: + run: run ID + metric: metric label (e.g., "mpstat::Busy-CPU") + names: list of name filters ("cpu=3") + periods: list of period IDs + + Returns: + A statistical summary of the selected metric data + + { + "count": 71, + "min": 0.0, + "max": 0.3296, + "avg": 0.02360704225352113, + "sum": 1.6761000000000001 + } + """ + start = time.time() + ids = self._get_metric_ids(run, metric, names, periodlist=periods) + filters = [{"terms": {"metric_desc.id": ids}}] + filters.extend(self._data_range(periods)) + data = self.search( + "metric_data", + size=0, + filters=filters, + aggregations={"score": {"stats": {"field": "metric_data.value"}}}, + ) + duration = time.time() - start + print(f"Processing took {duration} seconds") + return data["aggregations"]["score"] + + def metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: + """Return metrics data for a run + + Each run may have multiple performance metrics stored. This API allows + retrieving graphable time-series representation of a metric over the + period of the run, in the format defined by Plotly as configuration + settings plus an x value array and a y value array. + + { + "data": [ + { + "x": [ + "2024-08-27 09:16:27.371000", + ... + ], + "y": [ + 10.23444312132161, + ... 
+ ], + "name": "Metric ilab::train-samples-sec", + "type": "scatter", + "mode": "line", + "marker": {"color": "black"}, + "labels": {"x": "sample timestamp", "y": "samples / second"} + } + ] + "layout": { + "width": 1500, + "yaxis": { + "title": "mpstat::Busy-CPU core=2,package=0,num=112,type=usr", + "color": "black" + } + } + } + + Args: + graphdata: A GraphList object + + Returns: + A Plotly object with layout + """ + start = time.time() + graphlist = [] + run = graphdata.run + layout: dict[str, Any] = {"width": "1500"} + axes = {} + yaxis = None + cindex = 0 + for g in graphdata.graphs: + names = g.names + metric: str = g.metric + ids = self._get_metric_ids( + run, metric, names, periodlist=g.periods, highlander=(not g.aggregate) + ) + filters = [{"terms": {"metric_desc.id": ids}}] + filters.extend(self._data_range(g.periods)) + y_max = 0.0 + points = [] + + # If we're pulling multiple breakouts, e.g., total CPU across modes + # or cores, we want to aggregate by timestamp. (Note that this will + # not work well unless the samples are aligned.) + if len(ids) > 1: + # Find the minimum sample interval of the selected metrics + aggdur = self.search( + "metric_data", + size=0, + filters=filters, + aggregations={ + "duration": {"stats": {"field": "metric_data.duration"}} + }, + ) + interval = int(aggdur["aggregations"]["duration"]["min"]) + data = self.search( + index="metric_data", + size=0, + filters=filters, + aggregations={ + "interval": { + "histogram": { + "field": "metric_data.end", + "interval": interval, + }, + "aggs": {"value": {"sum": {"field": "metric_data.value"}}}, + } + }, + ) + for h in self._aggs(data, "interval"): + points.append((h["key"], h["value"]["value"])) + else: + data = self.search("metric_data", filters=filters) + for h in self._hits(data, ["metric_data"]): + points.append((h["end"], float(h["value"]))) + + # Graph the "end" timestamp of each sample against the sample + # value. Sort the graph points by timestamp so that Ploty will draw + # nice lines. + x = [] + y = [] + + for t, v in sorted(points): + x.append(self._date(t)) + y.append(v) + y_max = max(y_max, v) + + try: + options = " " + ",".join(names) if names else "" + title = metric + options + + # TODO -- how to identify the period here? Can I filter out + # param differences to label these based on the batch size?? + graphitem = { + "x": x, + "y": y, + "name": title, + "type": "scatter", + "mode": "line", + "marker": {"color": colors[cindex]}, + "labels": { + "x": "sample timestamp", + "y": "samples / second", + }, + } + + # Y-axis scaling and labeling is divided by benchmark label; + # so store each we've created to reuse. (E.g., if we graph + # 5 different mpstat::Busy-CPU periods, they'll share a single + # Y axis.) 
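+ #
+ # Roughly (values illustrative): the first graphed metric uses the
+ # default "yaxis"/"y" pair; each later one gets its own overlaid
+ # axis, e.g.
+ #
+ # layout["yaxis2"] = {"title": "<metric title>", "color": "<color>",
+ # "autorange": True, "anchor": "free", "autoshift": True,
+ # "overlaying": "y"}
+ # graphitem["yaxis"] = "y2"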
+ if title in axes: + yref = axes[metric] + else: + if yaxis: + name = f"yaxis{yaxis}" + yref = f"y{yaxis}" + yaxis += 1 + layout[name] = { + "title": title, + "color": colors[cindex], + "autorange": True, + "anchor": "free", + "autoshift": True, + "overlaying": "y", + } + else: + name = "yaxis" + yref = "y" + yaxis = 2 + layout[name] = { + "title": title, + "color": colors[cindex], + } + axes[metric] = yref + graphitem["yaxis"] = yref + cindex += 1 + if cindex >= len(colors): + cindex = 0 + graphlist.append(graphitem) + except ValueError as v: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Unexpected data type: {str(v)}", + ) + duration = time.time() - start + print(f"Processing took {duration} seconds") + return {"data": graphlist, "layout": layout} + + def fields(self, index: str) -> dict[str, set]: + """Return the fields of an OpenSearch document from an index + + This fetches the document mapping from OpenSearch and reports it as a + set of subfields for each primary field. + + { + "cdm": [ + "ver" + ], + "metric_data": [ + "begin", + "value", + "end", + "duration" + ], + "metric_desc": [ + "id" + ] + } + + This is mostly useful while developing additional APIs against the + Crucible CDM. + + Args: + index: Root name of index (e.g., "run") + + Returns: + Document layout, like {"cdm": ["ver"], "metric_data": ["begin", "value", ...]} + """ + try: + idx = self._get_index(index) + mapping = self.elastic.indices.get_mapping(index=idx) + fields = defaultdict(set) + for f, subfields in mapping[idx]["mappings"]["properties"].items(): + for s in subfields["properties"].keys(): + fields[f].add(s) + return fields + except NotFoundError: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, f"Index name {index!r} doesn't exist" + ) diff --git a/backend/poetry.lock b/backend/poetry.lock index 842e52ae..3442e6ab 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -1,91 +1,118 @@ # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
+[[package]] +name = "aiohappyeyeballs" +version = "2.4.0" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd"}, + {file = "aiohappyeyeballs-2.4.0.tar.gz", hash = "sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2"}, +] + [[package]] name = "aiohttp" -version = "3.9.5" +version = "3.10.5" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, - {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, - {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, - {file = 
"aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, - {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, - {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, - {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, - {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, - {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, - 
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, - {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, - {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, + {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:18a01eba2574fb9edd5f6e5fb25f66e6ce061da5dab5db75e13fe1558142e0a3"}, + {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94fac7c6e77ccb1ca91e9eb4cb0ac0270b9fb9b289738654120ba8cebb1189c6"}, + {file = "aiohttp-3.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f1f1c75c395991ce9c94d3e4aa96e5c59c8356a15b1c9231e783865e2772699"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7acae3cf1a2a2361ec4c8e787eaaa86a94171d2417aae53c0cca6ca3118ff6"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94c4381ffba9cc508b37d2e536b418d5ea9cfdc2848b9a7fea6aebad4ec6aac1"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c31ad0c0c507894e3eaa843415841995bf8de4d6b2d24c6e33099f4bc9fc0d4f"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0912b8a8fadeb32ff67a3ed44249448c20148397c1ed905d5dac185b4ca547bb"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d93400c18596b7dc4794d48a63fb361b01a0d8eb39f28800dc900c8fbdaca91"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d00f3c5e0d764a5c9aa5a62d99728c56d455310bcc288a79cab10157b3af426f"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d742c36ed44f2798c8d3f4bc511f479b9ceef2b93f348671184139e7d708042c"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:814375093edae5f1cb31e3407997cf3eacefb9010f96df10d64829362ae2df69"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8224f98be68a84b19f48e0bdc14224b5a71339aff3a27df69989fa47d01296f3"}, + 
{file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d9a487ef090aea982d748b1b0d74fe7c3950b109df967630a20584f9a99c0683"}, + {file = "aiohttp-3.10.5-cp310-cp310-win32.whl", hash = "sha256:d9ef084e3dc690ad50137cc05831c52b6ca428096e6deb3c43e95827f531d5ef"}, + {file = "aiohttp-3.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:66bf9234e08fe561dccd62083bf67400bdbf1c67ba9efdc3dac03650e97c6088"}, + {file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8c6a4e5e40156d72a40241a25cc226051c0a8d816610097a8e8f517aeacd59a2"}, + {file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c634a3207a5445be65536d38c13791904fda0748b9eabf908d3fe86a52941cf"}, + {file = "aiohttp-3.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4aff049b5e629ef9b3e9e617fa6e2dfeda1bf87e01bcfecaf3949af9e210105e"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1942244f00baaacaa8155eca94dbd9e8cc7017deb69b75ef67c78e89fdad3c77"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04a1f2a65ad2f93aa20f9ff9f1b672bf912413e5547f60749fa2ef8a644e061"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f2bfc0032a00405d4af2ba27f3c429e851d04fad1e5ceee4080a1c570476697"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:424ae21498790e12eb759040bbb504e5e280cab64693d14775c54269fd1d2bb7"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:975218eee0e6d24eb336d0328c768ebc5d617609affaca5dbbd6dd1984f16ed0"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4120d7fefa1e2d8fb6f650b11489710091788de554e2b6f8347c7a20ceb003f5"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b90078989ef3fc45cf9221d3859acd1108af7560c52397ff4ace8ad7052a132e"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ba5a8b74c2a8af7d862399cdedce1533642fa727def0b8c3e3e02fcb52dca1b1"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:02594361128f780eecc2a29939d9dfc870e17b45178a867bf61a11b2a4367277"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8fb4fc029e135859f533025bc82047334e24b0d489e75513144f25408ecaf058"}, + {file = "aiohttp-3.10.5-cp311-cp311-win32.whl", hash = "sha256:e1ca1ef5ba129718a8fc827b0867f6aa4e893c56eb00003b7367f8a733a9b072"}, + {file = "aiohttp-3.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:349ef8a73a7c5665cca65c88ab24abe75447e28aa3bc4c93ea5093474dfdf0ff"}, + {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:305be5ff2081fa1d283a76113b8df7a14c10d75602a38d9f012935df20731487"}, + {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3a1c32a19ee6bbde02f1cb189e13a71b321256cc1d431196a9f824050b160d5a"}, + {file = "aiohttp-3.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:61645818edd40cc6f455b851277a21bf420ce347baa0b86eaa41d51ef58ba23d"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c225286f2b13bab5987425558baa5cbdb2bc925b2998038fa028245ef421e75"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ba01ebc6175e1e6b7275c907a3a36be48a2d487549b656aa90c8a910d9f3178"}, + {file = 
"aiohttp-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8eaf44ccbc4e35762683078b72bf293f476561d8b68ec8a64f98cf32811c323e"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c43eb1ab7cbf411b8e387dc169acb31f0ca0d8c09ba63f9eac67829585b44f"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de7a5299827253023c55ea549444e058c0eb496931fa05d693b95140a947cb73"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4790f0e15f00058f7599dab2b206d3049d7ac464dc2e5eae0e93fa18aee9e7bf"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:44b324a6b8376a23e6ba25d368726ee3bc281e6ab306db80b5819999c737d820"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0d277cfb304118079e7044aad0b76685d30ecb86f83a0711fc5fb257ffe832ca"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:54d9ddea424cd19d3ff6128601a4a4d23d54a421f9b4c0fff740505813739a91"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4f1c9866ccf48a6df2b06823e6ae80573529f2af3a0992ec4fe75b1a510df8a6"}, + {file = "aiohttp-3.10.5-cp312-cp312-win32.whl", hash = "sha256:dc4826823121783dccc0871e3f405417ac116055bf184ac04c36f98b75aacd12"}, + {file = "aiohttp-3.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:22c0a23a3b3138a6bf76fc553789cb1a703836da86b0f306b6f0dc1617398abc"}, + {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7f6b639c36734eaa80a6c152a238242bedcee9b953f23bb887e9102976343092"}, + {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f29930bc2921cef955ba39a3ff87d2c4398a0394ae217f41cb02d5c26c8b1b77"}, + {file = "aiohttp-3.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f489a2c9e6455d87eabf907ac0b7d230a9786be43fbe884ad184ddf9e9c1e385"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:123dd5b16b75b2962d0fff566effb7a065e33cd4538c1692fb31c3bda2bfb972"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b98e698dc34966e5976e10bbca6d26d6724e6bdea853c7c10162a3235aba6e16"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3b9162bab7e42f21243effc822652dc5bb5e8ff42a4eb62fe7782bcbcdfacf6"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1923a5c44061bffd5eebeef58cecf68096e35003907d8201a4d0d6f6e387ccaa"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d55f011da0a843c3d3df2c2cf4e537b8070a419f891c930245f05d329c4b0689"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:afe16a84498441d05e9189a15900640a2d2b5e76cf4efe8cbb088ab4f112ee57"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8112fb501b1e0567a1251a2fd0747baae60a4ab325a871e975b7bb67e59221f"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e72589da4c90337837fdfe2026ae1952c0f4a6e793adbbfbdd40efed7c63599"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4d46c7b4173415d8e583045fbc4daa48b40e31b19ce595b8d92cf639396c15d5"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:33e6bc4bab477c772a541f76cd91e11ccb6d2efa2b8d7d7883591dfb523e5987"}, + {file = "aiohttp-3.10.5-cp313-cp313-win32.whl", hash = "sha256:c58c6837a2c2a7cf3133983e64173aec11f9c2cd8e87ec2fdc16ce727bcf1a04"}, + {file = "aiohttp-3.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:38172a70005252b6893088c0f5e8a47d173df7cc2b2bd88650957eb84fcf5022"}, + {file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f6f18898ace4bcd2d41a122916475344a87f1dfdec626ecde9ee802a711bc569"}, + {file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5ede29d91a40ba22ac1b922ef510aab871652f6c88ef60b9dcdf773c6d32ad7a"}, + {file = "aiohttp-3.10.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:673f988370f5954df96cc31fd99c7312a3af0a97f09e407399f61583f30da9bc"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58718e181c56a3c02d25b09d4115eb02aafe1a732ce5714ab70326d9776457c3"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b38b1570242fbab8d86a84128fb5b5234a2f70c2e32f3070143a6d94bc854cf"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:074d1bff0163e107e97bd48cad9f928fa5a3eb4b9d33366137ffce08a63e37fe"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd31f176429cecbc1ba499d4aba31aaccfea488f418d60376b911269d3b883c5"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7384d0b87d4635ec38db9263e6a3f1eb609e2e06087f0aa7f63b76833737b471"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8989f46f3d7ef79585e98fa991e6ded55d2f48ae56d2c9fa5e491a6e4effb589"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c83f7a107abb89a227d6c454c613e7606c12a42b9a4ca9c5d7dad25d47c776ae"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cde98f323d6bf161041e7627a5fd763f9fd829bcfcd089804a5fdce7bb6e1b7d"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:676f94c5480d8eefd97c0c7e3953315e4d8c2b71f3b49539beb2aa676c58272f"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2d21ac12dc943c68135ff858c3a989f2194a709e6e10b4c8977d7fcd67dfd511"}, + {file = "aiohttp-3.10.5-cp38-cp38-win32.whl", hash = "sha256:17e997105bd1a260850272bfb50e2a328e029c941c2708170d9d978d5a30ad9a"}, + {file = "aiohttp-3.10.5-cp38-cp38-win_amd64.whl", hash = "sha256:1c19de68896747a2aa6257ae4cf6ef59d73917a36a35ee9d0a6f48cff0f94db8"}, + {file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7e2fe37ac654032db1f3499fe56e77190282534810e2a8e833141a021faaab0e"}, + {file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5bf3ead3cb66ab990ee2561373b009db5bc0e857549b6c9ba84b20bc462e172"}, + {file = "aiohttp-3.10.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b2c16a919d936ca87a3c5f0e43af12a89a3ce7ccbce59a2d6784caba945b68b"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad146dae5977c4dd435eb31373b3fe9b0b1bf26858c6fc452bf6af394067e10b"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c5c6fa16412b35999320f5c9690c0f554392dc222c04e559217e0f9ae244b92"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:95c4dc6f61d610bc0ee1edc6f29d993f10febfe5b76bb470b486d90bbece6b22"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da452c2c322e9ce0cfef392e469a26d63d42860f829026a63374fde6b5c5876f"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:898715cf566ec2869d5cb4d5fb4be408964704c46c96b4be267442d265390f32"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:391cc3a9c1527e424c6865e087897e766a917f15dddb360174a70467572ac6ce"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:380f926b51b92d02a34119d072f178d80bbda334d1a7e10fa22d467a66e494db"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce91db90dbf37bb6fa0997f26574107e1b9d5ff939315247b7e615baa8ec313b"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9093a81e18c45227eebe4c16124ebf3e0d893830c6aca7cc310bfca8fe59d857"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ee40b40aa753d844162dcc80d0fe256b87cba48ca0054f64e68000453caead11"}, + {file = "aiohttp-3.10.5-cp39-cp39-win32.whl", hash = "sha256:03f2645adbe17f274444953bdea69f8327e9d278d961d85657cb0d06864814c1"}, + {file = "aiohttp-3.10.5-cp39-cp39-win_amd64.whl", hash = "sha256:d17920f18e6ee090bdd3d0bfffd769d9f2cb4c8ffde3eb203777a3895c128862"}, + {file = "aiohttp-3.10.5.tar.gz", hash = "sha256:f071854b47d39591ce9a17981c46790acb30518e2f83dfca8db2dfa091178691"}, ] [package.dependencies] +aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" @@ -94,7 +121,7 @@ multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] +speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] [[package]] name = "aiosignal" @@ -183,13 +210,13 @@ files = [ [[package]] name = "atlassian-python-api" -version = "3.41.13" +version = "3.41.15" description = "Python Atlassian REST API Wrapper" optional = false python-versions = "*" files = [ - {file = "atlassian_python_api-3.41.13-py3-none-any.whl", hash = "sha256:f3887c5fe0149e90d22cd0fd8d99cd6a626e74ce80c190a40515d02c4a7a1a92"}, - {file = "atlassian_python_api-3.41.13.tar.gz", hash = "sha256:b77b081da3242794060f553079d93a9b26bd0aa047d86abf1ae9e7bcf59fe4e8"}, + {file = "atlassian_python_api-3.41.15-py3-none-any.whl", hash = "sha256:1c271ca9b1688acdaef09ad6f763570868a381394530d1fba49b5b104fffe54a"}, + {file = "atlassian_python_api-3.41.15.tar.gz", hash = "sha256:3c852f38ad8645887fbfe1526c12f2c1951ba06a24a1bbb36bdf7ccdc6d7b1ac"}, ] [package.dependencies] @@ -206,22 +233,22 @@ kerberos = ["requests-kerberos"] [[package]] name = "attrs" -version = "23.2.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = 
["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "beautifulsoup4" @@ -246,74 +273,89 @@ lxml = ["lxml"] [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = 
"cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -553,13 +595,13 @@ requests = ["requests (>=2.4.0,<3.0.0)"] [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -750,13 +792,13 @@ http2 = ["h2 (==3.*)"] [[package]] name = "idna" -version = "3.7" +version = "3.8" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] [[package]] @@ -787,103 +829,108 @@ typing-extensions = ">=4.5.0" [[package]] name = "multidict" -version = "6.0.5" +version = "6.1.0" description = "multidict implementation" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = 
"multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = 
"multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = 
"multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, + {file = 
"multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, + {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, + {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, + {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = 
"sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, + {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, + {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, + {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, + {file = 
"multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, + {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, + {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, + {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = 
"sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, + {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, + {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, + {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, + {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, + {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + [[package]] name = "numpy" version = "1.26.4" @@ -947,57 +994,68 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "orjson" -version = "3.10.3" +version = "3.10.7" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.10.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9fb6c3f9f5490a3eb4ddd46fc1b6eadb0d6fc16fb3f07320149c3286a1409dd8"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:252124b198662eee80428f1af8c63f7ff077c88723fe206a25df8dc57a57b1fa"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f3e87733823089a338ef9bbf363ef4de45e5c599a9bf50a7a9b82e86d0228da"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8334c0d87103bb9fbbe59b78129f1f40d1d1e8355bbed2ca71853af15fa4ed3"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1952c03439e4dce23482ac846e7961f9d4ec62086eb98ae76d97bd41d72644d7"}, - {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c0403ed9c706dcd2809f1600ed18f4aae50be263bd7112e54b50e2c2bc3ebd6d"}, - {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:382e52aa4270a037d41f325e7d1dfa395b7de0c367800b6f337d8157367bf3a7"}, - {file = "orjson-3.10.3-cp310-none-win32.whl", hash = "sha256:be2aab54313752c04f2cbaab4515291ef5af8c2256ce22abc007f89f42f49109"}, - {file = "orjson-3.10.3-cp310-none-win_amd64.whl", hash = "sha256:416b195f78ae461601893f482287cee1e3059ec49b4f99479aedf22a20b1098b"}, - {file = "orjson-3.10.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:73100d9abbbe730331f2242c1fc0bcb46a3ea3b4ae3348847e5a141265479700"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a12eee96e3ab828dbfcb4d5a0023aa971b27143a1d35dc214c176fdfb29b3"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520de5e2ef0b4ae546bea25129d6c7c74edb43fc6cf5213f511a927f2b28148b"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccaa0a401fc02e8828a5bedfd80f8cd389d24f65e5ca3954d72c6582495b4bcf"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7bc9e8bc11bac40f905640acd41cbeaa87209e7e1f57ade386da658092dc16"}, - {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3582b34b70543a1ed6944aca75e219e1192661a63da4d039d088a09c67543b08"}, - {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c23dfa91481de880890d17aa7b91d586a4746a4c2aa9a145bebdbaf233768d5"}, - {file = "orjson-3.10.3-cp311-none-win32.whl", hash = "sha256:1770e2a0eae728b050705206d84eda8b074b65ee835e7f85c919f5705b006c9b"}, - {file = "orjson-3.10.3-cp311-none-win_amd64.whl", hash = "sha256:93433b3c1f852660eb5abdc1f4dd0ced2be031ba30900433223b28ee0140cde5"}, - {file = "orjson-3.10.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a39aa73e53bec8d410875683bfa3a8edf61e5a1c7bb4014f65f81d36467ea098"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0943a96b3fa09bee1afdfccc2cb236c9c64715afa375b2af296c73d91c23eab2"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e852baafceff8da3c9defae29414cc8513a1586ad93e45f27b89a639c68e8176"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18566beb5acd76f3769c1d1a7ec06cdb81edc4d55d2765fb677e3eaa10fa99e0"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd2218d5a3aa43060efe649ec564ebedec8ce6ae0a43654b81376216d5ebd42"}, - {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:cf20465e74c6e17a104ecf01bf8cd3b7b252565b4ccee4548f18b012ff2f8069"}, - {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ba7f67aa7f983c4345eeda16054a4677289011a478ca947cd69c0a86ea45e534"}, - {file = "orjson-3.10.3-cp312-none-win32.whl", hash = "sha256:17e0713fc159abc261eea0f4feda611d32eabc35708b74bef6ad44f6c78d5ea0"}, - {file = "orjson-3.10.3-cp312-none-win_amd64.whl", hash = "sha256:4c895383b1ec42b017dd2c75ae8a5b862fc489006afde06f14afbdd0309b2af0"}, - {file = "orjson-3.10.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:be2719e5041e9fb76c8c2c06b9600fe8e8584e6980061ff88dcbc2691a16d20d"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0175a5798bdc878956099f5c54b9837cb62cfbf5d0b86ba6d77e43861bcec2"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:978be58a68ade24f1af7758626806e13cff7748a677faf95fbb298359aa1e20d"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16bda83b5c61586f6f788333d3cf3ed19015e3b9019188c56983b5a299210eb5"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ad1f26bea425041e0a1adad34630c4825a9e3adec49079b1fb6ac8d36f8b754"}, - {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9e253498bee561fe85d6325ba55ff2ff08fb5e7184cd6a4d7754133bd19c9195"}, - {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a62f9968bab8a676a164263e485f30a0b748255ee2f4ae49a0224be95f4532b"}, - {file = "orjson-3.10.3-cp38-none-win32.whl", hash = "sha256:8d0b84403d287d4bfa9bf7d1dc298d5c1c5d9f444f3737929a66f2fe4fb8f134"}, - {file = "orjson-3.10.3-cp38-none-win_amd64.whl", hash = "sha256:8bc7a4df90da5d535e18157220d7915780d07198b54f4de0110eca6b6c11e290"}, - {file = "orjson-3.10.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9059d15c30e675a58fdcd6f95465c1522b8426e092de9fff20edebfdc15e1cb0"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d40c7f7938c9c2b934b297412c067936d0b54e4b8ab916fd1a9eb8f54c02294"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a654ec1de8fdaae1d80d55cee65893cb06494e124681ab335218be6a0691e7"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:831c6ef73f9aa53c5f40ae8f949ff7681b38eaddb6904aab89dca4d85099cb78"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99b880d7e34542db89f48d14ddecbd26f06838b12427d5a25d71baceb5ba119d"}, - {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e5e176c994ce4bd434d7aafb9ecc893c15f347d3d2bbd8e7ce0b63071c52e25"}, - {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b69a58a37dab856491bf2d3bbf259775fdce262b727f96aafbda359cb1d114d8"}, - {file = "orjson-3.10.3-cp39-none-win32.whl", hash = "sha256:b8d4d1a6868cde356f1402c8faeb50d62cee765a1f7ffcfd6de732ab0581e063"}, - {file = "orjson-3.10.3-cp39-none-win_amd64.whl", hash = "sha256:5102f50c5fc46d94f2033fe00d392588564378260d64377aec702f21a7a22912"}, - {file = "orjson-3.10.3.tar.gz", hash = "sha256:2b166507acae7ba2f7c315dcf185a9111ad5e992ac81f2d507aac39193c2c818"}, + {file = "orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9"}, + {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250"}, + {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84"}, + {file = "orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175"}, + {file = "orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c"}, + {file = "orjson-3.10.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e"}, + {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6"}, + {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0"}, + {file = "orjson-3.10.7-cp311-none-win32.whl", hash = "sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f"}, + {file = "orjson-3.10.7-cp311-none-win_amd64.whl", hash = "sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5"}, + {file = "orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864"}, + {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5"}, + {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b"}, + {file = "orjson-3.10.7-cp312-none-win32.whl", hash = "sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb"}, + {file = "orjson-3.10.7-cp312-none-win_amd64.whl", hash = "sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1"}, + {file = "orjson-3.10.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149"}, + {file = "orjson-3.10.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c"}, + {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad"}, + {file = "orjson-3.10.7-cp313-none-win32.whl", hash = "sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2"}, + {file = "orjson-3.10.7-cp313-none-win_amd64.whl", hash = "sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024"}, + {file = "orjson-3.10.7-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0"}, + {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354"}, + {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866"}, + {file = "orjson-3.10.7-cp38-none-win32.whl", hash = "sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c"}, + {file = 
"orjson-3.10.7-cp38-none-win_amd64.whl", hash = "sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e"}, + {file = "orjson-3.10.7-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f"}, + {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd"}, + {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5"}, + {file = "orjson-3.10.7-cp39-none-win32.whl", hash = "sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2"}, + {file = "orjson-3.10.7-cp39-none-win_amd64.whl", hash = "sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58"}, + {file = "orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3"}, ] [[package]] @@ -1016,47 +1074,87 @@ attrs = ">=19.2.0" [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] name = "pandas" -version = "1.2.4" +version = "2.2.2" description = "Powerful data structures for data analysis, time series, and statistics" optional = false -python-versions = ">=3.7.1" -files = [ - {file = "pandas-1.2.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c601c6fdebc729df4438ec1f62275d6136a0dd14d332fc0e8ce3f7d2aadb4dd6"}, - {file = "pandas-1.2.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:8d4c74177c26aadcfb4fd1de6c1c43c2bf822b3e0fc7a9b409eeaf84b3e92aaa"}, - {file = "pandas-1.2.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:b730add5267f873b3383c18cac4df2527ac4f0f0eed1c6cf37fcb437e25cf558"}, - {file = "pandas-1.2.4-cp37-cp37m-win32.whl", hash = "sha256:2cb7e8f4f152f27dc93f30b5c7a98f6c748601ea65da359af734dd0cf3fa733f"}, - {file = "pandas-1.2.4-cp37-cp37m-win_amd64.whl", hash = "sha256:2111c25e69fa9365ba80bbf4f959400054b2771ac5d041ed19415a8b488dc70a"}, - {file = "pandas-1.2.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:167693a80abc8eb28051fbd184c1b7afd13ce2c727a5af47b048f1ea3afefff4"}, - {file = "pandas-1.2.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:612add929bf3ba9d27b436cc8853f5acc337242d6b584203f207e364bb46cb12"}, - {file = "pandas-1.2.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:971e2a414fce20cc5331fe791153513d076814d30a60cd7348466943e6e909e4"}, - {file = "pandas-1.2.4-cp38-cp38-win32.whl", hash = "sha256:68d7baa80c74aaacbed597265ca2308f017859123231542ff8a5266d489e1858"}, - {file = "pandas-1.2.4-cp38-cp38-win_amd64.whl", hash = "sha256:bd659c11a4578af740782288cac141a322057a2e36920016e0fc7b25c5a4b686"}, - {file = "pandas-1.2.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9db70ffa8b280bb4de83f9739d514cd0735825e79eef3a61d312420b9f16b758"}, - {file = "pandas-1.2.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:298f0553fd3ba8e002c4070a723a59cdb28eda579f3e243bc2ee397773f5398b"}, - {file = "pandas-1.2.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52d2472acbb8a56819a87aafdb8b5b6d2b3386e15c95bde56b281882529a7ded"}, - {file = "pandas-1.2.4-cp39-cp39-win32.whl", hash = "sha256:d0877407359811f7b853b548a614aacd7dea83b0c0c84620a9a643f180060950"}, - {file = "pandas-1.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:2b063d41803b6a19703b845609c0b700913593de067b552a8b24dd8eeb8c9895"}, - {file = "pandas-1.2.4.tar.gz", hash = "sha256:649ecab692fade3cbfcf967ff936496b0cfba0af00a55dfaacd82bdda5cb2279"}, +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, ] [package.dependencies] -numpy = ">=1.16.5" -python-dateutil = ">=2.7.3" -pytz = ">=2017.3" +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" [package.extras] -test = ["hypothesis (>=3.58)", "pytest (>=5.0.1)", "pytest-xdist"] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", 
"xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] [[package]] name = "pycparser" @@ -1242,84 +1340,86 @@ docs = ["Sphinx (>=6.1.0,<7.0.0)", "alabaster (>=0.7.12,<0.8.0)", "commonmark (> [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = 
"PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = 
"PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "requests" -version = "2.32.2" +version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" files = [ - {file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"}, - {file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -1427,13 +1527,13 @@ files = [ [[package]] name = "soupsieve" -version = "2.5" +version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." optional = false python-versions = ">=3.8" files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] [[package]] @@ -1500,24 +1600,35 @@ sortedcontainers = "*" [[package]] name = "typing-extensions" -version = "4.12.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.12.0-py3-none-any.whl", hash = "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594"}, - {file = "typing_extensions-4.12.0.tar.gz", hash = "sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] name = "urllib3" -version = "1.26.18" +version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, - {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, + {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, + {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] [package.extras] @@ -1587,43 +1698,41 @@ watchdog = ">=0.10.0" [[package]] name = "watchdog" -version = "4.0.1" +version = "5.0.2" description = "Filesystem events monitoring" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"}, - {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"}, - {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, - {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, - {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, - {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, - {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, + {file = "watchdog-5.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d961f4123bb3c447d9fcdcb67e1530c366f10ab3a0c7d1c0c9943050936d4877"}, + {file = "watchdog-5.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72990192cb63872c47d5e5fefe230a401b87fd59d257ee577d61c9e5564c62e5"}, + {file = "watchdog-5.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6bec703ad90b35a848e05e1b40bf0050da7ca28ead7ac4be724ae5ac2653a1a0"}, + {file = "watchdog-5.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:dae7a1879918f6544201d33666909b040a46421054a50e0f773e0d870ed7438d"}, + {file = "watchdog-5.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c4a440f725f3b99133de610bfec93d570b13826f89616377715b9cd60424db6e"}, + {file = "watchdog-5.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8b2918c19e0d48f5f20df458c84692e2a054f02d9df25e6c3c930063eca64c1"}, + {file = "watchdog-5.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:aa9cd6e24126d4afb3752a3e70fce39f92d0e1a58a236ddf6ee823ff7dba28ee"}, + {file = "watchdog-5.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f627c5bf5759fdd90195b0c0431f99cff4867d212a67b384442c51136a098ed7"}, + {file = "watchdog-5.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d7594a6d32cda2b49df3fd9abf9b37c8d2f3eab5df45c24056b4a671ac661619"}, + {file = "watchdog-5.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba32efcccfe2c58f4d01115440d1672b4eb26cdd6fc5b5818f1fb41f7c3e1889"}, + {file = 
"watchdog-5.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:963f7c4c91e3f51c998eeff1b3fb24a52a8a34da4f956e470f4b068bb47b78ee"}, + {file = "watchdog-5.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8c47150aa12f775e22efff1eee9f0f6beee542a7aa1a985c271b1997d340184f"}, + {file = "watchdog-5.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:14dd4ed023d79d1f670aa659f449bcd2733c33a35c8ffd88689d9d243885198b"}, + {file = "watchdog-5.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b84bff0391ad4abe25c2740c7aec0e3de316fdf7764007f41e248422a7760a7f"}, + {file = "watchdog-5.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e8d5ff39f0a9968952cce548e8e08f849141a4fcc1290b1c17c032ba697b9d7"}, + {file = "watchdog-5.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fb223456db6e5f7bd9bbd5cd969f05aae82ae21acc00643b60d81c770abd402b"}, + {file = "watchdog-5.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9814adb768c23727a27792c77812cf4e2fd9853cd280eafa2bcfa62a99e8bd6e"}, + {file = "watchdog-5.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:901ee48c23f70193d1a7bc2d9ee297df66081dd5f46f0ca011be4f70dec80dab"}, + {file = "watchdog-5.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:638bcca3d5b1885c6ec47be67bf712b00a9ab3d4b22ec0881f4889ad870bc7e8"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:5597c051587f8757798216f2485e85eac583c3b343e9aa09127a3a6f82c65ee8"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:53ed1bf71fcb8475dd0ef4912ab139c294c87b903724b6f4a8bd98e026862e6d"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:29e4a2607bd407d9552c502d38b45a05ec26a8e40cc7e94db9bb48f861fa5abc"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:b6dc8f1d770a8280997e4beae7b9a75a33b268c59e033e72c8a10990097e5fde"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:d2ab34adc9bf1489452965cdb16a924e97d4452fcf88a50b21859068b50b5c3b"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:7d1aa7e4bb0f0c65a1a91ba37c10e19dabf7eaaa282c5787e51371f090748f4b"}, + {file = "watchdog-5.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:726eef8f8c634ac6584f86c9c53353a010d9f311f6c15a034f3800a7a891d941"}, + {file = "watchdog-5.0.2-py3-none-win32.whl", hash = "sha256:bda40c57115684d0216556671875e008279dea2dc00fcd3dde126ac8e0d7a2fb"}, + {file = "watchdog-5.0.2-py3-none-win_amd64.whl", hash = "sha256:d010be060c996db725fbce7e3ef14687cdcc76f4ca0e4339a68cc4532c382a73"}, + {file = "watchdog-5.0.2-py3-none-win_ia64.whl", hash = "sha256:3960136b2b619510569b90f0cd96408591d6c251a75c97690f4553ca88889769"}, + {file = "watchdog-5.0.2.tar.gz", hash = "sha256:dcebf7e475001d2cdeb020be630dc5b687e9acdd60d16fea6bb4508e7b94cf76"}, ] [package.extras] @@ -1721,101 +1830,103 @@ files = [ [[package]] name = "yarl" -version = "1.9.4" +version = "1.11.1" description = "Yet another URL library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = 
"yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = 
"yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = 
"sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, + {file = "yarl-1.11.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:400cd42185f92de559d29eeb529e71d80dfbd2f45c36844914a4a34297ca6f00"}, + {file = "yarl-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8258c86f47e080a258993eed877d579c71da7bda26af86ce6c2d2d072c11320d"}, + {file = "yarl-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2164cd9725092761fed26f299e3f276bb4b537ca58e6ff6b252eae9631b5c96e"}, + {file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08ea567c16f140af8ddc7cb58e27e9138a1386e3e6e53982abaa6f2377b38cc"}, + {file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:768ecc550096b028754ea28bf90fde071c379c62c43afa574edc6f33ee5daaec"}, + {file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2909fa3a7d249ef64eeb2faa04b7957e34fefb6ec9966506312349ed8a7e77bf"}, + {file = 
"yarl-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01a8697ec24f17c349c4f655763c4db70eebc56a5f82995e5e26e837c6eb0e49"}, + {file = "yarl-1.11.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e286580b6511aac7c3268a78cdb861ec739d3e5a2a53b4809faef6b49778eaff"}, + {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4179522dc0305c3fc9782549175c8e8849252fefeb077c92a73889ccbcd508ad"}, + {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:27fcb271a41b746bd0e2a92182df507e1c204759f460ff784ca614e12dd85145"}, + {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f61db3b7e870914dbd9434b560075e0366771eecbe6d2b5561f5bc7485f39efd"}, + {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:c92261eb2ad367629dc437536463dc934030c9e7caca861cc51990fe6c565f26"}, + {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d95b52fbef190ca87d8c42f49e314eace4fc52070f3dfa5f87a6594b0c1c6e46"}, + {file = "yarl-1.11.1-cp310-cp310-win32.whl", hash = "sha256:489fa8bde4f1244ad6c5f6d11bb33e09cf0d1d0367edb197619c3e3fc06f3d91"}, + {file = "yarl-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:476e20c433b356e16e9a141449f25161e6b69984fb4cdbd7cd4bd54c17844998"}, + {file = "yarl-1.11.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:946eedc12895873891aaceb39bceb484b4977f70373e0122da483f6c38faaa68"}, + {file = "yarl-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21a7c12321436b066c11ec19c7e3cb9aec18884fe0d5b25d03d756a9e654edfe"}, + {file = "yarl-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c35f493b867912f6fda721a59cc7c4766d382040bdf1ddaeeaa7fa4d072f4675"}, + {file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25861303e0be76b60fddc1250ec5986c42f0a5c0c50ff57cc30b1be199c00e63"}, + {file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4b53f73077e839b3f89c992223f15b1d2ab314bdbdf502afdc7bb18e95eae27"}, + {file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:327c724b01b8641a1bf1ab3b232fb638706e50f76c0b5bf16051ab65c868fac5"}, + {file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4307d9a3417eea87715c9736d050c83e8c1904e9b7aada6ce61b46361b733d92"}, + {file = "yarl-1.11.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a28bed68ab8fb7e380775f0029a079f08a17799cb3387a65d14ace16c12e2b"}, + {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:067b961853c8e62725ff2893226fef3d0da060656a9827f3f520fb1d19b2b68a"}, + {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8215f6f21394d1f46e222abeb06316e77ef328d628f593502d8fc2a9117bde83"}, + {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:498442e3af2a860a663baa14fbf23fb04b0dd758039c0e7c8f91cb9279799bff"}, + {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:69721b8effdb588cb055cc22f7c5105ca6fdaa5aeb3ea09021d517882c4a904c"}, + {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e969fa4c1e0b1a391f3fcbcb9ec31e84440253325b534519be0d28f4b6b533e"}, + {file = "yarl-1.11.1-cp311-cp311-win32.whl", hash = "sha256:7d51324a04fc4b0e097ff8a153e9276c2593106a811704025bbc1d6916f45ca6"}, + {file = "yarl-1.11.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:15061ce6584ece023457fb8b7a7a69ec40bf7114d781a8c4f5dcd68e28b5c53b"}, + {file = "yarl-1.11.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a4264515f9117be204935cd230fb2a052dd3792789cc94c101c535d349b3dab0"}, + {file = "yarl-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f41fa79114a1d2eddb5eea7b912d6160508f57440bd302ce96eaa384914cd265"}, + {file = "yarl-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:02da8759b47d964f9173c8675710720b468aa1c1693be0c9c64abb9d8d9a4867"}, + {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9361628f28f48dcf8b2f528420d4d68102f593f9c2e592bfc842f5fb337e44fd"}, + {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b91044952da03b6f95fdba398d7993dd983b64d3c31c358a4c89e3c19b6f7aef"}, + {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74db2ef03b442276d25951749a803ddb6e270d02dda1d1c556f6ae595a0d76a8"}, + {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e975a2211952a8a083d1b9d9ba26472981ae338e720b419eb50535de3c02870"}, + {file = "yarl-1.11.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8aef97ba1dd2138112890ef848e17d8526fe80b21f743b4ee65947ea184f07a2"}, + {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a7915ea49b0c113641dc4d9338efa9bd66b6a9a485ffe75b9907e8573ca94b84"}, + {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:504cf0d4c5e4579a51261d6091267f9fd997ef58558c4ffa7a3e1460bd2336fa"}, + {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3de5292f9f0ee285e6bd168b2a77b2a00d74cbcfa420ed078456d3023d2f6dff"}, + {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a34e1e30f1774fa35d37202bbeae62423e9a79d78d0874e5556a593479fdf239"}, + {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66b63c504d2ca43bf7221a1f72fbe981ff56ecb39004c70a94485d13e37ebf45"}, + {file = "yarl-1.11.1-cp312-cp312-win32.whl", hash = "sha256:a28b70c9e2213de425d9cba5ab2e7f7a1c8ca23a99c4b5159bf77b9c31251447"}, + {file = "yarl-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:17b5a386d0d36fb828e2fb3ef08c8829c1ebf977eef88e5367d1c8c94b454639"}, + {file = "yarl-1.11.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1fa2e7a406fbd45b61b4433e3aa254a2c3e14c4b3186f6e952d08a730807fa0c"}, + {file = "yarl-1.11.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:750f656832d7d3cb0c76be137ee79405cc17e792f31e0a01eee390e383b2936e"}, + {file = "yarl-1.11.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b8486f322d8f6a38539136a22c55f94d269addb24db5cb6f61adc61eabc9d93"}, + {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fce4da3703ee6048ad4138fe74619c50874afe98b1ad87b2698ef95bf92c96d"}, + {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ed653638ef669e0efc6fe2acb792275cb419bf9cb5c5049399f3556995f23c7"}, + {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18ac56c9dd70941ecad42b5a906820824ca72ff84ad6fa18db33c2537ae2e089"}, + {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:688654f8507464745ab563b041d1fb7dab5d9912ca6b06e61d1c4708366832f5"}, + {file = 
"yarl-1.11.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4973eac1e2ff63cf187073cd4e1f1148dcd119314ab79b88e1b3fad74a18c9d5"}, + {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:964a428132227edff96d6f3cf261573cb0f1a60c9a764ce28cda9525f18f7786"}, + {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6d23754b9939cbab02c63434776df1170e43b09c6a517585c7ce2b3d449b7318"}, + {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c2dc4250fe94d8cd864d66018f8344d4af50e3758e9d725e94fecfa27588ff82"}, + {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09696438cb43ea6f9492ef237761b043f9179f455f405279e609f2bc9100212a"}, + {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:999bfee0a5b7385a0af5ffb606393509cfde70ecca4f01c36985be6d33e336da"}, + {file = "yarl-1.11.1-cp313-cp313-win32.whl", hash = "sha256:ce928c9c6409c79e10f39604a7e214b3cb69552952fbda8d836c052832e6a979"}, + {file = "yarl-1.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:501c503eed2bb306638ccb60c174f856cc3246c861829ff40eaa80e2f0330367"}, + {file = "yarl-1.11.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dae7bd0daeb33aa3e79e72877d3d51052e8b19c9025ecf0374f542ea8ec120e4"}, + {file = "yarl-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3ff6b1617aa39279fe18a76c8d165469c48b159931d9b48239065767ee455b2b"}, + {file = "yarl-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3257978c870728a52dcce8c2902bf01f6c53b65094b457bf87b2644ee6238ddc"}, + {file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f351fa31234699d6084ff98283cb1e852270fe9e250a3b3bf7804eb493bd937"}, + {file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8aef1b64da41d18026632d99a06b3fefe1d08e85dd81d849fa7c96301ed22f1b"}, + {file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7175a87ab8f7fbde37160a15e58e138ba3b2b0e05492d7351314a250d61b1591"}, + {file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba444bdd4caa2a94456ef67a2f383710928820dd0117aae6650a4d17029fa25e"}, + {file = "yarl-1.11.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ea9682124fc062e3d931c6911934a678cb28453f957ddccf51f568c2f2b5e05"}, + {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8418c053aeb236b20b0ab8fa6bacfc2feaaf7d4683dd96528610989c99723d5f"}, + {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:61a5f2c14d0a1adfdd82258f756b23a550c13ba4c86c84106be4c111a3a4e413"}, + {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f3a6d90cab0bdf07df8f176eae3a07127daafcf7457b997b2bf46776da2c7eb7"}, + {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:077da604852be488c9a05a524068cdae1e972b7dc02438161c32420fb4ec5e14"}, + {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:15439f3c5c72686b6c3ff235279630d08936ace67d0fe5c8d5bbc3ef06f5a420"}, + {file = "yarl-1.11.1-cp38-cp38-win32.whl", hash = "sha256:238a21849dd7554cb4d25a14ffbfa0ef380bb7ba201f45b144a14454a72ffa5a"}, + {file = "yarl-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:67459cf8cf31da0e2cbdb4b040507e535d25cfbb1604ca76396a3a66b8ba37a6"}, + {file = "yarl-1.11.1-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:884eab2ce97cbaf89f264372eae58388862c33c4f551c15680dd80f53c89a269"}, + {file = "yarl-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a336eaa7ee7e87cdece3cedb395c9657d227bfceb6781295cf56abcd3386a26"}, + {file = "yarl-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87f020d010ba80a247c4abc335fc13421037800ca20b42af5ae40e5fd75e7909"}, + {file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:637c7ddb585a62d4469f843dac221f23eec3cbad31693b23abbc2c366ad41ff4"}, + {file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48dfd117ab93f0129084577a07287376cc69c08138694396f305636e229caa1a"}, + {file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e0ae31fb5ccab6eda09ba1494e87eb226dcbd2372dae96b87800e1dcc98804"}, + {file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f46f81501160c28d0c0b7333b4f7be8983dbbc161983b6fb814024d1b4952f79"}, + {file = "yarl-1.11.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04293941646647b3bfb1719d1d11ff1028e9c30199509a844da3c0f5919dc520"}, + {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:250e888fa62d73e721f3041e3a9abf427788a1934b426b45e1b92f62c1f68366"}, + {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e8f63904df26d1a66aabc141bfd258bf738b9bc7bc6bdef22713b4f5ef789a4c"}, + {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:aac44097d838dda26526cffb63bdd8737a2dbdf5f2c68efb72ad83aec6673c7e"}, + {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:267b24f891e74eccbdff42241c5fb4f974de2d6271dcc7d7e0c9ae1079a560d9"}, + {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6907daa4b9d7a688063ed098c472f96e8181733c525e03e866fb5db480a424df"}, + {file = "yarl-1.11.1-cp39-cp39-win32.whl", hash = "sha256:14438dfc5015661f75f85bc5adad0743678eefee266ff0c9a8e32969d5d69f74"}, + {file = "yarl-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:94d0caaa912bfcdc702a4204cd5e2bb01eb917fc4f5ea2315aa23962549561b0"}, + {file = "yarl-1.11.1-py3-none-any.whl", hash = "sha256:72bf26f66456baa0584eff63e44545c9f0eaed9b73cb6601b647c91f14c11f38"}, + {file = "yarl-1.11.1.tar.gz", hash = "sha256:1bb2d9e212fb7449b8fb73bc461b51eaa17cc8430b4a87d87be7b25052d92f53"}, ] [package.dependencies] @@ -1825,4 +1936,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "b61c6117aaf307076a1f0eee6b9177cd1f5accf330cd75a28669c36b4c51ad69" +content-hash = "eea5f44a5be305136e1e8d0bf9fe86791e4397c286d83bd46963f0535716d1c4" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 984496a1..342f0776 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -7,7 +7,7 @@ authors = ["mleader "] [tool.poetry.dependencies] python = "^3.9" cryptography = "^3.4.8" -pandas = "1.2.4" +pandas = "2.2.2" vyper-config = "1.0.0" semver = "2.13.0" splunk-sdk = "2.0.1" @@ -15,6 +15,7 @@ uvloop = "^0.15.2" httptools = "^0.2.0" elasticsearch = "7.13.4" fastapi = "^0.104.1" +numpy = "1.26.4" pydantic = "2.3.0" uvicorn = "^0.14.0" trio = "^0.18.0" diff --git a/backend/scripts/start-reload.sh b/backend/scripts/start-reload.sh index 764e707c..8f1e4eb5 100755 --- a/backend/scripts/start-reload.sh +++ b/backend/scripts/start-reload.sh @@ -1,3 +1,3 @@ #!/usr/bin/bash - -uvicorn --reload --host="0.0.0.0" --port=8000 --forwarded-allow-ips='*' 
--proxy-headers app.main:app +LOG=${CPT_BACKEND_LOG_LEVEL:-info} +uvicorn --reload --log-level="${LOG}" --host="0.0.0.0" --port=8000 --forwarded-allow-ips='*' --proxy-headers app.main:app diff --git a/backend/skeleton.toml b/backend/skeleton.toml index 81662a55..2aac4574 100644 --- a/backend/skeleton.toml +++ b/backend/skeleton.toml @@ -15,3 +15,8 @@ personal_access_token= url= username= password= + +[crucible] +url= +username= +password= diff --git a/frontend/README.md b/frontend/README.md index 0b01bbaf..6bd63aab 100644 --- a/frontend/README.md +++ b/frontend/README.md @@ -1,5 +1,5 @@ -# Openshift Performance Dashbaord +# Openshift Performance Dashboard ## Dashboard directory structure diff --git a/frontend/src/App.js b/frontend/src/App.js deleted file mode 100644 index 4b8c6382..00000000 --- a/frontend/src/App.js +++ /dev/null @@ -1,58 +0,0 @@ -import React, {useEffect} from 'react'; -import '@patternfly/react-core/dist/styles/base.css'; - -import { - Page, - PageSection, - PageSectionVariants, -} from '@patternfly/react-core'; -import {fetchOCPJobsData, fetchCPTJobsData, fetchQuayJobsData, fetchTelcoJobsData} from "./store/Actions/ActionCreator"; -import {useDispatch} from "react-redux"; -import {Route, Switch, BrowserRouter as Router} from "react-router-dom"; -import {NavBar} from "./components/NavBar/NavBar"; -import {HomeView} from "./components/Home/HomeView"; -import {OCPHome} from './components/OCP/OCPHome'; -import {QuayHome} from './components/Quay/QuayHome'; -import {TelcoHome} from './components/Telco/TelcoHome'; - - -export const App = () => { - const dispatch = useDispatch() - - useEffect(() => { - const fetchData = async () =>{ - await dispatch(fetchOCPJobsData()) - await dispatch(fetchCPTJobsData()) - await dispatch(fetchQuayJobsData()) - await dispatch(fetchTelcoJobsData()) - } - fetchData() - }, [dispatch]) - - - - - return ( - - } - groupProps={{ - stickyOnBreakpoint: { default: 'top' }, - sticky: 'top' - }} - > - - - - - - - - - - - - ); -}; - -export default App diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx index c5f48549..d93c960e 100644 --- a/frontend/src/App.jsx +++ b/frontend/src/App.jsx @@ -5,6 +5,7 @@ import * as APP_ROUTES from "./utils/routeConstants"; import { BrowserRouter, Route, Routes } from "react-router-dom"; import Home from "./components/templates/Home"; +import ILab from "./components/templates/ILab"; import MainLayout from "./containers/MainLayout"; import OCP from "./components/templates/OCP"; import Quay from "./components/templates/Quay"; @@ -26,6 +27,7 @@ function App() { } /> } /> } /> + } /> diff --git a/frontend/src/actions/filterActions.js b/frontend/src/actions/filterActions.js index 7f565887..0307bcdf 100644 --- a/frontend/src/actions/filterActions.js +++ b/frontend/src/actions/filterActions.js @@ -27,6 +27,7 @@ import { setTelcoOtherSummaryFilter, } from "./telcoActions"; +import { setIlabDateFilter } from "./ilabActions"; import store from "@/store/store"; const { dispatch } = store; @@ -76,6 +77,8 @@ export const setDateFilter = (date, key, navigation, currType) => { dispatch(setQuayDateFilter(date, key, navigation)); } else if (currType === "telco") { dispatch(setTelcoDateFilter(date, key, navigation)); + } else if (currType === "ilab") { + dispatch(setIlabDateFilter(date, key, navigation)); } }; diff --git a/frontend/src/actions/ilabActions.js b/frontend/src/actions/ilabActions.js new file mode 100644 index 00000000..f3277edd --- /dev/null +++ b/frontend/src/actions/ilabActions.js @@ -0,0 +1,98 @@ +import * as API_ROUTES 
from "@/utils/apiConstants"; +import * as TYPES from "./types.js"; + +import API from "@/utils/axiosInstance"; +import { appendQueryString } from "@/utils/helper"; +import { showToast, showFailureToast } from "@/actions/toastActions"; + +export const fetchILabJobs = () => async (dispatch, getState) => { + try { + dispatch({ type: TYPES.LOADING }); + const { start_date, end_date } = getState().ilab; + const response = await API.get(API_ROUTES.ILABS_JOBS_API_V1, { + params: { + ...(start_date && { start_date }), + ...(end_date && { end_date }), + }, + }); + if (response.status === 200 && response?.data?.results.length > 0) { + // const startDate = new Date(response.data.startDate), + // endDate = new Date(response.data.endDate); + const startDate = response.data.startDate, + endDate = response.data.endDate; + dispatch({ + type: TYPES.SET_ILAB_JOBS_DATA, + payload: response.data.results, + }); + // dispatch({ + // type: TYPES.SET_ILAB_DATE_FILTER, + // payload: { + // start_date: `${startDate.getFullYear()}-${startDate.getMonth()}-${startDate.getDate()}`, + // end_date: `${endDate.getFullYear()}-${endDate.getMonth()}-${endDate.getDate()}`, + // }, + // }); + dispatch({ + type: TYPES.SET_ILAB_DATE_FILTER, + payload: { + start_date: startDate, + end_date: endDate, + }, + }); + } + } catch (error) { + dispatch(showFailureToast()); + } + dispatch({ type: TYPES.COMPLETED }); +}; + +export const setIlabDateFilter = + (start_date, end_date, navigate) => (dispatch, getState) => { + const appliedFilters = getState().cpt.appliedFilters; + + dispatch({ + type: TYPES.SET_CPT_DATE_FILTER, + payload: { + start_date, + end_date, + }, + }); + + appendQueryString({ ...appliedFilters, start_date, end_date }, navigate); + + dispatch(fetchILabJobs()); + }; + +export const fetchGraphData = (uid, metric) => async (dispatch) => { + try { + dispatch({ type: TYPES.GRAPH_LOADING }); + const periods = await API.get(`/api/v1/ilab/runs/${uid}/periods`); + let graphs = []; + periods.data.forEach((p) => { + graphs.push({ metric, periods: [p.id] }); + graphs.push({ + metric: "mpstat::Busy-CPU", + aggregate: true, + periods: [p.id], + }); + }); + const response = await API.post(`/api/v1/ilab/runs/multigraph`, { + run: uid, + name: metric, + graphs, + }); + if (response.status === 200) { + dispatch({ + type: TYPES.SET_ILAB_GRAPH_DATA, + payload: { + uid, + data: response.data.data, + layout: response.data.layout, + }, + }); + } + } catch (error) { + console.error(error); + dispatch(showToast("danger", "Graph error", error.data)); + } + dispatch({ type: TYPES.GRAPH_COMPLETED }); +}; diff --git a/frontend/src/actions/types.js b/frontend/src/actions/types.js index 1804cf21..2dab4cca 100644 --- a/frontend/src/actions/types.js +++ b/frontend/src/actions/types.js @@ -77,3 +77,7 @@ export const SET_TELCO_SELECTED_FILTERS = "SET_TELCO_SELECTED_FILTERS"; export const SET_TELCO_SUMMARY = "SET_TELCO_SUMMARY"; export const SET_TELCO_COLUMNS = "SET_TELCO_COLUMNS"; export const SET_TELCO_GRAPH_DATA = "SET_TELCO_GRAPH_DATA"; +/* ILAB JOBS */ +export const SET_ILAB_JOBS_DATA = "SET_ILAB_JOBS_DATA"; +export const SET_ILAB_DATE_FILTER = "SET_ILAB_DATE_FILTER"; +export const SET_ILAB_GRAPH_DATA = "SET_ILAB_GRAPH_DATA"; diff --git a/frontend/src/assets/constants/SidemenuConstants.js b/frontend/src/assets/constants/SidemenuConstants.js index bc04fd52..e65a2103 100644 --- a/frontend/src/assets/constants/SidemenuConstants.js +++ b/frontend/src/assets/constants/SidemenuConstants.js @@ -2,3 +2,4 @@ export const HOME_NAV = "home"; export const 
QUAY_NAV = "quay"; export const OCP_NAV = "ocp"; export const TELCO_NAV = "telco"; +export const ILAB_NAV = "ilab"; diff --git a/frontend/src/components/atoms/PlotGraph/index.jsx b/frontend/src/components/atoms/PlotGraph/index.jsx index 182496f7..d93a3d3b 100644 --- a/frontend/src/components/atoms/PlotGraph/index.jsx +++ b/frontend/src/components/atoms/PlotGraph/index.jsx @@ -1,6 +1,5 @@ import Plotly from "react-plotly.js"; import PropTypes from "prop-types"; - const PlotGraph = (props) => { return ( { }, []); return ( - + {content.map((unit) => ( diff --git a/frontend/src/components/molecules/SideMenuOptions/index.jsx b/frontend/src/components/molecules/SideMenuOptions/index.jsx index 48bed8de..17a00160 100644 --- a/frontend/src/components/molecules/SideMenuOptions/index.jsx +++ b/frontend/src/components/molecules/SideMenuOptions/index.jsx @@ -28,6 +28,11 @@ const sideMenuOptions = [ key: "telco", displayName: "Telco", }, + { + id: CONSTANTS.ILAB_NAV, + key: "ilab", + displayName: "ILAB", + }, ]; const MenuOptions = () => { diff --git a/frontend/src/components/organisms/TableFilters/index.jsx b/frontend/src/components/organisms/TableFilters/index.jsx index c5f5ae62..0dd5885d 100644 --- a/frontend/src/components/organisms/TableFilters/index.jsx +++ b/frontend/src/components/organisms/TableFilters/index.jsx @@ -124,7 +124,8 @@ const TableFilter = (props) => { )} - {Object.keys(appliedFilters).length > 0 && + {appliedFilters && + Object.keys(appliedFilters).length > 0 && Object.keys(appliedFilters).map((key) => ( {getFilterName(key)} : diff --git a/frontend/src/components/templates/ILab/index.jsx b/frontend/src/components/templates/ILab/index.jsx new file mode 100644 index 00000000..ace948fe --- /dev/null +++ b/frontend/src/components/templates/ILab/index.jsx @@ -0,0 +1,202 @@ +import { + CheckCircleIcon, + ExclamationCircleIcon, +} from "@patternfly/react-icons"; +import { + ExpandableRowContent, + Table, + Tbody, + Td, + Th, + Thead, + Tr, +} from "@patternfly/react-table"; +import { Grid, GridItem, Label } from "@patternfly/react-core"; +import { fetchGraphData, fetchILabJobs } from "@/actions/ilabActions"; +import { formatDateTime, uid } from "@/utils/helper"; +import { useDispatch, useSelector } from "react-redux"; +import { useEffect, useState } from "react"; + +import Plot from "react-plotly.js"; +import TableFilter from "@/components/organisms/TableFilters"; +import { useNavigate } from "react-router-dom"; +import { cloneDeep } from "lodash"; + +const ILab = () => { + const dispatch = useDispatch(); + const navigate = useNavigate(); + + const { results, start_date, end_date, graphData } = useSelector( + (state) => state.ilab + ); + const isGraphLoading = useSelector((state) => state.loading.isGraphLoading); + + const [expandedResult, setExpandedResult] = useState([]); + + const isResultExpanded = (res) => expandedResult?.includes(res); + const setExpanded = (run, isExpanding = true) => { + setExpandedResult((prevExpanded) => { + const otherExpandedRunNames = prevExpanded.filter((r) => r !== run.id); + return isExpanding + ? 
[...otherExpandedRunNames, run.id] + : otherExpandedRunNames; + }); + if (isExpanding) { + dispatch(fetchGraphData(run.id, run?.primary_metrics[0])); + } + }; + + const getGraphData = (id) => { + const data = graphData?.filter((a) => a.uid === id); + return cloneDeep(data); + }; + const hasGraphData = (uuid) => { + const hasData = getGraphData(uuid).length > 0; + + return hasData; + }; + + useEffect(() => { + dispatch(fetchILabJobs()); + }, [dispatch]); + + const columnNames = { + benchmark: "Benchmark", + email: "Email", + name: "Name", + source: "Source", + metric: "Metric", + begin_date: "Start Date", + end_date: "End Date", + status: "Status", + }; + + const StatusCell = (props) => { + return props.value?.toLowerCase() === "pass" ? ( + + ) : ( + + ); + }; + + const RenderKey = (props) => { + const { value } = props; + return ( + <> + {Object.keys(value).length > 0 && + Object.keys(value).map((unit) => { + return ( + <> + + {" "} + {value[unit]} {""} + + + ); + })} + + ); + }; + + return ( + <> + + + + + + + + + + + + {results.map((item, rowIndex) => ( + <> + + + + + + + + + + + ))} + +
+ {columnNames.metric}{columnNames.begin_date}{columnNames.end_date}{columnNames.status}
+ setExpanded(item, !isResultExpanded(item.id)), + expandId: `expandId-${uid()}`, + }} + /> + + {item.primary_metrics[0]}{formatDateTime(item.begin_date)}{formatDateTime(item.end_date)} + +
+ + + + + Tags + + {Object.keys(item.tags).length > 0 && + Object.keys(item.tags).map((key) => ( + <> +
+ {key}:{" "} + {item.tags[key]} +
+ + ))} +
+ + + Parameters + + {Object.keys(item.params).length > 0 && + Object.keys(item.params).map((key) => ( + <> +
+ {key}:{" "} + {item.params[key]} +
+ + ))} +
+ + {isGraphLoading && !hasGraphData(item.id) ? ( +
+ ) : ( + <> + + + )} +
+
+
+
+ + ); +}; + +export default ILab; diff --git a/frontend/src/reducers/ilabReducer.js b/frontend/src/reducers/ilabReducer.js new file mode 100644 index 00000000..c91ce767 --- /dev/null +++ b/frontend/src/reducers/ilabReducer.js @@ -0,0 +1,30 @@ +import * as TYPES from "@/actions/types"; + +const initialState = { + results: [], + start_date: "", + end_date: "", + graphData: [], +}; +const ILabReducer = (state = initialState, action = {}) => { + const { type, payload } = action; + switch (type) { + case TYPES.SET_ILAB_JOBS_DATA: + return { + ...state, + results: payload, + }; + case TYPES.SET_ILAB_DATE_FILTER: + return { + ...state, + start_date: payload.start_date, + end_date: payload.end_date, + }; + case TYPES.SET_ILAB_GRAPH_DATA: + return { ...state, graphData: [...state.graphData, payload] }; + default: + return state; + } +}; + +export default ILabReducer; diff --git a/frontend/src/reducers/index.js b/frontend/src/reducers/index.js index 1fb4c555..43970170 100644 --- a/frontend/src/reducers/index.js +++ b/frontend/src/reducers/index.js @@ -1,4 +1,5 @@ import HomeReducer from "./homeReducer"; +import ILabReducer from "./ilabReducer"; import LoadingReducer from "./loadingReducer"; import OCPReducer from "./ocpReducer"; import QuayReducer from "./quayReducer"; @@ -15,4 +16,5 @@ export default combineReducers({ ocp: OCPReducer, quay: QuayReducer, telco: TelcoReducer, + ilab: ILabReducer, }); diff --git a/frontend/src/store/reducers/InitialData.js b/frontend/src/store/reducers/InitialData.js deleted file mode 100644 index 80503b3c..00000000 --- a/frontend/src/store/reducers/InitialData.js +++ /dev/null @@ -1,181 +0,0 @@ - -export const OCP_INITIAL_DATA = { - initialState: true, - success: 0, - failure: 0, - total: 0, - others: 0, - duration:0, - benchmarks: ["All"], - versions: ["All"], - workers: ["All"], - ciSystems: ["All"], - networkTypes: ["All"], - jobTypes: ["All"], - rehearses: ["All"], - allIpsec: ["All"], - allFips: ["All"], - allEncrypted: ["All"], - encryptionTypes: ["All"], - allPublish: ["All"], - computeArchs: ["All"], - controlPlaneArchs: ["All"], - jobStatuses: ["All"], - selectedBenchmark: "All", - selectedVersion: "All", - selectedPlatform: "All", - selectedWorkerCount: "All", - selectedNetworkType: "All", - selectedCiSystem: "All", - selectedJobType: "All", - selectedRehearse: "All", - selectedIpsec: "All", - selectedFips: "All", - selectedEncrypted: "All", - selectedEncryptionType: "All", - selectedPublish: "All", - selectedComputeArch: "All", - selectedControlPlaneArch: "All", - selectedJobStatus: "All", - waitForUpdate: false, - platforms: ["All"], - copyData: [], - data: [], - updatedTime: 'Loading', - error: null, - startDate: '', - endDate: '', - tableData : [{ name: "Benchmark", value: "benchmark" }, - {name:"Release Stream", value: "releaseStream"}, - {name:"Build", value: "build"}, - {name: "Worker Count", value: "workerNodesCount"}, - {name: "Start Date", value: "startDate"}, - {name: "End Date", value: "endDate"}, - {name: "Status", value: "jobStatus"}], -} - -export const QUAY_INITIAL_DATA = { - initialState: true, - success: 0, - failure: 0, - total: 0, - others: 0, - duration:0, - ciSystems: ["All"], - platforms: ["All"], - benchmarks: ["All"], - releaseStreams: ["All"], - workers: ["All"], - hitSizes: ["All"], - concurrencies: ["All"], - imagePushPulls: ["All"], - selectedCiSystem: "All", - selectedPlatform: "All", - selectedBenchmark: "All", - selectedReleaseStream: "All", - selectedWorkerCount: "All", - selectedHitSize: "All", - selectedConcurrency: 
"All", - selectedImagePushPulls: "All", - waitForUpdate: false, - copyData: [], - data: [], - updatedTime: 'Loading', - error: null, - startDate: '', - endDate: '', - tableData : [{ name: "Benchmark", value: "benchmark" }, - {name:"Release Stream", value: "releaseStream"}, - {name:"Platform", value: "platform"}, - {name: "Worker Count", value: "workerNodesCount"}, - {name: "Start Date", value: "startDate"}, - {name: "End Date", value: "endDate"}, - {name: "Status", value: "jobStatus"}], -} - -export const TELCO_INITIAL_DATA = { - initialState: true, - success: 0, - failure: 0, - total: 0, - others: 0, - duration:0, - ciSystems: ["All"], - benchmarks: ["All"], - versions: ["All"], - releaseStreams: ["All"], - formals: ["All"], - nodeNames: ["All"], - cpus: ["All"], - selectedCiSystem: "All", - selectedBenchmark: "All", - selectedVersion: "All", - selectedReleaseStream: "All", - selectedFormal: "All", - selectedCpu: "All", - selectedNodeName: "All", - waitForUpdate: false, - copyData: [], - data: [], - updatedTime: 'Loading', - error: null, - startDate: '', - endDate: '', - tableData : [{ name: "Benchmark", value: "benchmark" }, - {name:"Release Stream", value: "releaseStream"}, - {name:"Build", value: "ocpVersion"}, - {name:"CPU", value: "cpu"}, - {name:"Node Name", value: "nodeName"}, - {name: "Start Date", value: "startDate"}, - {name: "End Date", value: "endDate"}, - {name: "Status", value: "jobStatus"}], -} - -export const CPT_INITIAL_DATA = { - initialState: true, - success: 0, - failure: 0, - total: 0, - others: 0, - testNames: ["All"], - products: ["All"], - ciSystems: ["All"], - statuses: ["All"], - releaseStreams: ["All"], - selectedCiSystem: "All", - selectedProduct: "All", - selectedTestName: "All", - selectedJobStatus: "All", - selectedReleaseStream: "All", - waitForUpdate: false, - copyData: [], - data: [], - updatedTime: 'Loading', - error: null, - startDate: '', - endDate: '', - tableData : [{name:"Product", value: "product"}, - { name: "CI System", value: "ciSystem" }, - {name: "Test Name", value: "testName"}, - {name: "Version", value: "version"}, - {name: "Release Stream", value: "releaseStream"}, - {name: "Start Date", value: "startDate"}, - {name: "End Date", value: "endDate"}, - {name: "Build URL", value: "buildUrl"}, - {name: "Status", value: "jobStatus"},], -} - -export const GRAPH_INITIAL_DATA = { - uuid_results: {}, - graphError: false, -} - -export const QUAY_GRAPH_INITIAL_DATA = { - uuid_results: {}, - graphError: false, -} - -export const TELCO_GRAPH_INITIAL_DATA = { - uuid_results: {}, - graphError: false, -} diff --git a/frontend/src/store/reducers/index.js b/frontend/src/store/reducers/index.js deleted file mode 100644 index fe4fddad..00000000 --- a/frontend/src/store/reducers/index.js +++ /dev/null @@ -1,18 +0,0 @@ -import ocpJobsReducer from "./OCPJobsReducer"; -import cptJobsReducer from "./CPTJobsReducer"; -import quayJobsReducer from "./QuayJobsReducer"; -import telcoJobsReducer from "./TelcoJobsReducer"; -import graphReducer from "./GraphReducer"; -import quayGraphReducer from "./QuayGraphReducer"; -import telcoGraphReducer from "./TelcoGraphReducer"; - - -export const rootReducer = { - 'ocpJobs': ocpJobsReducer, - 'cptJobs': cptJobsReducer, - 'quayJobs': quayJobsReducer, - 'telcoJobs': telcoJobsReducer, - 'graph': graphReducer, - 'quayGraph': quayGraphReducer, - 'telcoGraph': telcoGraphReducer, -} diff --git a/frontend/src/utils/apiConstants.js b/frontend/src/utils/apiConstants.js index 52576b4a..33fe0ccd 100644 --- a/frontend/src/utils/apiConstants.js 
+++ b/frontend/src/utils/apiConstants.js @@ -1,7 +1,7 @@ export const getUrl = () => { const { hostname, protocol } = window.location; return hostname === "localhost" - ? "http://localhost:8000" + ? "http://0.0.0.0:8000" : `${protocol}//${hostname}`; }; @@ -17,3 +17,6 @@ export const QUAY_GRAPH_API_V1 = "/api/v1/quay/graph"; export const TELCO_JOBS_API_V1 = "/api/v1/telco/jobs"; export const TELCO_GRAPH_API_V1 = "/api/v1/telco/graph"; + +export const ILABS_JOBS_API_V1 = "/api/v1/ilab/runs"; +export const ILAB_GRAPH_API_V1 = "/api/v1/ilab/runs/"; diff --git a/frontend/src/utils/routeConstants.js b/frontend/src/utils/routeConstants.js index 53f271fa..c46bab55 100644 --- a/frontend/src/utils/routeConstants.js +++ b/frontend/src/utils/routeConstants.js @@ -2,3 +2,4 @@ export const HOME = "Home"; export const OCP = "OCP"; export const QUAY = "QUAY"; export const TELCO = "TELCO"; +export const ILAB = "ILAB"; diff --git a/local-compose.sh b/local-compose.sh index 284595a5..98b4e01f 100755 --- a/local-compose.sh +++ b/local-compose.sh @@ -11,11 +11,11 @@ CPT_CONFIG=${CPT_CONFIG:-"$PWD/backend/ocpperf.toml"} podman rm -f front back podman build -f backend/backend.containerfile --tag backend -podman build -f frontend/frontend.containerfile --tag frontend +#podman build -f frontend/frontend.containerfile --tag frontend # NOTE: add --network=host to test against a local containerized Horreum podman run -d --name=back -p ${CPT_BACKEND_PORT}:8000 --network=host -v "${CPT_CONFIG}:/backend/ocpperf.toml:Z" localhost/backend -podman run -d --name=front --net=host -p ${CPT_FRONTEND_PORT}:3000 localhost/frontend +#podman run -d --name=front --net=host -p ${CPT_FRONTEND_PORT}:3000 localhost/frontend From e6c8ee0940718d216d8db0c6991f50f567dce2aa Mon Sep 17 00:00:00 2001 From: MVarshini Date: Mon, 23 Sep 2024 18:37:25 +0530 Subject: [PATCH 02/29] UI code updates --- backend/app/services/crucible_svc.py | 2 +- frontend/src/actions/ilabActions.js | 146 +++++++++---- frontend/src/actions/paginationActions.js | 12 ++ frontend/src/actions/types.js | 6 + .../src/components/atoms/PlotGraph/index.jsx | 4 +- .../components/organisms/Pagination/index.jsx | 16 +- .../components/templates/ILab/ILabGraph.jsx | 44 ++++ .../src/components/templates/ILab/MetaRow.jsx | 40 ++++ .../templates/ILab/MetricsDropdown.jsx | 86 ++++++++ .../components/templates/ILab/StatusCell.jsx | 24 +++ .../src/components/templates/ILab/index.jsx | 196 +++++++++--------- .../src/components/templates/ILab/index.less | 13 ++ frontend/src/reducers/ilabReducer.js | 29 ++- frontend/src/reducers/loadingReducer.js | 2 +- 14 files changed, 479 insertions(+), 141 deletions(-) create mode 100644 frontend/src/components/templates/ILab/ILabGraph.jsx create mode 100644 frontend/src/components/templates/ILab/MetaRow.jsx create mode 100644 frontend/src/components/templates/ILab/MetricsDropdown.jsx create mode 100644 frontend/src/components/templates/ILab/StatusCell.jsx create mode 100644 frontend/src/components/templates/ILab/index.less diff --git a/backend/app/services/crucible_svc.py b/backend/app/services/crucible_svc.py index 6a61dadc..6b750377 100644 --- a/backend/app/services/crucible_svc.py +++ b/backend/app/services/crucible_svc.py @@ -603,7 +603,7 @@ def _get_metric_ids( # We want to help filter a consistent summary, so only show those # names with more than one value. 
- response["names"] = {n: sorted(v) for n, v in names.items() if len(v) > 1} + response["names"] = {n: sorted(v) for n, v in names.items() if v and len(v) > 1} response["periods"] = list(periods) raise HTTPException( status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=[response] diff --git a/frontend/src/actions/ilabActions.js b/frontend/src/actions/ilabActions.js index f3277edd..d76a003d 100644 --- a/frontend/src/actions/ilabActions.js +++ b/frontend/src/actions/ilabActions.js @@ -1,36 +1,32 @@ import * as API_ROUTES from "@/utils/apiConstants"; import * as TYPES from "./types.js"; +import { showFailureToast, showToast } from "@/actions/toastActions"; + import API from "@/utils/axiosInstance"; import { appendQueryString } from "@/utils/helper"; -import { showToast, showFailureToast } from "@/actions/toastActions"; +import { cloneDeep } from "lodash"; export const fetchILabJobs = () => async (dispatch, getState) => { try { dispatch({ type: TYPES.LOADING }); - const { start_date, end_date } = getState().ilab; + const { start_date, end_date, size, offset } = getState().ilab; const response = await API.get(API_ROUTES.ILABS_JOBS_API_V1, { params: { ...(start_date && { start_date }), ...(end_date && { end_date }), + ...(size && { size }), + ...(offset && { offset }), }, }); if (response.status === 200 && response?.data?.results.length > 0) { - // const startDate = new Date(response.data.startDate), - // endDate = new Date(response.data.endDate); const startDate = response.data.startDate, endDate = response.data.endDate; dispatch({ type: TYPES.SET_ILAB_JOBS_DATA, payload: response.data.results, }); - // dispatch({ - // type: TYPES.SET_ILAB_DATE_FILTER, - // payload: { - // start_date: `${startDate.getFullYear()}-${startDate.getMonth()}-${startDate.getDate()}`, - // end_date: `${endDate.getFullYear()}-${endDate.getMonth()}-${endDate.getDate()}`, - // }, - // }); + dispatch({ type: TYPES.SET_ILAB_DATE_FILTER, payload: { @@ -38,6 +34,15 @@ export const fetchILabJobs = () => async (dispatch, getState) => { end_date: endDate, }, }); + + dispatch({ + type: TYPES.SET_ILAB_TOTAL_ITEMS, + payload: response.data.total, + }); + dispatch({ + type: TYPES.SET_ILAB_OFFSET, + payload: response.data.next_offset, + }); } } catch (error) { dispatch(showFailureToast()); @@ -62,37 +67,108 @@ export const setIlabDateFilter = dispatch(fetchILabJobs()); }; -export const fetchGraphData = (uid, metric) => async (dispatch) => { +export const fetchMetricsInfo = (uid) => async (dispatch) => { try { - dispatch({ type: TYPES.GRAPH_LOADING }); - const periods = await API.get(`/api/v1/ilab/runs/${uid}/periods`); - let graphs = []; - periods.data.forEach((p) => { - graphs.push({ metric, periods: [p.id] }); - graphs.push({ - metric: "mpstat::Busy-CPU", - aggregate: true, - periods: [p.id], - }); - }); - const response = await API.post(`/api/v1/ilab/runs/multigraph`, { - run: uid, - name: metric, - graphs, - }); + dispatch({ type: TYPES.LOADING }); + const response = await API.get(`/api/v1/ilab/runs/${uid}/metrics`); if (response.status === 200) { + if ( + response.data.constructor === Object && + Object.keys(response.data).length > 0 + ) { + dispatch({ + type: TYPES.SET_ILAB_METRICS, + payload: { uid, metrics: Object.keys(response.data) }, + }); + } + } + } catch (error) { + console.error(error); + dispatch(showFailureToast()); + } + dispatch({ type: TYPES.COMPLETED }); +}; + +export const fetchGraphData = + (uid, metric, primary_metric) => async (dispatch, getState) => { + try { + const graphData = 
cloneDeep(getState().ilab.graphData); + const filterData = graphData.filter((i) => i.uid !== uid); dispatch({ type: TYPES.SET_ILAB_GRAPH_DATA, - payload: { + payload: filterData, + }); + const copyData = cloneDeep(filterData); + dispatch({ type: TYPES.GRAPH_LOADING }); + const periods = await API.get(`/api/v1/ilab/runs/${uid}/periods`); + let graphs = []; + periods.data.forEach((p) => { + graphs.push({ metric, periods: [p.id] }); + graphs.push({ + metric: metric, + aggregate: true, + periods: [p.id], + }); + }); + const response = await API.post(`/api/v1/ilab/runs/multigraph`, { + run: uid, + name: primary_metric, + graphs, + }); + if (response.status === 200) { + copyData.push({ uid, data: response.data.data, layout: response.data.layout, - }, - }); + }); + dispatch({ + type: TYPES.SET_ILAB_GRAPH_DATA, + payload: copyData, + }); + } + } catch (error) { + console.error(error); + dispatch(showToast("danger", "Graph error", error.data)); } - } catch (error) { - console.error(error); - dispatch(showToast("danger", "Graph error", error.data)); + dispatch({ type: TYPES.GRAPH_COMPLETED }); + }; + +export const setIlabPage = (pageNo) => ({ + type: TYPES.SET_ILAB_PAGE, + payload: pageNo, +}); + +export const setIlabPageOptions = (page, perPage) => ({ + type: TYPES.SET_ILAB_PAGE_OPTIONS, + payload: { page, perPage }, +}); + +export const checkIlabJobs = (newPage) => (dispatch, getState) => { + const results = cloneDeep(getState().ilab.results); + const { totalItems, perPage } = getState().ilab; + + const startIdx = (newPage - 1) * perPage; + const endIdx = newPage * perPage; + + if ( + (typeof results[startIdx] === "undefined" || + typeof results[endIdx] === "undefined") && + results.length < totalItems + ) { + dispatch(fetchILabJobs()); } - dispatch({ type: TYPES.GRAPH_COMPLETED }); +}; + +export const setSelectedMetrics = (id, metrics) => (dispatch, getState) => { + const metrics_selected = cloneDeep(getState().ilab.metrics_selected); + // if (id in metrics_selected) { + // metrics_selected[id] = metrics; + // } else { + // metrics_selected[id] = metrics; + // } + metrics_selected[id] = metrics; + dispatch({ + type: TYPES.SET_ILAB_SELECTED_METRICS, + payload: metrics_selected, + }); }; diff --git a/frontend/src/actions/paginationActions.js b/frontend/src/actions/paginationActions.js index 80a7dff1..8cbc9641 100644 --- a/frontend/src/actions/paginationActions.js +++ b/frontend/src/actions/paginationActions.js @@ -3,9 +3,13 @@ import { setCPTPageOptions, sliceCPTTableRows, } from "./homeActions"; +import { setIlabPage, setIlabPageOptions } from "./ilabActions"; import { setOCPPage, setOCPPageOptions, sliceOCPTableRows } from "./ocpActions"; import { setQuayPage, setQuayPageOptions } from "./quayActions"; import { setTelcoPage, setTelcoPageOptions } from "./telcoActions"; + +import { checkIlabJobs } from "./ilabActions"; + export const setPage = (newPage, currType) => (dispatch) => { if (currType === "cpt") { dispatch(setCPTPage(newPage)); @@ -15,6 +19,8 @@ export const setPage = (newPage, currType) => (dispatch) => { dispatch(setQuayPage(newPage)); } else if (currType === "telco") { dispatch(setTelcoPage(newPage)); + } else if (currType === "ilab") { + dispatch(setIlabPage(newPage)); } }; @@ -27,6 +33,8 @@ export const setPageOptions = (newPage, newPerPage, currType) => (dispatch) => { dispatch(setQuayPageOptions(newPage, newPerPage)); } else if (currType === "telco") { dispatch(setTelcoPageOptions(newPage, newPerPage)); + } else if (currType === "ilab") { + dispatch(setIlabPageOptions(newPage, 
newPerPage)); } }; @@ -37,3 +45,7 @@ export const sliceTableRows = (startIdx, endIdx, currType) => (dispatch) => { dispatch(sliceOCPTableRows(startIdx, endIdx)); } }; + +export const fetchNextJobs = (newPage) => (dispatch) => { + dispatch(checkIlabJobs(newPage)); +}; diff --git a/frontend/src/actions/types.js b/frontend/src/actions/types.js index 2dab4cca..e4739422 100644 --- a/frontend/src/actions/types.js +++ b/frontend/src/actions/types.js @@ -81,3 +81,9 @@ export const SET_TELCO_GRAPH_DATA = "SET_TELCO_GRAPH_DATA"; export const SET_ILAB_JOBS_DATA = "SET_ILAB_JOBS_DATA"; export const SET_ILAB_DATE_FILTER = "SET_ILAB_DATE_FILTER"; export const SET_ILAB_GRAPH_DATA = "SET_ILAB_GRAPH_DATA"; +export const SET_ILAB_TOTAL_ITEMS = "SET_ILAB_TOTAL_ITEMS"; +export const SET_ILAB_OFFSET = "SET_ILAB_OFFSET"; +export const SET_ILAB_PAGE = "SET_ILAB_PAGE"; +export const SET_ILAB_PAGE_OPTIONS = "SET_ILAB_PAGE_OPTIONS"; +export const SET_ILAB_METRICS = "SET_ILAB_METRICS"; +export const SET_ILAB_SELECTED_METRICS = "SET_ILAB_SELECTED_METRICS"; diff --git a/frontend/src/components/atoms/PlotGraph/index.jsx b/frontend/src/components/atoms/PlotGraph/index.jsx index d93a3d3b..2afb0f1c 100644 --- a/frontend/src/components/atoms/PlotGraph/index.jsx +++ b/frontend/src/components/atoms/PlotGraph/index.jsx @@ -5,7 +5,8 @@ const PlotGraph = (props) => { ); }; @@ -13,4 +14,5 @@ export default PlotGraph; PlotGraph.propTypes = { data: PropTypes.arr, + layout: PropTypes.object, }; diff --git a/frontend/src/components/organisms/Pagination/index.jsx b/frontend/src/components/organisms/Pagination/index.jsx index 7b316a21..3eb2c706 100644 --- a/frontend/src/components/organisms/Pagination/index.jsx +++ b/frontend/src/components/organisms/Pagination/index.jsx @@ -1,5 +1,6 @@ import { Pagination, PaginationVariant } from "@patternfly/react-core"; import { + fetchNextJobs, setPage, setPageOptions, sliceTableRows, @@ -13,6 +14,7 @@ const RenderPagination = (props) => { const dispatch = useDispatch(); const perPageOptions = [ + { title: "10", value: 10 }, { title: "25", value: 25 }, { title: "50", value: 50 }, { title: "100", value: 100 }, @@ -21,18 +23,27 @@ const RenderPagination = (props) => { const onSetPage = useCallback( (_evt, newPage, _perPage, startIdx, endIdx) => { dispatch(setPage(newPage, props.type)); - dispatch(sliceTableRows(startIdx, endIdx, props.type)); + if (props.type !== "ilab") { + dispatch(sliceTableRows(startIdx, endIdx, props.type)); + } }, [dispatch, props.type] ); const onPerPageSelect = useCallback( (_evt, newPerPage, newPage, startIdx, endIdx) => { dispatch(setPageOptions(newPage, newPerPage, props.type)); - dispatch(sliceTableRows(startIdx, endIdx, props.type)); + if (props.type !== "ilab") { + dispatch(sliceTableRows(startIdx, endIdx, props.type)); + } }, [dispatch, props.type] ); + const checkAndFetch = (_evt, newPage) => { + if (props.type === "ilab") { + dispatch(fetchNextJobs(newPage)); + } + }; return ( { perPage={props.perPage} page={props.page} variant={PaginationVariant.bottom} + onNextClick={checkAndFetch} perPageOptions={perPageOptions} onSetPage={onSetPage} onPerPageSelect={onPerPageSelect} diff --git a/frontend/src/components/templates/ILab/ILabGraph.jsx b/frontend/src/components/templates/ILab/ILabGraph.jsx new file mode 100644 index 00000000..565cb45e --- /dev/null +++ b/frontend/src/components/templates/ILab/ILabGraph.jsx @@ -0,0 +1,44 @@ +import Plot from "react-plotly.js"; +import PropType from "prop-types"; +import { cloneDeep } from "lodash"; +import { uid } from 
"@/utils/helper"; +import { useSelector } from "react-redux"; + +const ILabGraph = (props) => { + const { item } = props; + const isGraphLoading = useSelector((state) => state.loading.isGraphLoading); + const { graphData } = useSelector((state) => state.ilab); + + const graphDataCopy = cloneDeep(graphData); + + const getGraphData = (id) => { + const data = graphDataCopy?.filter((a) => a.uid === id); + return data; + }; + const hasGraphData = (uuid) => { + const hasData = getGraphData(uuid).length > 0; + + return hasData; + }; + + return ( + <> + {isGraphLoading && !hasGraphData(item.id) ? ( +
+ ) : !isGraphLoading ? ( + <> + ) : ( + + )} + + ); +}; + +ILabGraph.propTypes = { + item: PropType.object, +}; +export default ILabGraph; diff --git a/frontend/src/components/templates/ILab/MetaRow.jsx b/frontend/src/components/templates/ILab/MetaRow.jsx new file mode 100644 index 00000000..c196e79f --- /dev/null +++ b/frontend/src/components/templates/ILab/MetaRow.jsx @@ -0,0 +1,40 @@ +import { Table, Tbody, Th, Thead, Tr } from "@patternfly/react-table"; + +import Proptypes from "prop-types"; +import { Title } from "@patternfly/react-core"; +import { uid } from "@/utils/helper"; + +const MetaRow = (props) => { + const { metadata, heading } = props; + return ( + <> + + {heading} + + + + + + + + + + {metadata.map((item) => ( + + + + + ))} + +
+ Key + Value
{item[0]}{item[1]}
+ + ); +}; + +MetaRow.propTypes = { + heading: Proptypes.string, + metadata: Proptypes.array, +}; +export default MetaRow; diff --git a/frontend/src/components/templates/ILab/MetricsDropdown.jsx b/frontend/src/components/templates/ILab/MetricsDropdown.jsx new file mode 100644 index 00000000..f301953d --- /dev/null +++ b/frontend/src/components/templates/ILab/MetricsDropdown.jsx @@ -0,0 +1,86 @@ +import { + MenuToggle, + Select, + SelectList, + SelectOption, +} from "@patternfly/react-core"; +import { fetchGraphData, setSelectedMetrics } from "@/actions/ilabActions"; +import { useDispatch, useSelector } from "react-redux"; + +import { cloneDeep } from "lodash"; +import { uid } from "@/utils/helper"; +import { useState } from "react"; + +const MetricsSelect = (props) => { + const { metrics, metrics_selected } = useSelector((state) => state.ilab); + const { item } = props; + /* Metrics select */ + const [isOpen, setIsOpen] = useState(false); + const dispatch = useDispatch(); + // const [selected, setSelected] = useState("Select a value"); + + const toggle1 = (toggleRef, selected) => ( + + {selected} + + ); + + const onToggleClick = () => { + setIsOpen(!isOpen); + }; + const onSelect = (_event, value) => { + console.log("selected", value); + const run = value.split("*"); + //setSelected(run[1].trim()); + dispatch(setSelectedMetrics(run[0].trim(), run[1].trim())); + setIsOpen(false); + dispatch(fetchGraphData(run[0].trim(), run[1].trim(), run[2].trim())); + }; + const metricsDataCopy = cloneDeep(metrics); + + const getMetricsData = (id) => { + const data = metricsDataCopy?.filter((a) => a.uid === id); + return data; + }; + const hasMetricsData = (uuid) => { + const hasData = getMetricsData(uuid).length > 0; + + return hasData; + }; + /* Metrics select */ + return ( + <> + {hasMetricsData(item.id) && ( + + )} + + ); +}; + +export default MetricsSelect; diff --git a/frontend/src/components/templates/ILab/StatusCell.jsx b/frontend/src/components/templates/ILab/StatusCell.jsx new file mode 100644 index 00000000..a4bd208f --- /dev/null +++ b/frontend/src/components/templates/ILab/StatusCell.jsx @@ -0,0 +1,24 @@ +import { + CheckCircleIcon, + ExclamationCircleIcon, +} from "@patternfly/react-icons"; + +import { Label } from "@patternfly/react-core"; +import Proptype from "prop-types"; + +const StatusCell = (props) => { + return props.value?.toLowerCase() === "pass" ? 
( + + ) : ( + + ); +}; +StatusCell.propTypes = { + value: Proptype.string, +}; + +export default StatusCell; diff --git a/frontend/src/components/templates/ILab/index.jsx b/frontend/src/components/templates/ILab/index.jsx index ace948fe..7e99d41c 100644 --- a/frontend/src/components/templates/ILab/index.jsx +++ b/frontend/src/components/templates/ILab/index.jsx @@ -1,7 +1,13 @@ +import "./index.less"; + import { - CheckCircleIcon, - ExclamationCircleIcon, -} from "@patternfly/react-icons"; + Accordion, + AccordionContent, + AccordionItem, + AccordionToggle, + Card, + CardBody, +} from "@patternfly/react-core"; import { ExpandableRowContent, Table, @@ -11,30 +17,40 @@ import { Thead, Tr, } from "@patternfly/react-table"; -import { Grid, GridItem, Label } from "@patternfly/react-core"; -import { fetchGraphData, fetchILabJobs } from "@/actions/ilabActions"; +import { fetchILabJobs, fetchMetricsInfo } from "@/actions/ilabActions"; import { formatDateTime, uid } from "@/utils/helper"; import { useDispatch, useSelector } from "react-redux"; import { useEffect, useState } from "react"; -import Plot from "react-plotly.js"; +import ILabGraph from "./ILabGraph"; +import MetaRow from "./MetaRow"; +import MetricsSelect from "./MetricsDropdown"; +import RenderPagination from "@/components/organisms/Pagination"; +import StatusCell from "./StatusCell"; import TableFilter from "@/components/organisms/TableFilters"; import { useNavigate } from "react-router-dom"; -import { cloneDeep } from "lodash"; const ILab = () => { const dispatch = useDispatch(); const navigate = useNavigate(); - const { results, start_date, end_date, graphData } = useSelector( - (state) => state.ilab - ); - const isGraphLoading = useSelector((state) => state.loading.isGraphLoading); - + const { results, start_date, end_date } = useSelector((state) => state.ilab); const [expandedResult, setExpandedResult] = useState([]); + const [expanded, setAccExpanded] = useState(["bordered-toggle1"]); + const onToggle = (id) => { + const index = expanded.indexOf(id); + const newExpanded = + index >= 0 + ? [ + ...expanded.slice(0, index), + ...expanded.slice(index + 1, expanded.length), + ] + : [...expanded, id]; + setAccExpanded(newExpanded); + }; const isResultExpanded = (res) => expandedResult?.includes(res); - const setExpanded = (run, isExpanding = true) => { + const setExpanded = async (run, isExpanding = true) => { setExpandedResult((prevExpanded) => { const otherExpandedRunNames = prevExpanded.filter((r) => r !== run.id); return isExpanding @@ -42,20 +58,12 @@ const ILab = () => { : otherExpandedRunNames; }); if (isExpanding) { - dispatch(fetchGraphData(run.id, run?.primary_metrics[0])); + dispatch(fetchMetricsInfo(run.id)); + // dispatch(fetchGraphData(run.id, run?.primary_metrics[0])); } }; - const getGraphData = (id) => { - const data = graphData?.filter((a) => a.uid === id); - return cloneDeep(data); - }; - const hasGraphData = (uuid) => { - const hasData = getGraphData(uuid).length > 0; - - return hasData; - }; - + const { totalItems, page, perPage } = useSelector((state) => state.ilab); useEffect(() => { dispatch(fetchILabJobs()); }, [dispatch]); @@ -70,38 +78,6 @@ const ILab = () => { end_date: "End Date", status: "Status", }; - - const StatusCell = (props) => { - return props.value?.toLowerCase() === "pass" ? 
( - - ) : ( - - ); - }; - - const RenderKey = (props) => { - const { value } = props; - return ( - <> - {Object.keys(value).length > 0 && - Object.keys(value).map((unit) => { - return ( - <> - - {" "} - {value[unit]} {""} - - - ); - })} - - ); - }; - return ( <> { - - - - Tags - - {Object.keys(item.tags).length > 0 && - Object.keys(item.tags).map((key) => ( - <> -
- {key}:{" "} - {item.tags[key]} -
- - ))} -
- - - Parameters - - {Object.keys(item.params).length > 0 && - Object.keys(item.params).map((key) => ( - <> -
- {key}:{" "} - {item.params[key]} -
- - ))} -
- - {isGraphLoading && !hasGraphData(item.id) ? ( -
- ) : ( - <> - - - )} -
-
+ + + { + onToggle("bordered-toggle1"); + }} + isExpanded={expanded.includes("bordered-toggle1")} + id="bordered-toggle1" + > + Metadata + + + +
+ + + + + + + + + + +
+
+
+ + { + onToggle("bordered-toggle2"); + }} + isExpanded={expanded.includes("bordered-toggle2")} + id="bordered-toggle2" + > + Metrics & Graph + + + Metrics: +
+ +
+
+
+
@@ -195,6 +187,12 @@ const ILab = () => { ))} + ); }; diff --git a/frontend/src/components/templates/ILab/index.less b/frontend/src/components/templates/ILab/index.less new file mode 100644 index 00000000..02cd02cb --- /dev/null +++ b/frontend/src/components/templates/ILab/index.less @@ -0,0 +1,13 @@ +.pf-v5-c-accordion__expandable-content-body { + display: block; +} +.metadata-wrapper { + display: flex; + flex-direction: row; + margin-bottom: 1vw; + .metadata-card { + flex: 1; /* additionally, equal width */ + padding: 1em; + margin-right: 1.5vw; + } +} diff --git a/frontend/src/reducers/ilabReducer.js b/frontend/src/reducers/ilabReducer.js index c91ce767..a4e5ec50 100644 --- a/frontend/src/reducers/ilabReducer.js +++ b/frontend/src/reducers/ilabReducer.js @@ -5,6 +5,13 @@ const initialState = { start_date: "", end_date: "", graphData: [], + totalItems: 0, + page: 1, + perPage: 10, + size: 10, + offset: 1, + metrics: [], + metrics_selected: {}, }; const ILabReducer = (state = initialState, action = {}) => { const { type, payload } = action; @@ -12,7 +19,7 @@ const ILabReducer = (state = initialState, action = {}) => { case TYPES.SET_ILAB_JOBS_DATA: return { ...state, - results: payload, + results: [...state.results, ...payload], }; case TYPES.SET_ILAB_DATE_FILTER: return { @@ -20,8 +27,26 @@ const ILabReducer = (state = initialState, action = {}) => { start_date: payload.start_date, end_date: payload.end_date, }; + case TYPES.SET_ILAB_TOTAL_ITEMS: + return { + ...state, + totalItems: payload, + }; + case TYPES.SET_ILAB_OFFSET: + return { ...state, offset: payload }; + case TYPES.SET_ILAB_PAGE: + return { ...state, page: payload }; + case TYPES.SET_ILAB_PAGE_OPTIONS: + return { ...state, page: payload.page, perPage: payload.perPage }; + case TYPES.SET_ILAB_METRICS: + return { ...state, metrics: [...state.metrics, payload] }; + case TYPES.SET_ILAB_SELECTED_METRICS: + return { + ...state, + metrics_selected: payload, + }; case TYPES.SET_ILAB_GRAPH_DATA: - return { ...state, graphData: [...state.graphData, payload] }; + return { ...state, graphData: payload }; default: return state; } diff --git a/frontend/src/reducers/loadingReducer.js b/frontend/src/reducers/loadingReducer.js index 496a4e65..52f0c732 100644 --- a/frontend/src/reducers/loadingReducer.js +++ b/frontend/src/reducers/loadingReducer.js @@ -7,7 +7,7 @@ import { const initialState = { isLoading: false, - isGraphLoading: true, + isGraphLoading: false, }; const LoadingReducer = (state = initialState, action = {}) => { From 5e735a75482ba41ae43d4f2e3c9328595e04d9b5 Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Mon, 23 Sep 2024 12:06:50 -0400 Subject: [PATCH 03/29] Improve periodic graph names. Add Crucible readme file. 
Cleanups and refactoring --- backend/app/api/v1/endpoints/ilab/ilab.py | 114 +-- backend/app/services/crucible_readme.md | 89 ++ backend/app/services/crucible_svc.py | 805 ++++++++++++------ frontend/src/actions/ilabActions.js | 4 +- .../components/templates/ILab/ILabGraph.jsx | 10 +- .../src/components/templates/ILab/index.jsx | 22 +- 6 files changed, 714 insertions(+), 330 deletions(-) create mode 100644 backend/app/services/crucible_readme.md diff --git a/backend/app/api/v1/endpoints/ilab/ilab.py b/backend/app/api/v1/endpoints/ilab/ilab.py index e20849dc..5a5897f5 100644 --- a/backend/app/api/v1/endpoints/ilab/ilab.py +++ b/backend/app/api/v1/endpoints/ilab/ilab.py @@ -1,9 +1,10 @@ from datetime import datetime, timedelta, timezone from typing import Annotated, Any, Optional -from app.services.crucible_svc import CrucibleService, Graph, GraphList from fastapi import APIRouter, Query +from app.services.crucible_svc import CrucibleService, Graph, GraphList + router = APIRouter() @@ -28,39 +29,39 @@ def example_error(message: str) -> dict[str, Any]: 200: example_response( { "param": { - "model": { - "/home/models/granite-7b-lab/": 26, - "/home/models/Mixtral-8x7B-Instruct-v0.1": 20, - "/home/models/granite-7b-redhat-lab": 11, - }, - "gpus": {"4": 53}, - "workflow": {"sdg": 22, "train": 16, "train+eval": 5}, - "data-path": { - "/home/data/training/jul19-knowledge-26k.jsonl": 16, - "/home/data/training/knowledge_data.jsonl": 13, - "/home/data/jun12-phase05.jsonl": 4, - "/home/data/training/jun12-phase05.jsonl": 4, - }, - "nnodes": {"1": 37}, - "train-until": {"checkpoint:1": 17, "complete": 16}, - "save-samples": {"2500": 11, "10000": 5, "5000": 1}, - "deepspeed-cpu-offload-optimizer": {"1": 13, "0": 2}, - "deepspeed-cpu-offload-optimizer-pin-memory": {"1": 13, "0": 2}, - "batch-size": {"0": 2, "12": 2, "16": 2, "4": 2, "8": 2}, - "cpu-offload-optimizer": {"1": 6}, - "cpu-offload-pin-memory": {"1": 6}, - "nproc-per-node": {"4": 4}, - "num-runavg-samples": {"2": 2, "6": 2}, - "num-cpus": {"30": 2}, + "model": [ + "/home/models/granite-7b-redhat-lab", + "/home/models/granite-7b-lab/", + "/home/models/Mixtral-8x7B-Instruct-v0.1", + ], + "gpus": ["4"], + "workflow": ["train", "sdg", "train+eval"], + "data-path": [ + "/home/data/training/jun12-phase05.jsonl", + "/home/data/training/knowledge_data.jsonl", + "/home/data/training/jul19-knowledge-26k.jsonl", + "/home/data/jun12-phase05.jsonl", + ], + "nnodes": ["1"], + "train-until": ["checkpoint:1", "complete"], + "save-samples": ["5000", "2500", "10000"], + "deepspeed-cpu-offload-optimizer": ["0", "1"], + "deepspeed-cpu-offload-optimizer-pin-memory": ["0", "1"], + "batch-size": ["4", "8", "16", "12", "0"], + "cpu-offload-optimizer": ["1"], + "cpu-offload-pin-memory": ["1"], + "nproc-per-node": ["4"], + "num-runavg-samples": ["2", "6"], + "num-cpus": ["30"], }, - "tag": {"topology": {"none": 21}}, + "tag": {"topology": ["none"]}, } ) }, ) async def run_filters(): crucible = CrucibleService(CONFIGPATH) - return crucible.run_filters() + return crucible.get_run_filters() @router.get( @@ -139,16 +140,19 @@ async def runs( Optional[int], Query(description="Number of runs in a page", examples=[10]) ] = None, offset: Annotated[ - Optional[int], + int, Query(description="Page offset to start", examples=[10]), - ] = None, + ] = 0, ): crucible = CrucibleService(CONFIGPATH) - if start_date is None or end_date is None: + if start_date is None and end_date is None: now = datetime.now(timezone.utc) - start = now - timedelta(days=30) if start_date is None else 
start_date - end = now if end_date is None else end_date - results: dict[str, Any] = crucible.runs( + start = now - timedelta(days=30) + end = now + else: + start = start_date + end = end_date + results: dict[str, Any] = crucible.get_runs( start=start, end=end, filter=filter, sort=sort, size=size, offset=offset ) return results @@ -165,7 +169,7 @@ async def runs( ) async def tags(run: str): crucible = CrucibleService(CONFIGPATH) - return crucible.tags(run) + return crucible.get_tags(run) @router.get( @@ -200,7 +204,7 @@ async def tags(run: str): ) async def params(run: str): crucible = CrucibleService(CONFIGPATH) - return crucible.params(run) + return crucible.get_params(run) @router.get( @@ -233,7 +237,7 @@ async def params(run: str): ) async def iterations(run: str): crucible = CrucibleService(CONFIGPATH) - return crucible.iterations(run) + return crucible.get_iterations(run) @router.get( @@ -262,7 +266,7 @@ async def iterations(run: str): ) async def run_samples(run: str): crucible = CrucibleService(CONFIGPATH) - return crucible.samples(run) + return crucible.get_samples(run) @router.get( @@ -291,7 +295,7 @@ async def run_samples(run: str): ) async def run_periods(run: str): crucible = CrucibleService(CONFIGPATH) - return crucible.periods(run) + return crucible.get_periods(run) @router.get( @@ -314,7 +318,7 @@ async def run_periods(run: str): ) async def iteration_samples(iteration: str): crucible = CrucibleService(CONFIGPATH) - return crucible.samples(iteration=iteration) + return crucible.get_samples(iteration=iteration) @router.get( @@ -362,7 +366,7 @@ async def iteration_samples(iteration: str): ) async def timeline(run: str): crucible = CrucibleService(CONFIGPATH) - return crucible.timeline(run) + return crucible.get_timeline(run) @router.get( @@ -399,7 +403,7 @@ async def timeline(run: str): ) async def metrics(run: str): crucible = CrucibleService(CONFIGPATH) - return crucible.metrics_list(run) + return crucible.get_metrics_list(run) @router.get( @@ -438,7 +442,7 @@ async def metric_breakouts( ] = None, ): crucible = CrucibleService(CONFIGPATH) - return crucible.metric_breakouts(run, metric, names=name, periods=period) + return crucible.get_metric_breakouts(run, metric, names=name, periods=period) @router.get( @@ -498,7 +502,7 @@ async def metric_data( ] = False, ): crucible = CrucibleService(CONFIGPATH) - return crucible.metrics_data( + return crucible.get_metrics_data( run, metric, names=name, periods=period, aggregate=aggregate ) @@ -550,12 +554,12 @@ async def metric_summary( ] = None, ): crucible = CrucibleService(CONFIGPATH) - return crucible.metrics_summary(run, metric, names=name, periods=period) + return crucible.get_metrics_summary(run, metric, names=name, periods=period) @router.post( "/api/v1/ilab/runs/multigraph", - summary="Returns overlaid Plotly graph objects for a run", + summary="Returns overlaid Plotly graph objects", description="Returns metric data in a form usable by the Plot React component.", responses={ 200: example_response( @@ -593,7 +597,7 @@ async def metric_summary( ) async def metric_graph_body(graphs: GraphList): crucible = CrucibleService(CONFIGPATH) - return crucible.metrics_graph(graphs) + return crucible.get_metrics_graph(graphs) @router.get( @@ -656,7 +660,7 @@ async def metric_graph_param( ] = False, ): crucible = CrucibleService(CONFIGPATH) - return crucible.metrics_graph( + return crucible.get_metrics_graph( GraphList( run=run, name=metric, @@ -704,8 +708,20 @@ async def info(): responses={ 200: example_response( { - "cdm": ["ver"], - 
"run": ["name", "end", "begin", "email", "benchmark", "source", "id"], + "cdm": {"doctype": "keyword", "ver": "keyword"}, + "run": { + "begin": "date", + "benchmark": "keyword", + "desc": "text", + "email": "keyword", + "end": "date", + "harness": "keyword", + "host": "keyword", + "id": "keyword", + "name": "keyword", + "source": "keyword", + "tags": "text", + }, } ), 400: example_error("Index name 'foo' doesn't exist"), diff --git a/backend/app/services/crucible_readme.md b/backend/app/services/crucible_readme.md new file mode 100644 index 00000000..fdd9e77d --- /dev/null +++ b/backend/app/services/crucible_readme.md @@ -0,0 +1,89 @@ +The `crucible_svc` allows CPT project APIs to access a Crucible CDM backing +store to find information about runs, tags, params, iterations, samples, +periods, plus various ways to expose and aggregate metric data both for +primary benchmarks and non-periodic tools. + +The `get_runs` API is the primary entry point, returning an object that +supports filtering, sorting, and pagination of the Crucible run data decorated +with useful iteration, tag, and parameter data. + +The metrics data APIs (data, breakouts, summary, and graph) now allow +filtering by the metric "name" data. This allows "drilling down" through +the non-periodic "tool data". For example, IO data is per-disk, CPU +information is broken down by core and package. You can now aggregate +all global data (e.g., total system CPU), or filter by breakout names to +select by CPU, mode (usr, sys, irq), etc. + +For example, to return `mpstat` `Busy-CPU` graph data for one core, you +might query: + +``` +/api/v1/ilab/runs/f542a50c-55df-4ead-92d1-8c55367f2e79/graph/mpstat::Busy-CPU?name=core=12,package=1,num=77,type=guest +``` + +If you make a `graph`, `data`, or `summary` query that doesn't translate +to a unique metric, and don't allow aggregation, you'll get a diagnostic +message identifying possible additional filters. For example, with +`type=guest` removed, that same query will show the available values for +the `type` name: + +``` +{ + "detail": [ + { + "message": "More than one metric (5) probably means you should add filters", + "names": { + "type": [ + "guest", + "irq", + "soft", + "sys", + "usr" + ] + }, + "periods": [] + } + ] +} +``` + +This capability can be used to build an interactive exploratory UI to +allow displaying breakout details. The `get_metrics` API will show all +recorded metrics, along with information the names and values used in +those. Metrics that show "names" with more than one value will need to be +filtered to produce meaningful summaries or graphs. The `get_breakdowns` API +can be used to explore the namespace recorded for that metric in the specified +run. For example, + +``` +GET /api/v1/ilab/runs//breakdowns/sar-net::packets-sec?name=direction=rx +{ + "label": "sar-net::packets-sec", + "source": "sar-net", + "type": "packets-sec", + "class": [], + "names": { + "dev": [ + "lo", + "eno12409", + "eno12399" + ], + "type": [ + "physical", + "virtual" + ] + } +} +``` + +Metric data access (including graphing) is now sensitive to the Crucible +"period". The UI iterates through all periods for the selected run, +requesting the primary metric and a selected secondary non-periodic +metric for each period. The labeling for the graph is based on finding +"param" values unique for each period's iteration. + +The `get_filters` API reports all the tag and param filter tags and +values for the runs. 
These can be used for the `filters` query parameter +on `get_runs` to restrict the set of runs reported; for example, +`/api/v1/ilab/runs?filter=param:workflow=sdg` shows only runs with the param +arg `workflow` set to the value `sdg`. diff --git a/backend/app/services/crucible_svc.py b/backend/app/services/crucible_svc.py index 6b750377..672a5eb7 100644 --- a/backend/app/services/crucible_svc.py +++ b/backend/app/services/crucible_svc.py @@ -1,30 +1,59 @@ -from dataclasses import dataclass import sys +import time from collections import defaultdict +from dataclasses import dataclass from datetime import datetime, timezone -import time from typing import Any, Iterator, Optional, Tuple, Union +from elasticsearch import Elasticsearch, NotFoundError +from fastapi import HTTPException, status from pydantic import BaseModel from app import config -from elasticsearch import Elasticsearch, NotFoundError -from fastapi import HTTPException, status class Graph(BaseModel): + """Describe a single graph + + metric: the metric label, "ilab::train-samples-sec" + aggregate: True to aggregate unspecified breakouts + names: Lock in breakouts + periods: Select metrics for specific test period(s) + run: Override the default run ID from GraphList + """ + metric: str aggregate: bool = False names: Optional[list[str]] = None periods: Optional[list[str]] = None + run: Optional[str] = None class GraphList(BaseModel): + """Describe a set of overlaid graphs + + run: Specify the (default) run ID + name: Specify a name for the set of graphs + graphs: a list of Graph objects + """ + run: str name: str graphs: list[Graph] +@dataclass +class Point: + """Graph point + + Record the start & end timestamp and value of a metric data point + """ + + begin: int + end: int + value: float + + colors = [ "black", "aqua", @@ -142,12 +171,30 @@ class Quote: class CrucibleService: + """Support convenient generalized access to Crucible data + + This implements access to the "v7" Crucible "Common Data Model" through + OpenSearch queries. + """ - def __init__(self, configpath="crucible"): + # OpenSearch massive limit on hits in a single query + BIGQUERY = 262144 + + # 'run' document fields that support general `?filter=:` + # + # TODO: this excludes 'desc', which isn't used by the ilab runs, and needs + # different treatment as its a text field rather than a term. + RUN_FILTERS = ("benchmark", "email", "name", "source", "harness", "host") + + def __init__(self, configpath: str = "crucible"): """Initialize a Crucible CDM (OpenSearch) connection. - This includes making an "info" call to confirm and record the server - response. + Generally the `configpath` should be scoped, like `ilab.crucible` so + that multiple APIs based on access to distinct Crucible controllers can + coexist. + + Initialization includes making an "info" call to confirm and record the + server response. 
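A minimal construction sketch, assuming the scoped `ilab.crucible` config section that the ilab endpoints above pass as `CONFIGPATH` (error handling omitted):

            crucible = CrucibleService("ilab.crucible")

The resulting service object backs the `get_runs`, tag/param, and metric APIs described in the accompanying readme.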
Args: configpath: The Vyper config path (e.g., "ilab.crucible") @@ -192,7 +239,7 @@ def _split_list(alist: Optional[list[str]] = None) -> list[str]: return l @staticmethod - def normalize_date(value: Optional[Union[int, str, datetime]]) -> int: + def _normalize_date(value: Optional[Union[int, str, datetime]]) -> int: """Normalize date parameters The Crucible data model stores dates as string representations of an @@ -273,7 +320,7 @@ def _aggs(payload: dict[str, Any], aggregation: str) -> Iterator[dict[str, Any]] payload: A JSON dict containing an "aggregations" field Returns: - Yields each aggregation from an aggregations object + Yields each aggregation from an aggregation bucket list """ if "aggregations" not in payload: raise HTTPException( @@ -290,7 +337,7 @@ def _aggs(payload: dict[str, Any], aggregation: str) -> Iterator[dict[str, Any]] yield agg @staticmethod - def _date(timestamp: str) -> str: + def _format_timestamp(timestamp: Union[str, int]) -> str: """Convert stringified integer milliseconds-from-epoch to ISO date""" return str(datetime.fromtimestamp(int(timestamp) / 1000, timezone.utc)) @@ -310,8 +357,8 @@ def _format_data(cls, data: dict[str, Any]) -> dict[str, Any]: A neatly formatted "metric_data" object """ return { - "begin": cls._date(data["begin"]), - "end": cls._date(data["end"]), + "begin": cls._format_timestamp(data["begin"]), + "end": cls._format_timestamp(data["end"]), "duration": int(data["duration"]) / 1000, "value": float(data["value"]), } @@ -330,18 +377,33 @@ def _format_period(cls, period: dict[str, Any]) -> dict[str, Any]: A neatly formatted "period" object """ return { - "begin": cls._date(timestamp=period["begin"]), - "end": cls._date(period["end"]), + "begin": cls._format_timestamp(timestamp=period["begin"]), + "end": cls._format_timestamp(period["end"]), "id": period["id"], "name": period["name"], } @classmethod - def _build_filter_options( - cls, filter: Optional[list[str]] = None - ) -> Tuple[Optional[list[dict[str, Any]]], Optional[list[dict[str, Any]]]]: + def _build_filter_options(cls, filter: Optional[list[str]] = None) -> Tuple[ + Optional[list[dict[str, Any]]], + Optional[list[dict[str, Any]]], + Optional[list[dict[str, Any]]], + ]: """Build filter terms for tag and parameter filter terms + Each term has the form ":". Any term + may be quoted: quotes are stripped and ignored. (This is generally only + useful on the to include spaces.) 
+ + We support three namespaces: + param: Match against param index arg/val + tag: Match against tag index name/val + run: Match against run index fields + + We support two operators: + =: Exact match + ~: Partial match + Args: filter: list of filter terms like "param:key=value" @@ -352,35 +414,47 @@ def _build_filter_options( for term in cls._split_list(filter): p = Parser(term) namespace, _ = p._next_token([":"]) - key, operation = p._next_token(["="]) + key, operation = p._next_token(["=", "~"]) value, _ = p._next_token() - print(f"FILTER: {namespace}:{key}{operation}{value}") - if namespace == "param": - key_field = "param.arg" - value_field = "param.val" + if operation == "~": + value = f".*{value}.*" + matcher = "regexp" else: - key_field = "tag.name" - value_field = "tag.val" - terms[namespace].append( - { - "bool": { - "must": [ - {"term": {key_field: key}}, - {"term": {value_field: value}}, - ] + matcher = "term" + if namespace in ("param", "tag"): + if namespace == "param": + key_field = "param.arg" + value_field = "param.val" + else: + key_field = "tag.name" + value_field = "tag.val" + terms[namespace].append( + { + "bool": { + "must": [ + {"term": {key_field: key}}, + {matcher: {value_field: value}}, + ] + } } - } - ) + ) + elif namespace == "run": + terms[namespace].append({matcher: {f"run.{key}": value}}) + else: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + f"unknown filter namespace {namespace!r}", + ) param_filter = None tag_filter = None if "param" in terms: param_filter = [{"dis_max": {"queries": terms["param"]}}] if "tag" in terms: tag_filter = [{"dis_max": {"queries": terms["tag"]}}] - return param_filter, tag_filter + return param_filter, tag_filter, terms.get("run") @classmethod - def _name_filters( + def _build_name_filters( cls, namelist: Optional[list[str]] = None ) -> list[dict[str, Any]]: """Build filter terms for metric breakdown names @@ -407,12 +481,18 @@ def _name_filters( return filters @classmethod - def _period_filters( + def _build_period_filters( cls, periodlist: Optional[list[str]] = None ) -> list[dict[str, Any]]: """Build period filters - Generate filter terms to match against a list of period IDs. + Generate metric_desc filter terms to match against a list of period IDs. + + Note that not all metric descriptions are periodic, and we don't want + these filters to exclude them -- so the filter will exclude only + documents that have a period and don't match. (That is, we won't drop + any non-periodic metrics. We expect those to be filtered by timestamp + instead.) 
Args: period: list of possibly comma-separated period IDs @@ -437,7 +517,7 @@ def _period_filters( return [] @classmethod - def _filter_metric_desc( + def _build_metric_filters( cls, run: str, metric: str, @@ -465,8 +545,8 @@ def _filter_metric_desc( {"term": {"metric_desc.source": source}}, {"term": {"metric_desc.type": type}}, ] - + cls._name_filters(names) - + cls._period_filters(periods) + + cls._build_name_filters(names) + + cls._build_period_filters(periods) ) @staticmethod @@ -525,7 +605,7 @@ def search( """ f = filters if filters else [] query = { - "size": 250000 if size is None else size, + "size": self.BIGQUERY if size is None else size, "query": {"bool": {"filter": f}}, } if sort: @@ -544,32 +624,29 @@ def _get_metric_ids( metric: str, namelist: Optional[list[str]] = None, periodlist: Optional[list[str]] = None, - highlander: bool = True, + aggregate: bool = False, ) -> list[str]: """Generate a list of matching metric_desc IDs Given a specific run and metric name, and a set of breakout filters, returns a list of metric desc IDs that match. - Generally, breakout data isn't useful unless the set of filters - produces a single metric desc ID, however this can be overridden. - If a single ID is required to produce a consistent metric, and the - supplied filters produce more than one, raise a 422 HTTP error - (UNPROCESSABLE CONTENT) with a response body showing the unsatisfied - breakouts (name and available values). + supplied filters produce more than one without aggregation, raise a + 422 HTTP error (UNPROCESSABLE CONTENT) with a response body showing + the unsatisfied breakouts (name and available values). Args: run: run ID metric: combined metric name (e.g., sar-net::packets-sec) namelist: a list of breakout filters like "type=physical" periodlist: a list of period IDs - highlander: if True, there can be only one (metric ID) + aggregate: if True, allow multiple metric IDs Returns: A list of matching metric_desc ID value(s) """ - filters = self._filter_metric_desc(run, metric, namelist, periodlist) + filters = self._build_metric_filters(run, metric, namelist, periodlist) metrics = self.search( "metric_desc", filters=filters, @@ -584,7 +661,7 @@ def _get_metric_ids( ), ) ids = [h["metric_desc"]["id"] for h in self._hits(metrics)] - if len(ids) < 2 or not highlander: + if len(ids) < 2 or aggregate: return ids # This probably means we're not filtering well enouch for a useful @@ -609,9 +686,15 @@ def _get_metric_ids( status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=[response] ) - def _data_range(self, periods: Optional[list[str]] = None) -> list[dict[str, Any]]: + def _build_timestamp_range_filters( + self, periods: Optional[list[str]] = None + ) -> list[dict[str, Any]]: """Create a timestamp range filter + This extracts the begin and end timestamps from the list of periods and + builds a timestamp filter range to select documents on or after the + earliest begin timestamp and on or before the latest end timestamp. + Args: periods: a list of CDM period IDs @@ -631,6 +714,52 @@ def _data_range(self, periods: Optional[list[str]] = None) -> list[dict[str, Any else: return [] + @classmethod + def _build_sort_terms(cls, sorters: Optional[list[str]]) -> list[dict[str, str]]: + """Build sort term list + + Sorters may reference any native `run` index field and must specify + either "asc"(ending) or "desc"(ending) sort order. 
Any number of + sorters may be combined, like ["name:asc,benchmark:desc", "end:desc"] + + Args: + sorters: list of : sort terms + + Returns: + list of OpenSearch sort terms + """ + if sorters: + sort_terms = [] + for s in sorters: + DIRECTIONS = ("asc", "desc") + FIELDS = ( + "begin", + "benchmark", + "desc", + "email", + "end", + "harness", + "host", + "id", + "name", + "source", + ) + key, dir = s.split(":", maxsplit=1) + if dir not in DIRECTIONS: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + f"Sort direction {dir!r} must be one of {','.join(DIRECTIONS)}", + ) + if key not in FIELDS: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, + f"Sort key {key!r} must be one of {','.join(FIELDS)}", + ) + sort_terms.append({f"run.{key}": dir}) + else: + sort_terms = [{"run.begin": "asc"}] + return sort_terms + def _get_run_ids( self, index: str, filters: Optional[list[dict[str, Any]]] = None ) -> set[str]: @@ -651,7 +780,7 @@ def _get_run_ids( ) return set([x["id"] for x in self._hits(filtered, ["run"])]) - def run_filters(self) -> dict[str, dict[str, int]]: + def get_run_filters(self) -> dict[str, dict[str, list[str]]]: """Return possible tag and filter terms Return a description of tag and param filter terms meaningful @@ -660,12 +789,6 @@ def run_filters(self) -> dict[str, dict[str, int]]: allow adjusting the filter popups to drop options no longer relevant to a given set. - Returns: - A three-level JSON dict; the first level is the namespace (param or - tag), the second level is the parameter or tag name, the third - level key is each value present in the index, and the value is the - number of times that value appears. - { "param": { {"gpus": { @@ -674,14 +797,22 @@ def run_filters(self) -> dict[str, dict[str, int]]: } } } + + Returns: + A three-level JSON dict; the first level is the namespace (param or + tag), the second level is the parameter or tag name, the third + level key is each value present in the index, and the value is the + number of times that value appears. 
""" tags = self.search( "tag", size=0, aggregations={ "key": { - "terms": {"field": "tag.name", "size": 10000}, - "aggs": {"values": {"terms": {"field": "tag.val", "size": 10000}}}, + "terms": {"field": "tag.name", "size": self.BIGQUERY}, + "aggs": { + "values": {"terms": {"field": "tag.val", "size": self.BIGQUERY}} + }, } }, ignore_unavailable=True, @@ -691,42 +822,108 @@ def run_filters(self) -> dict[str, dict[str, int]]: size=0, aggregations={ "key": { - "terms": {"field": "param.arg", "size": 10000}, + "terms": {"field": "param.arg", "size": self.BIGQUERY}, "aggs": { - "values": {"terms": {"field": "param.val", "size": 10000}} + "values": { + "terms": {"field": "param.val", "size": self.BIGQUERY} + } }, } }, ignore_unavailable=True, ) - result = defaultdict(lambda: defaultdict(lambda: defaultdict(int))) - for t in self._aggs(params, "key"): - for v in t["values"]["buckets"]: - result["param"][t["key"]][v["key"]] += v["doc_count"] + aggs = { + k: {"terms": {"field": f"run.{k}", "size": self.BIGQUERY}} + for k in self.RUN_FILTERS + } + runs = self.search( + "run", + size=0, + aggregations=aggs, + ) + result = defaultdict(lambda: defaultdict(lambda: set())) + for p in self._aggs(params, "key"): + for v in p["values"]["buckets"]: + result["param"][p["key"]].add(v["key"]) for t in self._aggs(tags, "key"): for v in t["values"]["buckets"]: - result["tag"][t["key"]][v["key"]] += v["doc_count"] - return result + result["tag"][t["key"]].add(v["key"]) + for name in self.RUN_FILTERS: + for f in self._aggs(runs, name): + result["run"][name].add(f["key"]) + return {s: {k: list(v) for k, v in keys.items()} for s, keys in result.items()} - def runs( + def get_runs( self, - benchmark: Optional[str] = None, filter: Optional[list[str]] = None, - name: Optional[str] = None, start: Optional[Union[int, str, datetime]] = None, end: Optional[Union[int, str, datetime]] = None, - offset: Optional[int] = None, + offset: int = 0, sort: Optional[list[str]] = None, size: Optional[int] = None, **kwargs, ) -> dict[str, Any]: """Return matching Crucible runs - Filtered list of runs + Filtered and sorted list of runs. 
+ + { + "sort": [], + "startDate": "2024-01-01T05:00:00+00:00", + "size": 1, + "offset": 0, + "results": [ + { + "begin": "1722878906342", + "benchmark": "ilab", + "email": "A@email", + "end": "1722880503544", + "id": "4e1d2c3c-b01c-4007-a92d-23a561af2c11", + "name": "\"A User\"", + "source": "node.example.com//var/lib/crucible/run/ilab--2024-08-05_17:17:13_UTC--4e1d2c3c-b01c-4007-a92d-23a561af2c11", + "tags": { + "topology": "none" + }, + "iterations": [ + { + "iteration": 1, + "primary_metric": "ilab::train-samples-sec", + "primary_period": "measurement", + "status": "pass", + "params": { + "cpu-offload-pin-memory": "1", + "model": "/home/models/granite-7b-lab/", + "data-path": "/home/data/training/knowledge_data.jsonl", + "cpu-offload-optimizer": "1", + "nnodes": "1", + "nproc-per-node": "4", + "num-runavg-samples": "2" + } + } + ], + "primary_metrics": [ + "ilab::train-samples-sec" + ], + "status": "pass", + "params": { + "cpu-offload-pin-memory": "1", + "model": "/home/models/granite-7b-lab/", + "data-path": "/home/data/training/knowledge_data.jsonl", + "cpu-offload-optimizer": "1", + "nnodes": "1", + "nproc-per-node": "4", + "num-runavg-samples": "2" + }, + "begin_date": "2024-08-05 17:28:26.342000+00:00", + "end_date": "2024-08-05 17:55:03.544000+00:00" + } + ], + "count": 1, + "total": 15, + "next_offset": 1 + } Args: - benchmark: Include runs with specified benchmark name - name: Include runs by owner name start: Include runs starting at timestamp end: Include runs ending no later than timestamp filter: List of tag/param filter terms (parm:key=value) @@ -746,23 +943,24 @@ def runs( # # If there are no matches, we can exit early. (TODO: should this be an # error, or just a success with an empty list?) - param_filters, tag_filters = self._build_filter_options(filter) results = {} filters = [] - if benchmark: - filters.append({"term": {"run.benchmark": benchmark}}) - if name: - filters.append({"term": {"run.name": name}}) + sorters = self._split_list(sort) + results["sort"] = sorters + sort_terms = self._build_sort_terms(sorters) + param_filters, tag_filters, run_filters = self._build_filter_options(filter) + if run_filters: + filters.extend(run_filters) if start or end: s = None e = None if start: - s = self.normalize_date(start) + s = self._normalize_date(start) results["startDate"] = datetime.fromtimestamp( s / 1000.0, tz=timezone.utc ) if end: - e = self.normalize_date(end) + e = self._normalize_date(end) results["endDate"] = datetime.fromtimestamp(e / 1000.0, tz=timezone.utc) if s and e and s > e: @@ -778,41 +976,9 @@ def runs( if e: cond["lte"] = str(e) filters.append({"range": {"run.begin": cond}}) - if sort: - sorters = self._split_list(sort) - results["sort"] = sorters - sort_terms = [] - for s in sorters: - DIRECTIONS = ("asc", "desc") - FIELDS = ( - "begin", - "benchmark", - "email", - "end", - "id", - "name", - "source", - "status", - ) - key, dir = s.split(":", maxsplit=1) - if dir not in DIRECTIONS: - raise HTTPException( - status.HTTP_400_BAD_REQUEST, - f"Sort direction {dir!r} must be one of {','.join(DIRECTIONS)}", - ) - if key not in FIELDS: - raise HTTPException( - status.HTTP_400_BAD_REQUEST, - f"Sort key {key!r} must be one of {','.join(FIELDS)}", - ) - sort_terms.append({f"run.{key}": dir}) - else: - sort_terms = [{"run.begin": "asc"}] - if size: results["size"] = size - if offset: - results["offset"] = offset + results["offset"] = offset if offset is not None else 0 # In order to filter by param or tag values, we need to produce a list # of matching RUN IDs 
from each index. We'll then drop any RUN ID that's @@ -828,7 +994,7 @@ def runs( return results hits = self.search( - "iteration", + "run", size=size, offset=offset, sort=sort_terms, @@ -836,13 +1002,18 @@ def runs( **kwargs, ignore_unavailable=True, ) + rawiterations = self.search("iteration", ignore_unavailable=True) rawtags = self.search("tag", ignore_unavailable=True) rawparams = self.search("param", ignore_unavailable=True) + iterations = defaultdict(list) tags = defaultdict(defaultdict) params = defaultdict(defaultdict) run_params = defaultdict(list) + for i in self._hits(rawiterations): + iterations[i["run"]["id"]].append(i["iteration"]) + # Organize tags by run ID for t in self._hits(rawtags): tags[t["run"]["id"]][t["tag"]["name"]] = t["tag"]["val"] @@ -855,10 +1026,8 @@ def runs( runs = {} for h in self._hits(hits): run = h["run"] - iteration = h["iteration"] - iid = iteration["id"] rid = run["id"] - iparams = params.get(iid, {}) + runs[rid] = run # Filter the runs by our tag and param queries if param_filters and rid not in paramids: @@ -869,61 +1038,58 @@ def runs( # Collect unique runs: the status is "fail" if any iteration for # that run ID failed. - if rid not in runs: - runs[rid] = run - run["status"] = iteration["status"] - try: - run["begin_date"] = self._date(run["begin"]) - run["end_date"] = self._date(run["end"]) - except KeyError as e: - print(f"Missing 'run' key {str(e)} in {run}") - run["begin_date"] = self._date("0") - run["end_date"] = self._date("0") - run["params"] = iparams.copy() - run["iterations"] = [ - { - "iteration": iteration["num"], - "primary_metric": iteration["primary-metric"], - "primary_period": iteration["primary-period"], - "status": iteration["status"], - "params": iparams, - } - ] - run["primary_metrics"] = set([iteration["primary-metric"]]) - run["tags"] = tags.get(rid, {}) - else: - r = runs[rid] - r["iterations"].append( + run["tags"] = tags.get(rid, {}) + run["iterations"] = [] + run["primary_metrics"] = set() + for i in iterations.get(rid, []): + iparams = params.get(i["id"], {}) + if "status" not in run: + run["status"] = i["status"] + else: + if i["status"] != "pass": + run["status"] = i["status"] + if "params" not in run: + run["params"] = iparams.copy() + else: + # Iteration-specific parameter names or values are factored out + # of the run summary. (NOTE: listify the keys first so Python + # doesn't complain about deletion during the traversal.) + p = run["params"] + for k in list(p.keys()): + if k not in iparams or p[k] != iparams[k]: + del p[k] + run["primary_metrics"].add(i["primary-metric"]) + run["iterations"].append( { - "iteration": iteration["num"], - "metric": iteration["primary-metric"], - "status": iteration["status"], + "iteration": i["num"], + "primary_metric": i["primary-metric"], + "primary_period": i["primary-period"], + "status": i["status"], "params": iparams, } ) - - # Iteration-specific parameter names or values are factored out - # of the run summary. (NOTE: listify the keys first so Python - # doesn't complain about deletion during the traversal.) 
- p = r["params"] - for k in list(p.keys()): - if k not in iparams or p[k] != iparams[k]: - del p[k] - r["primary_metrics"].add(iteration["primary-metric"]) - if iteration["status"] != "pass": - r["status"] = iteration["status"] + try: + run["begin_date"] = self._format_timestamp(run["begin"]) + run["end_date"] = self._format_timestamp(run["end"]) + except KeyError as e: + print(f"Missing 'run' key {str(e)} in {run}") + run["begin_date"] = self._format_timestamp("0") + run["end_date"] = self._format_timestamp("0") + + count = len(runs) + total = hits["hits"]["total"]["value"] results.update( { "results": list(runs.values()), - "count": len(runs), - "total": hits["hits"]["total"]["value"], + "count": count, + "total": total, } ) - if offset: - results["next_offset"] = offset + size if size else len(runs) + if size and (offset + count < total): + results["next_offset"] = offset + size return results - def tags(self, run: str, **kwargs) -> dict[str, str]: + def get_tags(self, run: str, **kwargs) -> dict[str, str]: """Return the set of tags associated with a run Args: @@ -940,7 +1106,7 @@ def tags(self, run: str, **kwargs) -> dict[str, str]: ) return {t["name"]: t["val"] for t in self._hits(tags, ["tag"])} - def params( + def get_params( self, run: Optional[str] = None, iteration: Optional[str] = None, **kwargs ) -> dict[str, dict[str, str]]: """Return the set of parameters for a run or iteration @@ -995,7 +1161,7 @@ def params( response["common"] = common return response - def iterations(self, run: str, **kwargs) -> list[dict[str, Any]]: + def get_iterations(self, run: str, **kwargs) -> list[dict[str, Any]]: """Return a list of iterations for a run Args: @@ -1008,12 +1174,13 @@ def iterations(self, run: str, **kwargs) -> list[dict[str, Any]]: iterations = self.search( index="iteration", filters=[{"term": {"run.id": run}}], + sort=[{"iteration.num": "asc"}], **kwargs, ignore_unavailable=True, ) return [i["iteration"] for i in self._hits(iterations)] - def samples( + def get_samples( self, run: Optional[str] = None, iteration: Optional[str] = None, **kwargs ): """Return a list of samples for a run or iteration @@ -1032,15 +1199,23 @@ def samples( "A sample query requires either a run or iteration ID", ) match = {"run.id" if run else "iteration.id": run if run else iteration} - samples = self.search( + hits = self.search( index="sample", filters=[{"term": match}], **kwargs, ignore_unavailable=True, ) - return [i["sample"] for i in self._hits(samples)] - - def periods( + samples = [] + for s in self._hits(hits): + print(f"SAMPLE's ITERATION {s['iteration']}") + sample = s["sample"] + sample["iteration"] = s["iteration"]["num"] + sample["primary_metric"] = s["iteration"]["primary-metric"] + sample["status"] = s["iteration"]["status"] + samples.append(sample) + return samples + + def get_periods( self, run: Optional[str] = None, iteration: Optional[str] = None, @@ -1077,16 +1252,20 @@ def periods( periods = self.search( index="period", filters=[{"term": match}], + sort=[{"period.begin": "asc"}], **kwargs, ignore_unavailable=True, ) body = [] for h in self._hits(periods): - p = h["period"] - body.append(self._format_period(p)) + period = self._format_period(period=h["period"]) + period["iteration"] = h["iteration"]["num"] + period["primary_metric"] = h["iteration"]["primary-metric"] + period["status"] = h["iteration"]["status"] + body.append(period) return body - def timeline(self, run: str, **kwargs) -> dict[str, Any]: + def get_timeline(self, run: str, **kwargs) -> dict[str, Any]: """Report the 
relative timeline of a run With nested object lists, show runs to iterations to samples to @@ -1127,8 +1306,8 @@ def timeline(self, run: str, **kwargs) -> dict[str, Any]: body = {"run": robj} for i in self._hits(itr): if "begin" not in robj: - robj["begin"] = self._date(i["run"]["begin"]) - robj["end"] = self._date(i["run"]["end"]) + robj["begin"] = self._format_timestamp(i["run"]["begin"]) + robj["end"] = self._format_timestamp(i["run"]["end"]) iteration = i["iteration"] iterations.append(iteration) iteration["samples"] = [] @@ -1141,7 +1320,7 @@ def timeline(self, run: str, **kwargs) -> dict[str, Any]: iteration["samples"].append(sample) return body - def metrics_list(self, run: str, **kwargs) -> dict[str, Any]: + def get_metrics_list(self, run: str, **kwargs) -> dict[str, Any]: """Return a list of metrics available for a run Each run may have multiple performance metrics stored. This API allows @@ -1188,7 +1367,7 @@ def metrics_list(self, run: str, **kwargs) -> dict[str, Any]: record["breakdowns"][n].add(v) return met - def metric_breakouts( + def get_metric_breakouts( self, run: str, metric: str, @@ -1228,7 +1407,7 @@ def metric_breakouts( } """ start = time.time() - filters = self._filter_metric_desc(run, metric, names, periods) + filters = self._build_metric_filters(run, metric, names, periods) metric_name = metric + ("" if not names else ("+" + ",".join(names))) metrics = self.search( "metric_desc", @@ -1263,7 +1442,7 @@ def metric_breakouts( print(f"Processing took {duration} seconds") return response - def metrics_data( + def get_metrics_data( self, run: str, metric: str, @@ -1308,14 +1487,14 @@ def metrics_data( """ start = time.time() ids = self._get_metric_ids( - run, metric, names, periodlist=periods, highlander=(not aggregate) + run, metric, names, periodlist=periods, aggregate=aggregate ) # If we're searching by periods, filter metric data by the period # timestamp range rather than just relying on the metric desc IDs as # we also want to filter non-periodic tool data. filters = [{"terms": {"metric_desc.id": ids}}] - filters.extend(self._data_range(periods)) + filters.extend(self._build_timestamp_range_filters(periods)) response = [] if len(ids) > 1: @@ -1341,8 +1520,8 @@ def metrics_data( for h in self._aggs(data, "interval"): response.append( { - "begin": self._date(h["key"] - interval), - "end": self._date(h["key"]), + "begin": self._format_timestamp(h["key"] - interval), + "end": self._format_timestamp(h["key"]), "value": h["value"]["value"], "duration": interval / 1000.0, } @@ -1356,7 +1535,7 @@ def metrics_data( print(f"Processing took {duration} seconds") return response - def metrics_summary( + def get_metrics_summary( self, run: str, metric: str, @@ -1381,13 +1560,13 @@ def metrics_summary( "min": 0.0, "max": 0.3296, "avg": 0.02360704225352113, - "sum": 1.6761000000000001 + "sum": 1.676self.BIGQUERY00000001 } """ start = time.time() ids = self._get_metric_ids(run, metric, names, periodlist=periods) filters = [{"terms": {"metric_desc.id": ids}}] - filters.extend(self._data_range(periods)) + filters.extend(self._build_timestamp_range_filters(periods)) data = self.search( "metric_data", size=0, @@ -1398,7 +1577,7 @@ def metrics_summary( print(f"Processing took {duration} seconds") return data["aggregations"]["score"] - def metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: + def get_metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: """Return metrics data for a run Each run may have multiple performance metrics stored. 
This API allows @@ -1441,25 +1620,111 @@ def metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: """ start = time.time() graphlist = [] - run = graphdata.run + default_run_id = graphdata.run layout: dict[str, Any] = {"width": "1500"} axes = {} yaxis = None cindex = 0 + params_by_run = {} + periods_by_run = {} + suffix_by_run = {} + + # Construct a de-duped ordered list of run IDs, starting with the + # default. + run_id_list = [] + if default_run_id: + run_id_list.append(default_run_id) + for g in graphdata.graphs: + if g.run and g.run not in run_id_list: + run_id_list.append(g.run) + for g in graphdata.graphs: + run_id = g.run if g.run else default_run_id names = g.names metric: str = g.metric + + if run_id not in params_by_run: + # Gather iteration parameters outside the loop for help in generating + # useful lables. + all_params = self.search( + "param", filters=[{"term": {"run.id": default_run_id}}] + ) + collector = defaultdict(defaultdict) + for h in self._hits(all_params): + collector[h["iteration"]["id"]][h["param"]["arg"]] = h["param"][ + "val" + ] + params_by_run[run_id] = collector + else: + collector = params_by_run[run_id] + + if run_id not in periods_by_run: + # Capture period IDs for the run iterations outside the loop + periods = self.search( + "period", filters=[{"term": {"run.id": default_run_id}}] + ) + iteration_periods = defaultdict(set) + for p in self._hits(periods): + if run_id not in suffix_by_run: + print(f"Run {run_id} suffix {p['run']['begin']}") + suffix_by_run[run_id] = p["run"]["begin"] + iteration_periods[p["iteration"]["id"]].add(p["period"]["id"]) + periods_by_run[run_id] = iteration_periods + else: + iteration_periods = periods_by_run[run_id] + ids = self._get_metric_ids( - run, metric, names, periodlist=g.periods, highlander=(not g.aggregate) + run_id, + metric, + names, + periodlist=g.periods, + aggregate=g.aggregate, ) + + # We can easily end up with multiple graphs across distinct periods + # or iterations, so we want to be able to provide some labeling to + # the graphs. We do this by looking for unique iteration parameters + # values, since the iteration number and period name aren't useful + # by themselves. + name_suffix = "" + if g.periods: + iteration = None + for i, pset in iteration_periods.items(): + if set(g.periods) <= pset: + iteration = i + break + + # If the period(s) we're graphing resolve to a single iteration + # in a run with multiple iterations, then we can try to find a + # unique title suffix based on distinct param values for that + # iteration. + if iteration and len(collector) > 1: + unique = collector[iteration].copy() + for i, params in collector.items(): + if i != iteration: + for p in list(unique.keys()): + if p in params and unique[p] == params[p]: + del unique[p] + if unique: + name_suffix = ( + " (" + + ",".join([f"{p}={v}" for p, v in unique.items()]) + + ")" + ) + + if len(run_id_list) > 1: + name_suffix += f" {{run {run_id_list.index(run_id) + 1}}}" + filters = [{"terms": {"metric_desc.id": ids}}] - filters.extend(self._data_range(g.periods)) + filters.extend(self._build_timestamp_range_filters(g.periods)) y_max = 0.0 - points = [] + points: list[Point] = [] # If we're pulling multiple breakouts, e.g., total CPU across modes - # or cores, we want to aggregate by timestamp. (Note that this will - # not work well unless the samples are aligned.) + # or cores, we want to aggregate by timestamp interval. 
Sample + # timstamps don't necessarily align, so the "histogram" aggregation + # normalizes within the interval (based on the minimum actual + # interval duration). if len(ids) > 1: # Find the minimum sample interval of the selected metrics aggdur = self.search( @@ -1478,7 +1743,7 @@ def metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: aggregations={ "interval": { "histogram": { - "field": "metric_data.end", + "field": "metric_data.begin", "interval": interval, }, "aggs": {"value": {"sum": {"field": "metric_data.value"}}}, @@ -1486,85 +1751,83 @@ def metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: }, ) for h in self._aggs(data, "interval"): - points.append((h["key"], h["value"]["value"])) + begin = int(h["key"]) + end = begin + interval - 1 + points.append(Point(begin, end, float(h["value"]["value"]))) else: data = self.search("metric_data", filters=filters) for h in self._hits(data, ["metric_data"]): - points.append((h["end"], float(h["value"]))) + points.append( + Point(int(h["begin"]), int(h["end"]), float(h["value"])) + ) - # Graph the "end" timestamp of each sample against the sample - # value. Sort the graph points by timestamp so that Ploty will draw - # nice lines. + # Sort the graph points by timestamp so that Ploty will draw nice + # lines. We graph both the "begin" and "end" timestamp of each + # sample against the value to more clearly show the sampling + # interval. x = [] y = [] - for t, v in sorted(points): - x.append(self._date(t)) - y.append(v) - y_max = max(y_max, v) - - try: - options = " " + ",".join(names) if names else "" - title = metric + options - - # TODO -- how to identify the period here? Can I filter out - # param differences to label these based on the batch size?? - graphitem = { - "x": x, - "y": y, - "name": title, - "type": "scatter", - "mode": "line", - "marker": {"color": colors[cindex]}, - "labels": { - "x": "sample timestamp", - "y": "samples / second", - }, - } + for p in sorted(points, key=lambda a: a.begin): + x.extend( + [self._format_timestamp(p.begin), self._format_timestamp(p.end)] + ) + y.extend([p.value, p.value]) + y_max = max(y_max, p.value) + + options = (" [" + ",".join(names) + "]") if names else "" + title = metric + options + graphitem = { + "x": x, + "y": y, + "name": title + name_suffix, + "type": "scatter", + "mode": "line", + "marker": {"color": colors[cindex]}, + "labels": { + "x": "sample timestamp", + "y": "samples / second", + }, + } - # Y-axis scaling and labeling is divided by benchmark label; - # so store each we've created to reuse. (E.g., if we graph - # 5 different mpstat::Busy-CPU periods, they'll share a single - # Y axis.) - if title in axes: - yref = axes[metric] + # Y-axis scaling and labeling is divided by benchmark label; + # so store each we've created to reuse. (E.g., if we graph + # 5 different mpstat::Busy-CPU periods, they'll share a single + # Y axis.) 
+ if metric in axes: + yref = axes[metric] + else: + if yaxis: + name = f"yaxis{yaxis}" + yref = f"y{yaxis}" + yaxis += 1 + layout[name] = { + "title": metric, + "color": colors[cindex], + "autorange": True, + "anchor": "free", + "autoshift": True, + "overlaying": "y", + } else: - if yaxis: - name = f"yaxis{yaxis}" - yref = f"y{yaxis}" - yaxis += 1 - layout[name] = { - "title": title, - "color": colors[cindex], - "autorange": True, - "anchor": "free", - "autoshift": True, - "overlaying": "y", - } - else: - name = "yaxis" - yref = "y" - yaxis = 2 - layout[name] = { - "title": title, - "color": colors[cindex], - } - axes[metric] = yref - graphitem["yaxis"] = yref - cindex += 1 - if cindex >= len(colors): - cindex = 0 - graphlist.append(graphitem) - except ValueError as v: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Unexpected data type: {str(v)}", - ) + name = "yaxis" + yref = "y" + yaxis = 2 + layout[name] = { + "title": metric, + "color": colors[cindex], + } + axes[metric] = yref + graphitem["yaxis"] = yref + cindex += 1 + if cindex >= len(colors): + cindex = 0 + graphlist.append(graphitem) duration = time.time() - start print(f"Processing took {duration} seconds") return {"data": graphlist, "layout": layout} - def fields(self, index: str) -> dict[str, set]: + def fields(self, index: str) -> dict[str, dict[str, str]]: """Return the fields of an OpenSearch document from an index This fetches the document mapping from OpenSearch and reports it as a @@ -1572,16 +1835,17 @@ def fields(self, index: str) -> dict[str, set]: { "cdm": [ - "ver" + "doctype": "keyword", + "ver": "keyword" ], "metric_data": [ - "begin", - "value", - "end", - "duration" + "begin": "date", + "value": "double", + "end": "date", + "duration": "long" ], "metric_desc": [ - "id" + "id": "keyword" ] } @@ -1597,10 +1861,11 @@ def fields(self, index: str) -> dict[str, set]: try: idx = self._get_index(index) mapping = self.elastic.indices.get_mapping(index=idx) - fields = defaultdict(set) + fields = defaultdict(defaultdict) for f, subfields in mapping[idx]["mappings"]["properties"].items(): - for s in subfields["properties"].keys(): - fields[f].add(s) + print(f"{f}: {subfields}") + for s, info in subfields["properties"].items(): + fields[f][s] = info.get("type") return fields except NotFoundError: raise HTTPException( diff --git a/frontend/src/actions/ilabActions.js b/frontend/src/actions/ilabActions.js index d76a003d..e80ac651 100644 --- a/frontend/src/actions/ilabActions.js +++ b/frontend/src/actions/ilabActions.js @@ -103,9 +103,9 @@ export const fetchGraphData = const periods = await API.get(`/api/v1/ilab/runs/${uid}/periods`); let graphs = []; periods.data.forEach((p) => { - graphs.push({ metric, periods: [p.id] }); + graphs.push({ metric: p.primary_metric, periods: [p.id] }); graphs.push({ - metric: metric, + metric, aggregate: true, periods: [p.id], }); diff --git a/frontend/src/components/templates/ILab/ILabGraph.jsx b/frontend/src/components/templates/ILab/ILabGraph.jsx index 565cb45e..c41300ba 100644 --- a/frontend/src/components/templates/ILab/ILabGraph.jsx +++ b/frontend/src/components/templates/ILab/ILabGraph.jsx @@ -23,16 +23,16 @@ const ILabGraph = (props) => { return ( <> - {isGraphLoading && !hasGraphData(item.id) ? ( -
- ) : !isGraphLoading ? ( - <> - ) : ( + {hasGraphData(item.id) ? ( + ) : isGraphLoading && !hasGraphData(item.id) ? ( +
+ ) : ( + <> )} ); diff --git a/frontend/src/components/templates/ILab/index.jsx b/frontend/src/components/templates/ILab/index.jsx index 7e99d41c..7a836563 100644 --- a/frontend/src/components/templates/ILab/index.jsx +++ b/frontend/src/components/templates/ILab/index.jsx @@ -121,7 +121,7 @@ const ILab = () => { - + { @@ -138,7 +138,21 @@ const ILab = () => { isHidden={!expanded.includes("bordered-toggle1")} >
- + + + + + + { /> - + From 505903bf6b006c9f2227c88ccbf34f4e0012b24e Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Mon, 30 Sep 2024 16:24:46 -0400 Subject: [PATCH 04/29] Documentation and cleanup Also added the option to override the default graph title generator using the new `Graph.title` field. --- backend/app/api/v1/endpoints/ilab/ilab.py | 28 ++- backend/app/services/crucible_readme.md | 96 +++++++++-- backend/app/services/crucible_svc.py | 198 +++++++++++++--------- 3 files changed, 215 insertions(+), 107 deletions(-) diff --git a/backend/app/api/v1/endpoints/ilab/ilab.py b/backend/app/api/v1/endpoints/ilab/ilab.py index 5a5897f5..897f2ae0 100644 --- a/backend/app/api/v1/endpoints/ilab/ilab.py +++ b/backend/app/api/v1/endpoints/ilab/ilab.py @@ -1,3 +1,10 @@ +"""Access RHEL AI InstructLab performance data through Crucible + +This defines an API to expose and filter performance data from InstructLab +CPT runs via a persistent Crucuble controller instance as defined in the +configuration path "ilab.crucible". +""" + from datetime import datetime, timedelta, timezone from typing import Annotated, Any, Optional @@ -378,7 +385,7 @@ async def timeline(run: str): { "ilab::train-samples-sec": { "periods": ["C022CDC6-60C8-11EF-BA80-AFE7B4B2692B"], - "breakdowns": { + "breakouts": { "benchmark-group": ["unknown"], "benchmark-name": ["unknown"], "benchmark-role": ["client"], @@ -417,7 +424,7 @@ async def metrics(run: str): "class": ["throughput"], "type": "Busy-CPU", "source": "mpstat", - "breakdowns": {"num": ["8", "72"], "thread": [0, 1]}, + "breakouts": {"num": ["8", "72"], "thread": [0, 1]}, } ), 400: example_error("Metric name not found for run "), @@ -641,6 +648,9 @@ async def metric_graph_body(graphs: GraphList): async def metric_graph_param( run: str, metric: str, + aggregate: Annotated[ + bool, Query(description="Allow aggregation of metrics") + ] = False, name: Annotated[ Optional[list[str]], Query( @@ -655,9 +665,7 @@ async def metric_graph_param( examples=["", ","], ), ] = None, - aggregate: Annotated[ - bool, Query(description="Allow aggregation of metrics") - ] = False, + title: Annotated[Optional[str], Query(description="Title for graph")] = None, ): crucible = CrucibleService(CONFIGPATH) return crucible.get_metrics_graph( @@ -665,7 +673,13 @@ async def metric_graph_param( run=run, name=metric, graphs=[ - Graph(metric=metric, aggregate=aggregate, names=name, periods=period) + Graph( + metric=metric, + aggregate=aggregate, + names=name, + periods=period, + title=title, + ) ], ) ) @@ -729,4 +743,4 @@ async def info(): ) async def fields(index: str): crucible = CrucibleService(CONFIGPATH) - return crucible.fields(index=index) + return crucible.get_fields(index=index) diff --git a/backend/app/services/crucible_readme.md b/backend/app/services/crucible_readme.md index fdd9e77d..4b61e5eb 100644 --- a/backend/app/services/crucible_readme.md +++ b/backend/app/services/crucible_readme.md @@ -1,3 +1,55 @@ +Crucible divides data across a set of OpenSearch (or ElasticSearch) indices, +each with a specific document mapping. CDM index names include a "root" name +(like "run") with a versioned prefix, like "cdmv7dev-run". + +Crucible timestamps are integers in "millisecond-from-the-epoch" format. + +
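+
+For example, a stored timestamp can be converted to a UTC datetime as in this
+illustrative Python snippet (the value shown is an example; the snippet is not
+part of the service code):
+
+```
+from datetime import datetime, timezone
+
+begin = 1722878906342  # a CDM "run.begin" value, in milliseconds from the epoch
+print(datetime.fromtimestamp(begin / 1000, timezone.utc))
+# 2024-08-05 17:28:26.342000+00:00
+```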
+- **RUN**: this contains the basic information about a performance run, including a
+    generated UUID, begin and end timestamps, a benchmark name, a user name and
+    email, the (host/directory) "source" of the indexed data (which is usable on
+    the controller's local file system), plus host and test harness names.
+- **TAG**: this contains information about a general purpose "tag" to associate some
+    arbitrary context with a run, for example software versions, hardware, or
+    other metadata. This can be considered a SQL JOIN with the run document,
+    adding a tag UUID, name, and value.
+- **ITERATION**: this contains basic information about a performance run iteration.
+    This is a JOIN with the RUN document, duplicating the run fields while
+    adding an iteration UUID, number, the primary (benchmark) metric associated
+    with the iteration, plus the primary "period" of the iteration, and the
+    iteration status.
+- **PARAM**: this contains information about a benchmark parameter value affecting
+    the behavior of an iteration. This is a JOIN with the run and iteration
+    data, adding a parameter ID, argument, and value. While parameters are
+    iteration-specific, parameters that don't vary between iterations are often
+    represented as run parameters.
+- **SAMPLE**: this contains basic information about a sample of an iteration. This is
+    effectively a JOIN against iteration and run, adding a sample UUID and
+    number, along with a "path" for sample data and a sample status.
+- **PERIOD**: this contains basic information about a period during which data is
+    collected within a sample. This is a JOIN against sample, iteration, and
+    run, adding the period UUID, name, and begin and end timestamps. A set
+    of periods can be "linked" through a "prev_id" field.
+- **METRIC_DESC**: this contains descriptive data about a specific set of benchmark
+    metric data. This is another JOIN, containing the associated period,
+    sample, iteration, and run data while adding information specific to a
+    sequence of metric data values. These include the metric UUID, a class,
+    type, and source, and a set of "names" which define breakouts that narrow
+    down a specific source and type. For example, source:mpstat, type:Busy-CPU
+    data is broken down by package, cpu, core, and other breakouts which can
+    be isolated or aggregated for data reporting.
+- **METRIC_DATA**: this describes a specific data point, sampled over a specified
+    duration with a fixed begin and end timestamp, plus a floating point value.
+    Each is tied to a specific metric_desc UUID value. Depending on the varied
+    semantics of metric_desc breakouts, it's often valid to aggregate these
+    across a set of related metric_desc IDs, based on source and type, for
+    example to get aggregate CPU load across all modes, cores, or across all
+    modes within a core. This service allows arbitrary aggregation within a
+    given metric source and type, but by default will attempt to direct the
+    caller toward specifying a set of breakouts that result in a single metric_desc
+    ID.
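+
+As an illustration of how these indices are laid out (a sketch only, not an
+API of this service; the index name and field values are examples), a raw
+OpenSearch query selecting InstructLab runs from the run index might look
+like:
+
+```
+POST cdmv7dev-run/_search
+{
+  "query": {"bool": {"filter": [{"term": {"run.benchmark": "ilab"}}]}},
+  "sort": [{"run.begin": "asc"}]
+}
+```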
+ The `crucible_svc` allows CPT project APIs to access a Crucible CDM backing store to find information about runs, tags, params, iterations, samples, periods, plus various ways to expose and aggregate metric data both for @@ -14,18 +66,18 @@ information is broken down by core and package. You can now aggregate all global data (e.g., total system CPU), or filter by breakout names to select by CPU, mode (usr, sys, irq), etc. -For example, to return `mpstat` `Busy-CPU` graph data for one core, you -might query: +For example, to return `Busy-CPU` ("type") graph data from the `mpstat` +("source") tool for system mode on one core, you might query: ``` -/api/v1/ilab/runs/f542a50c-55df-4ead-92d1-8c55367f2e79/graph/mpstat::Busy-CPU?name=core=12,package=1,num=77,type=guest +/api/v1/ilab/runs//graph/mpstat::Busy-CPU?name=core=12,package=1,num=77,type=sys ``` If you make a `graph`, `data`, or `summary` query that doesn't translate -to a unique metric, and don't allow aggregation, you'll get a diagnostic +to a unique metric, and don't select aggregation, you'll get a diagnostic message identifying possible additional filters. For example, with -`type=guest` removed, that same query will show the available values for -the `type` name: +`type=sys` removed, that same query will show the available values for +the `type` breakout name: ``` { @@ -51,12 +103,26 @@ This capability can be used to build an interactive exploratory UI to allow displaying breakout details. The `get_metrics` API will show all recorded metrics, along with information the names and values used in those. Metrics that show "names" with more than one value will need to be -filtered to produce meaningful summaries or graphs. The `get_breakdowns` API -can be used to explore the namespace recorded for that metric in the specified -run. For example, +filtered to produce meaningful summaries or graphs. + +You can instead aggregate metrics across breakouts using the `?aggregate` +query parameter, like `GET /api/v1/ilab/runs//graph/mpstat::Busy-CPU?aggregate` +which will aggregate all CPU busy data for the system. + +Normally you'll want to display data based on sample periods, for example the +primary period of an iteration, using `?period=`. This will +implicitly constrain the metric data based on the period ID associated with +the `metric_desc` document *and* the begin/end time period of the selected +periods. Normally, a benchmark will will separate iterations because each is +run with a different parameter value, and the default graph labeling will +look for a set of distinct parameters not used by other iterations: for +example, `mpstat::Busy-CPU (batch-size=16)`. + +The `get_breakouts` API can be used to explore the namespace recorded for that +metric in the specified run. For example, ``` -GET /api/v1/ilab/runs//breakdowns/sar-net::packets-sec?name=direction=rx +GET /api/v1/ilab/runs//breakouts/sar-net::packets-sec?name=direction=rx { "label": "sar-net::packets-sec", "source": "sar-net", @@ -76,14 +142,10 @@ GET /api/v1/ilab/runs//breakdowns/sar-net::packets-sec?name=direction=rx } ``` -Metric data access (including graphing) is now sensitive to the Crucible -"period". The UI iterates through all periods for the selected run, -requesting the primary metric and a selected secondary non-periodic -metric for each period. The labeling for the graph is based on finding -"param" values unique for each period's iteration. - The `get_filters` API reports all the tag and param filter tags and values for the runs. 
These can be used for the `filters` query parameter on `get_runs` to restrict the set of runs reported; for example, `/api/v1/ilab/runs?filter=param:workflow=sdg` shows only runs with the param -arg `workflow` set to the value `sdg`. +arg `workflow` set to the value `sdg`. You can search for a subset of the +string value using the operator "~" instead of "=". For example, +`?filter=param:user~user` will match `user` values of "A user" or "The user". diff --git a/backend/app/services/crucible_svc.py b/backend/app/services/crucible_svc.py index 672a5eb7..2451aa8a 100644 --- a/backend/app/services/crucible_svc.py +++ b/backend/app/services/crucible_svc.py @@ -1,3 +1,15 @@ +"""Service to pull data from a Crucible CDM OpenSearch data store + +A set of helper methods to enable a project API to easily process data from a +Crucible controller's OpenSearch data backend. + +This includes paginated, filtered, and sorted lists of benchmark runs, along +access to the associated Crucible documents such as iterations, samples, and +periods. Metric data can be accessed by breakout names, or aggregated by +breakout subsets or collection periods as either raw data points, statistical +aggregate, or Plotly graph format for UI display. +""" + import sys import time from collections import defaultdict @@ -15,11 +27,22 @@ class Graph(BaseModel): """Describe a single graph - metric: the metric label, "ilab::train-samples-sec" - aggregate: True to aggregate unspecified breakouts - names: Lock in breakouts - periods: Select metrics for specific test period(s) - run: Override the default run ID from GraphList + This represents a JSON object provided by a caller through the get_graph + API to describe a specific metric graph. + + The default title (if the field is omitted) is the metric label with a + suffix denoting breakout values selected, any unique parameter values + in a selected iteration, and (if multiple runs are selected in any Graph + list) an indication of the run index. For example, + "mpstat::Busy-CPU [core=2,type=usr] (batch-size=16)". + + Fields: + metric: the metric label, "ilab::train-samples-sec" + aggregate: True to aggregate unspecified breakouts + names: Lock in breakouts + periods: Select metrics for specific test period(s) + run: Override the default run ID from GraphList + title: Provide a title for the graph. The default is a generated title """ metric: str @@ -27,14 +50,22 @@ class Graph(BaseModel): names: Optional[list[str]] = None periods: Optional[list[str]] = None run: Optional[str] = None + title: Optional[str] = None class GraphList(BaseModel): """Describe a set of overlaid graphs - run: Specify the (default) run ID - name: Specify a name for the set of graphs - graphs: a list of Graph objects + This represents a JSON object provided by a caller through the get_graph + API to introduce a set of constrained metrics to be graphed. The "run + ID" here provides a default for the embedded Graph objects, and can be + omitted if all Graph objects specify a run ID. (This is most useful to + select a set of graphs all for a single run ID.) 
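+
+    A request body sketch (the run ID and values here are illustrative):
+
+        {
+            "run": "<run ID>",
+            "name": "ilab::train-samples-sec",
+            "graphs": [{"metric": "ilab::train-samples-sec", "aggregate": true}]
+        }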
+ + Fields: + run: Specify the (default) run ID + name: Specify a name for the set of graphs + graphs: a list of Graph objects """ run: str @@ -457,10 +488,10 @@ def _build_filter_options(cls, filter: Optional[list[str]] = None) -> Tuple[ def _build_name_filters( cls, namelist: Optional[list[str]] = None ) -> list[dict[str, Any]]: - """Build filter terms for metric breakdown names + """Build filter terms for metric breakout names for example, "cpu=10" filters for metric data descriptors where the - breakdown name "cpu" exists and has a value of 10. + breakout name "cpu" exists and has a value of 10. Args: namelist: list of possibly comma-separated list values @@ -1331,11 +1362,11 @@ def get_metrics_list(self, run: str, **kwargs) -> dict[str, Any]: { "ilab::train-samples-sec": { "periods": [{"id": , "name": "measurement"}], - "names": {"benchmark-group" ["unknown"], ...} + "breakouts": {"benchmark-group" ["unknown"], ...} }, "iostat::avg-queue-length": { "periods": [], - "names": {"benchmark-group": ["unknown"], ...}, + "breakouts": {"benchmark-group": ["unknown"], ...}, }, ... } @@ -1359,12 +1390,12 @@ def get_metrics_list(self, run: str, **kwargs) -> dict[str, Any]: if name in met: record = met[name] else: - record = {"periods": [], "breakdowns": defaultdict(set)} + record = {"periods": [], "breakouts": defaultdict(set)} met[name] = record if "period" in h: record["periods"].append(h["period"]["id"]) for n, v in desc["names"].items(): - record["breakdowns"][n].add(v) + record["breakouts"][n].add(v) return met def get_metric_breakouts( @@ -1374,7 +1405,7 @@ def get_metric_breakouts( names: Optional[list[str]] = None, periods: Optional[list[str]] = None, ) -> dict[str, Any]: - """Help explore available metric breakdowns + """Help explore available metric breakouts Args: run: run ID @@ -1383,7 +1414,7 @@ def get_metric_breakouts( periods: list of period IDs Returns: - A description of all breakdown names and values, which can be + A description of all breakout names and values, which can be specified to narrow down metrics returns by the data, summary, and graph APIs. @@ -1394,7 +1425,7 @@ def get_metric_breakouts( ], "type": "Busy-CPU", "source": "mpstat", - "breakdowns": { + "breakouts": { "num": [ "8", "72" @@ -1643,35 +1674,73 @@ def get_metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: names = g.names metric: str = g.metric - if run_id not in params_by_run: - # Gather iteration parameters outside the loop for help in generating - # useful lables. - all_params = self.search( - "param", filters=[{"term": {"run.id": default_run_id}}] - ) - collector = defaultdict(defaultdict) - for h in self._hits(all_params): - collector[h["iteration"]["id"]][h["param"]["arg"]] = h["param"][ - "val" - ] - params_by_run[run_id] = collector + if g.title: + title = g.title else: - collector = params_by_run[run_id] + if run_id not in params_by_run: + # Gather iteration parameters outside the loop for help in + # generating useful lables. 
+ all_params = self.search( + "param", filters=[{"term": {"run.id": default_run_id}}] + ) + collector = defaultdict(defaultdict) + for h in self._hits(all_params): + collector[h["iteration"]["id"]][h["param"]["arg"]] = h["param"][ + "val" + ] + params_by_run[run_id] = collector + else: + collector = params_by_run[run_id] - if run_id not in periods_by_run: - # Capture period IDs for the run iterations outside the loop - periods = self.search( - "period", filters=[{"term": {"run.id": default_run_id}}] - ) - iteration_periods = defaultdict(set) - for p in self._hits(periods): - if run_id not in suffix_by_run: - print(f"Run {run_id} suffix {p['run']['begin']}") - suffix_by_run[run_id] = p["run"]["begin"] - iteration_periods[p["iteration"]["id"]].add(p["period"]["id"]) - periods_by_run[run_id] = iteration_periods - else: - iteration_periods = periods_by_run[run_id] + if run_id not in periods_by_run: + periods = self.search( + "period", filters=[{"term": {"run.id": default_run_id}}] + ) + iteration_periods = defaultdict(set) + for p in self._hits(periods): + if run_id not in suffix_by_run: + suffix_by_run[run_id] = p["run"]["begin"] + iteration_periods[p["iteration"]["id"]].add(p["period"]["id"]) + periods_by_run[run_id] = iteration_periods + else: + iteration_periods = periods_by_run[run_id] + + # We can easily end up with multiple graphs across distinct + # periods or iterations, so we want to be able to provide some + # labeling to the graphs. We do this by looking for unique + # iteration parameters values, since the iteration number and + # period name aren't useful by themselves. + name_suffix = "" + if g.periods: + iteration = None + for i, pset in iteration_periods.items(): + if set(g.periods) <= pset: + iteration = i + break + + # If the period(s) we're graphing resolve to a single + # iteration in a run with multiple iterations, then we can + # try to find a unique title suffix based on distinct param + # values for that iteration. + if iteration and len(collector) > 1: + unique = collector[iteration].copy() + for i, params in collector.items(): + if i != iteration: + for p in list(unique.keys()): + if p in params and unique[p] == params[p]: + del unique[p] + if unique: + name_suffix = ( + " (" + + ",".join([f"{p}={v}" for p, v in unique.items()]) + + ")" + ) + + if len(run_id_list) > 1: + name_suffix += f" {{run {run_id_list.index(run_id) + 1}}}" + + options = (" [" + ",".join(names) + "]") if names else "" + title = metric + options + name_suffix ids = self._get_metric_ids( run_id, @@ -1680,41 +1749,6 @@ def get_metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: periodlist=g.periods, aggregate=g.aggregate, ) - - # We can easily end up with multiple graphs across distinct periods - # or iterations, so we want to be able to provide some labeling to - # the graphs. We do this by looking for unique iteration parameters - # values, since the iteration number and period name aren't useful - # by themselves. - name_suffix = "" - if g.periods: - iteration = None - for i, pset in iteration_periods.items(): - if set(g.periods) <= pset: - iteration = i - break - - # If the period(s) we're graphing resolve to a single iteration - # in a run with multiple iterations, then we can try to find a - # unique title suffix based on distinct param values for that - # iteration. 
- if iteration and len(collector) > 1: - unique = collector[iteration].copy() - for i, params in collector.items(): - if i != iteration: - for p in list(unique.keys()): - if p in params and unique[p] == params[p]: - del unique[p] - if unique: - name_suffix = ( - " (" - + ",".join([f"{p}={v}" for p, v in unique.items()]) - + ")" - ) - - if len(run_id_list) > 1: - name_suffix += f" {{run {run_id_list.index(run_id) + 1}}}" - filters = [{"terms": {"metric_desc.id": ids}}] filters.extend(self._build_timestamp_range_filters(g.periods)) y_max = 0.0 @@ -1775,12 +1809,10 @@ def get_metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: y.extend([p.value, p.value]) y_max = max(y_max, p.value) - options = (" [" + ",".join(names) + "]") if names else "" - title = metric + options graphitem = { "x": x, "y": y, - "name": title + name_suffix, + "name": title, "type": "scatter", "mode": "line", "marker": {"color": colors[cindex]}, @@ -1827,7 +1859,7 @@ def get_metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: print(f"Processing took {duration} seconds") return {"data": graphlist, "layout": layout} - def fields(self, index: str) -> dict[str, dict[str, str]]: + def get_fields(self, index: str) -> dict[str, dict[str, str]]: """Return the fields of an OpenSearch document from an index This fetches the document mapping from OpenSearch and reports it as a From 4ccd603b6335161f7ee9eed5e2fafcd8ce8b2be1 Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Tue, 1 Oct 2024 09:11:40 -0400 Subject: [PATCH 05/29] Allow overriding graph color --- backend/app/services/crucible_svc.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/backend/app/services/crucible_svc.py b/backend/app/services/crucible_svc.py index 2451aa8a..307f3a72 100644 --- a/backend/app/services/crucible_svc.py +++ b/backend/app/services/crucible_svc.py @@ -39,6 +39,7 @@ class Graph(BaseModel): Fields: metric: the metric label, "ilab::train-samples-sec" aggregate: True to aggregate unspecified breakouts + color: CSS color string ("green" or "#008000") names: Lock in breakouts periods: Select metrics for specific test period(s) run: Override the default run ID from GraphList @@ -47,6 +48,7 @@ class Graph(BaseModel): metric: str aggregate: bool = False + color: Optional[str] = None names: Optional[list[str]] = None periods: Optional[list[str]] = None run: Optional[str] = None @@ -1809,13 +1811,20 @@ def get_metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: y.extend([p.value, p.value]) y_max = max(y_max, p.value) + if g.color: + color = g.color + else: + color = colors[cindex] + cindex += 1 + if cindex >= len(colors): + cindex = 0 graphitem = { "x": x, "y": y, "name": title, "type": "scatter", "mode": "line", - "marker": {"color": colors[cindex]}, + "marker": {"color": color}, "labels": { "x": "sample timestamp", "y": "samples / second", @@ -1835,7 +1844,7 @@ def get_metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: yaxis += 1 layout[name] = { "title": metric, - "color": colors[cindex], + "color": color, "autorange": True, "anchor": "free", "autoshift": True, @@ -1847,13 +1856,10 @@ def get_metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: yaxis = 2 layout[name] = { "title": metric, - "color": colors[cindex], + "color": color, } axes[metric] = yref graphitem["yaxis"] = yref - cindex += 1 - if cindex >= len(colors): - cindex = 0 graphlist.append(graphitem) duration = time.time() - start print(f"Processing took {duration} seconds") From 
de0acccfb56406e79d30071197bde3a83b6fe472 Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Thu, 3 Oct 2024 08:29:16 -0400 Subject: [PATCH 06/29] Some (self) review cleanup --- backend/app/api/v1/endpoints/ilab/ilab.py | 142 +++++++++++---- backend/app/services/crucible_svc.py | 206 +++++++++++++--------- local-compose.sh | 33 ++-- 3 files changed, 244 insertions(+), 137 deletions(-) diff --git a/backend/app/api/v1/endpoints/ilab/ilab.py b/backend/app/api/v1/endpoints/ilab/ilab.py index 897f2ae0..e49b8c6c 100644 --- a/backend/app/api/v1/endpoints/ilab/ilab.py +++ b/backend/app/api/v1/endpoints/ilab/ilab.py @@ -255,16 +255,20 @@ async def iterations(run: str): 200: example_response( [ { - "id": "6BA5071A-7139-11EF-9864-EA6BC0BEFE10", + "id": "6BBE6872-7139-11EF-BFAA-8569A9399D61", "num": "1", "path": None, "status": "pass", + "iteration": 5, + "primary_metric": "ilab::sdg-samples-sec", }, { - "id": "6BBE6872-7139-11EF-BFAA-8569A9399D61", + "id": "6BACDFA8-7139-11EF-9F33-8185DD5B4869", "num": "1", "path": None, "status": "pass", + "iteration": 2, + "primary_metric": "ilab::sdg-samples-sec", }, ] ), @@ -288,12 +292,20 @@ async def run_samples(run: str): "end": "2024-09-12 18:03:23.132000+00:00", "id": "6BA57EF2-7139-11EF-A80B-E5037504B9B1", "name": "measurement", + "iteration": 1, + "sample": "1", + "primary_metric": "ilab::sdg-samples-sec", + "status": "pass", }, { - "begin": "2024-09-12 16:50:19.305000+00:00", - "end": "2024-09-12 17:14:04.475000+00:00", - "id": "6BAD466E-7139-11EF-8E60-927A210BA97E", + "begin": "2024-09-12 18:05:03.229000+00:00", + "end": "2024-09-12 18:27:55.419000+00:00", + "id": "6BB93622-7139-11EF-A6C0-89A48E630F9D", "name": "measurement", + "iteration": 4, + "sample": "1", + "primary_metric": "ilab::sdg-samples-sec", + "status": "pass", }, ] ), @@ -313,11 +325,21 @@ async def run_periods(run: str): 200: example_response( [ { - "id": "6BB8BD00-7139-11EF-B2B2-942D604C0B7B", + "id": "6BBE6872-7139-11EF-BFAA-8569A9399D61", "num": "1", "path": None, "status": "pass", - } + "iteration": 5, + "primary_metric": "ilab::sdg-samples-sec", + }, + { + "id": "6BACDFA8-7139-11EF-9F33-8185DD5B4869", + "num": "1", + "path": None, + "status": "pass", + "iteration": 2, + "primary_metric": "ilab::sdg-samples-sec", + }, ] ), 400: example_error("Parameter error"), @@ -383,26 +405,28 @@ async def timeline(run: str): responses={ 200: example_response( { - "ilab::train-samples-sec": { - "periods": ["C022CDC6-60C8-11EF-BA80-AFE7B4B2692B"], + "sar-net::packets-sec": { + "periods": [], "breakouts": { - "benchmark-group": ["unknown"], - "benchmark-name": ["unknown"], - "benchmark-role": ["client"], - "csid": ["1"], - "cstype": ["client"], + "benchmark-name": ["none"], + "benchmark-role": ["none"], + "csid": ["remotehosts-1-sysstat-1"], + "cstype": ["profiler"], + "dev": ["lo", "eno8303", "eno12399", "eno12409"], + "direction": ["rx", "tx"], "endpoint-label": ["remotehosts-1"], - "engine-id": ["1"], - "engine-role": ["benchmarker"], - "engine-type": ["client"], - "hosted-by": ["nvd-srv-29.nvidia.eng.rdu2.dc.redhat.com"], - "hostname": ["nvd-srv-29.nvidia.eng.rdu2.dc.redhat.com"], + "engine-id": ["remotehosts-1-sysstat-1"], + "engine-role": ["profiler"], + "engine-type": ["profiler"], + "hosted-by": ["x.example.com"], + "hostname": ["x.example.com"], "hypervisor-host": ["none"], "osruntime": ["podman"], - "tool-name": ["unknown"], + "tool-name": ["sysstat"], + "type": ["virtual", "physical"], "userenv": ["rhel-ai"], }, - } + }, }, ), 400: example_error("Parameter error"), @@ -571,21 +595,49 @@ 
async def metric_summary( responses={ 200: example_response( response={ - "iostat::operations-merged-sec": [ + "data": [ { "x": [ + "2024-09-05 21:50:07+00:00", + "2024-09-05 21:56:37+00:00", + "2024-09-05 21:56:37.001000+00:00", + "2024-09-05 21:56:52+00:00", + "2024-09-05 21:56:52.001000+00:00", "2024-09-05 22:01:52+00:00", + ], + "y": [0.0, 0.0, 0.33, 0.33, 0.0, 0.0], + "name": "iostat::operations-merged-sec [cmd=read,dev=sdb]", + "type": "scatter", + "mode": "line", + "marker": {"color": "black"}, + "labels": {"x": "sample timestamp", "y": "samples / second"}, + "yaxis": "y", + }, + { + "x": [ + "2024-09-05 21:50:07+00:00", "2024-09-05 21:56:37+00:00", + "2024-09-05 21:56:37.001000+00:00", "2024-09-05 21:56:52+00:00", + "2024-09-05 21:56:52.001000+00:00", + "2024-09-05 22:01:52+00:00", ], - "y": [0.0, 0.0, 0.33], - "name": "Metric iostat::operations-merged-sec cmd=read,dev=sdb", + "y": [0.0, 0.0, 0.33, 0.33, 0.0, 0.0], + "name": "iostat::operations-merged-sec [dev=sdb,cmd=read]", "type": "scatter", - "mode": "markers", - "orientation": "h", + "mode": "line", + "marker": {"color": "purple"}, "labels": {"x": "sample timestamp", "y": "samples / second"}, - } - ] + "yaxis": "y", + }, + ], + "layout": { + "width": "1500", + "yaxis": { + "title": "iostat::operations-merged-sec", + "color": "black", + }, + }, } ), 400: example_error("No matches for ilab::train-samples-sec"), @@ -614,21 +666,35 @@ async def metric_graph_body(graphs: GraphList): responses={ 200: example_response( response={ - "iostat::operations-merged-sec": [ + "data": [ { "x": [ - "2024-09-05 22:01:52+00:00", - "2024-09-05 21:56:37+00:00", - "2024-09-05 21:56:52+00:00", + "2024-09-12 16:49:01+00:00", + "2024-09-12 18:04:31+00:00", + "2024-09-12 18:04:31.001000+00:00", + "2024-09-12 18:04:46+00:00", + "2024-09-12 18:04:46.001000+00:00", + "2024-09-12 18:53:16+00:00", ], - "y": [0.0, 0.0, 0.33], - "name": "Metric iostat::operations-merged-sec cmd=read,dev=sdb", + "y": [0.0, 0.0, 1.4, 1.4, 0.0, 0.0], + "name": "iostat::operations-merged-sec [cmd=read,dev=sda]", "type": "scatter", - "mode": "markers", - "orientation": "h", - "labels": {"x": "sample timestamp", "y": "samples / second"}, + "mode": "line", + "marker": {"color": "black"}, + "labels": { + "x": "sample timestamp", + "y": "samples / second", + }, + "yaxis": "y", } - ] + ], + "layout": { + "width": "1500", + "yaxis": { + "title": "iostat::operations-merged-sec", + "color": "black", + }, + }, } ), 400: example_error("No matches for ilab::train-samples-sec"), diff --git a/backend/app/services/crucible_svc.py b/backend/app/services/crucible_svc.py index 307f3a72..a756d0e4 100644 --- a/backend/app/services/crucible_svc.py +++ b/backend/app/services/crucible_svc.py @@ -70,7 +70,7 @@ class GraphList(BaseModel): graphs: a list of Graph objects """ - run: str + run: Optional[str] = None name: str graphs: list[Graph] @@ -824,18 +824,14 @@ def get_run_filters(self) -> dict[str, dict[str, list[str]]]: { "param": { - {"gpus": { - "4": 22, - "8": 2 - } + {"gpus": [4", "8"]} } } Returns: - A three-level JSON dict; the first level is the namespace (param or - tag), the second level is the parameter or tag name, the third - level key is each value present in the index, and the value is the - number of times that value appears. + A two-level JSON dict; the first level is the namespace (param or + tag), the second level key is the param/tag/field name and its value + is the set of values defined for that key. 
""" tags = self.search( "tag", @@ -965,7 +961,7 @@ def get_runs( offset: Use size/from pagination instead of search_after Returns: - JSON object with "runs" list, "size", "next", and "total" fields. + JSON object with "results" list and "housekeeping" fields """ # We need to remove runs which don't match against 'tag' or 'param' @@ -1145,9 +1141,9 @@ def get_params( """Return the set of parameters for a run or iteration Parameters are technically associated with an iteration, but can be - aggregated for a run. (Note that, technically, values might vary across - iterations, and only one will be returned. This is OK if a run has a - single iteration, or if you know they're consistent.) + aggregated for a run. This will return a set of parameters for each + iteration; plus, if a "run" was specified, a filtered list of param + values that are common across all iterations. Args: run: run ID @@ -1155,7 +1151,7 @@ def get_params( kwargs: additional OpenSearch keywords Returns: - JSON dict of param key: value + JSON dict of param values by iteration (plus "common" if by run ID) """ if not run and not iteration: raise HTTPException( @@ -1293,6 +1289,7 @@ def get_periods( for h in self._hits(periods): period = self._format_period(period=h["period"]) period["iteration"] = h["iteration"]["num"] + period["sample"] = h["sample"]["num"] period["primary_metric"] = h["iteration"]["primary-metric"] period["status"] = h["iteration"]["status"] body.append(period) @@ -1502,19 +1499,22 @@ def get_metrics_data( [ { - "end": "2024-09-12 18:27:15+00:00", - "value": 0.0, - "duration": 15.0 + "begin": "2024-08-22 20:03:23.028000+00:00", + "end": "2024-08-22 20:03:37.127000+00:00", + "duration": 14.1, + "value": 9.35271216694379 }, { - "end": "2024-09-12 18:27:30+00:00", - "value": 0.0007, - "duration": 15.0 + "begin": "2024-08-22 20:03:37.128000+00:00", + "end": "2024-08-22 20:03:51.149000+00:00", + "duration": 14.022, + "value": 9.405932330557683 }, { - "end": "2024-09-12 18:27:45+00:00", - "value": 0.0033, - "duration": 15.0 + "begin": "2024-08-22 20:03:51.150000+00:00", + "end": "2024-08-22 20:04:05.071000+00:00", + "duration": 13.922, + "value": 9.478773265522682 } ] """ @@ -1610,6 +1610,89 @@ def get_metrics_summary( print(f"Processing took {duration} seconds") return data["aggregations"]["score"] + def _graph_title( + self, + run_id: str, + run_id_list: list[str], + graph: Graph, + params_by_run: dict[str, Any], + periods_by_run: dict[str, Any], + ) -> str: + """Compute a default title for a graph + + Use the period, breakout name selections, run list, and iteration + parameters to construct a meaningful name for a graph. + + For example, "ilab::sdg-samples-sec (batch-size=4) {run 1}", or + "mpstat::Busy-CPU [cpu=4]" + + Args: + run_id: the Crucible run ID + run_id_list: ordered list of run IDs in our list of graphs + graph: the current Graph object + params_by_run: initially empty dict used to cache parameters + periods_by_run: initially empty dict used to cache periods + + Returns: + A string title + """ + names = graph.names + metric = graph.metric + if run_id not in params_by_run: + # Gather iteration parameters outside the loop for help in + # generating useful labels. 
+ all_params = self.search("param", filters=[{"term": {"run.id": run_id}}]) + collector = defaultdict(defaultdict) + for h in self._hits(all_params): + collector[h["iteration"]["id"]][h["param"]["arg"]] = h["param"]["val"] + params_by_run[run_id] = collector + else: + collector = params_by_run[run_id] + + if run_id not in periods_by_run: + periods = self.search("period", filters=[{"term": {"run.id": run_id}}]) + iteration_periods = defaultdict(set) + for p in self._hits(periods): + iteration_periods[p["iteration"]["id"]].add(p["period"]["id"]) + periods_by_run[run_id] = iteration_periods + else: + iteration_periods = periods_by_run[run_id] + + # We can easily end up with multiple graphs across distinct + # periods or iterations, so we want to be able to provide some + # labeling to the graphs. We do this by looking for unique + # iteration parameters values, since the iteration number and + # period name aren't useful by themselves. + name_suffix = "" + if graph.periods: + iteration = None + for i, pset in iteration_periods.items(): + if set(graph.periods) <= pset: + iteration = i + break + + # If the period(s) we're graphing resolve to a single + # iteration in a run with multiple iterations, then we can + # try to find a unique title suffix based on distinct param + # values for that iteration. + if iteration and len(collector) > 1: + unique = collector[iteration].copy() + for i, params in collector.items(): + if i != iteration: + for p in list(unique.keys()): + if p in params and unique[p] == params[p]: + del unique[p] + if unique: + name_suffix = ( + " (" + ",".join([f"{p}={v}" for p, v in unique.items()]) + ")" + ) + + if len(run_id_list) > 1: + name_suffix += f" {{run {run_id_list.index(run_id) + 1}}}" + + options = (" [" + ",".join(names) + "]") if names else "" + return metric + options + name_suffix + def get_metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: """Return metrics data for a run @@ -1660,7 +1743,6 @@ def get_metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: cindex = 0 params_by_run = {} periods_by_run = {} - suffix_by_run = {} # Construct a de-duped ordered list of run IDs, starting with the # default. @@ -1671,78 +1753,26 @@ def get_metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: if g.run and g.run not in run_id_list: run_id_list.append(g.run) + if len(run_id_list) < len(graphdata.graphs) and not default_run_id: + raise HTTPException( + status.HTTP_400_BAD_REQUEST, "each graph request must have a run ID" + ) + for g in graphdata.graphs: run_id = g.run if g.run else default_run_id names = g.names metric: str = g.metric + # The caller can provide a title for each graph; but, if not, we + # journey down dark overgrown pathways to fabricate a default with + # reasonable context, including unique iteration parameters, + # breakdown selections, and which run provided the data. if g.title: title = g.title else: - if run_id not in params_by_run: - # Gather iteration parameters outside the loop for help in - # generating useful lables. 
- all_params = self.search( - "param", filters=[{"term": {"run.id": default_run_id}}] - ) - collector = defaultdict(defaultdict) - for h in self._hits(all_params): - collector[h["iteration"]["id"]][h["param"]["arg"]] = h["param"][ - "val" - ] - params_by_run[run_id] = collector - else: - collector = params_by_run[run_id] - - if run_id not in periods_by_run: - periods = self.search( - "period", filters=[{"term": {"run.id": default_run_id}}] - ) - iteration_periods = defaultdict(set) - for p in self._hits(periods): - if run_id not in suffix_by_run: - suffix_by_run[run_id] = p["run"]["begin"] - iteration_periods[p["iteration"]["id"]].add(p["period"]["id"]) - periods_by_run[run_id] = iteration_periods - else: - iteration_periods = periods_by_run[run_id] - - # We can easily end up with multiple graphs across distinct - # periods or iterations, so we want to be able to provide some - # labeling to the graphs. We do this by looking for unique - # iteration parameters values, since the iteration number and - # period name aren't useful by themselves. - name_suffix = "" - if g.periods: - iteration = None - for i, pset in iteration_periods.items(): - if set(g.periods) <= pset: - iteration = i - break - - # If the period(s) we're graphing resolve to a single - # iteration in a run with multiple iterations, then we can - # try to find a unique title suffix based on distinct param - # values for that iteration. - if iteration and len(collector) > 1: - unique = collector[iteration].copy() - for i, params in collector.items(): - if i != iteration: - for p in list(unique.keys()): - if p in params and unique[p] == params[p]: - del unique[p] - if unique: - name_suffix = ( - " (" - + ",".join([f"{p}={v}" for p, v in unique.items()]) - + ")" - ) - - if len(run_id_list) > 1: - name_suffix += f" {{run {run_id_list.index(run_id) + 1}}}" - - options = (" [" + ",".join(names) + "]") if names else "" - title = metric + options + name_suffix + title = self._graph_title( + run_id, run_id_list, g, params_by_run, periods_by_run + ) ids = self._get_metric_ids( run_id, diff --git a/local-compose.sh b/local-compose.sh index 98b4e01f..8c5a4b60 100755 --- a/local-compose.sh +++ b/local-compose.sh @@ -1,21 +1,32 @@ #!/bin/sh # -# Simple script to build the frontend and backend, and deploy to test out the changes -# Need: -# - Users will need to update the backend/ocpperf.toml file to meet their needs. +# Simple script to build and deploy the frontend and backend containers. # +# Users must to update the backend/ocpperf.toml file to meet their needs. +# +# Please don't edit this file for debugging: it's too easy to accidentally +# commit undesirable changes. 
Instead, I've added some convenient environment +# variables to support common changes (customizing the container ports, and +# disabling either the front end or back end container): +# +# CPT_BACKEND_PORT -- set the port for the backend (default 8000) +# CPT_FRONTEND_PORT -- set the port for the UI (default 3000) +# SKIP_FRONTEND -- SKIP_FRONTEND=1 to skip building and running frontend +# SKIP_BACKEND -- SKIP_BACKEND=1 to skip building and running backend # CPT_BACKEND_PORT=${CPT_BACKEND_PORT:-8000} CPT_FRONTEND_PORT=${CPT_FRONTEND_PORT:-3000} CPT_CONFIG=${CPT_CONFIG:-"$PWD/backend/ocpperf.toml"} +SKIP_FRONTEND=${SKIP_FRONTEND:-0} +SKIP_BACKEND=${SKIP_BACKEND:-0} podman rm -f front back -podman build -f backend/backend.containerfile --tag backend -#podman build -f frontend/frontend.containerfile --tag frontend - -# NOTE: add --network=host to test against a local containerized Horreum -podman run -d --name=back -p ${CPT_BACKEND_PORT}:8000 --network=host -v "${CPT_CONFIG}:/backend/ocpperf.toml:Z" localhost/backend - -#podman run -d --name=front --net=host -p ${CPT_FRONTEND_PORT}:3000 localhost/frontend - +if [ "$SKIP_BACKEND" != 1 ] ;then + podman build -f backend/backend.containerfile --tag backend + podman run -d --name=back -p ${CPT_BACKEND_PORT}:8000 --network=host -v "${CPT_CONFIG}:/backend/ocpperf.toml:Z" localhost/backend +fi +if [ "$SKIP_FRONTEND" != 1 ] ;then + podman build -f frontend/frontend.containerfile --tag frontend + podman run -d --name=front --network=host -p ${CPT_FRONTEND_PORT}:3000 localhost/frontend +fi From ad48906c7af0b04eb035c080491ec52fc31269e8 Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Thu, 3 Oct 2024 15:02:58 -0400 Subject: [PATCH 07/29] Move periods query to action/reducer This cleans up my direct API call to get the run's periods for graphing, to use a separate action and a reducer. I also experimented with trying to improve error diagnosis by looking at some of the error responses to "toast" instead of just saying something went wrong. 
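The error handling change amounts to the sketch below: a FastAPI error body carries a `detail` field that may be a plain string or a structured object, so it has to be normalized before it can be shown in a toast. The helper name and fallback text here are illustrative placeholders, not part of this patch.

```js
// Illustrative sketch only: normalize a FastAPI error "detail" (string or
// object) into a message suitable for a failure toast.
export const errorDetailToMessage = (error) => {
  const detail = error?.response?.data?.detail;
  if (typeof detail === "string") {
    return detail;
  }
  if (detail && typeof detail.message === "string") {
    return detail.message;
  }
  return detail ? JSON.stringify(detail) : "Please try again later";
};
```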
--- backend/app/services/crucible_svc.py | 2 +- frontend/src/actions/ilabActions.js | 38 ++++++++++++++----- frontend/src/actions/toastActions.js | 4 +- frontend/src/actions/types.js | 1 + .../src/components/templates/ILab/index.jsx | 3 +- frontend/src/reducers/ilabReducer.js | 3 ++ 6 files changed, 38 insertions(+), 13 deletions(-) diff --git a/backend/app/services/crucible_svc.py b/backend/app/services/crucible_svc.py index a756d0e4..1edb76f7 100644 --- a/backend/app/services/crucible_svc.py +++ b/backend/app/services/crucible_svc.py @@ -716,7 +716,7 @@ def _get_metric_ids( response["names"] = {n: sorted(v) for n, v in names.items() if v and len(v) > 1} response["periods"] = list(periods) raise HTTPException( - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=[response] + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=response ) def _build_timestamp_range_filters( diff --git a/frontend/src/actions/ilabActions.js b/frontend/src/actions/ilabActions.js index e80ac651..45275bbc 100644 --- a/frontend/src/actions/ilabActions.js +++ b/frontend/src/actions/ilabActions.js @@ -89,9 +89,27 @@ export const fetchMetricsInfo = (uid) => async (dispatch) => { dispatch({ type: TYPES.COMPLETED }); }; +export const fetchPeriods = (uid) => async (dispatch) => { + try { + dispatch({ type: TYPES.LOADING }); + const response = await API.get(`/api/v1/ilab/runs/${uid}/periods`); + if (response.status === 200) { + dispatch({ + type: TYPES.SET_ILAB_PERIODS, + payload: { uid, periods: response.data }, + }); + } + } catch (error) { + console.error(error); + dispatch(showFailureToast(error?.response?.data?.detail)); + } + dispatch({ type: TYPES.COMPLETED }); +}; + export const fetchGraphData = (uid, metric, primary_metric) => async (dispatch, getState) => { try { + const periods = getState().ilab.periods.find((i) => i.uid == uid); const graphData = cloneDeep(getState().ilab.graphData); const filterData = graphData.filter((i) => i.uid !== uid); dispatch({ @@ -100,9 +118,8 @@ export const fetchGraphData = }); const copyData = cloneDeep(filterData); dispatch({ type: TYPES.GRAPH_LOADING }); - const periods = await API.get(`/api/v1/ilab/runs/${uid}/periods`); let graphs = []; - periods.data.forEach((p) => { + periods?.periods?.forEach((p) => { graphs.push({ metric: p.primary_metric, periods: [p.id] }); graphs.push({ metric, @@ -127,8 +144,16 @@ export const fetchGraphData = }); } } catch (error) { - console.error(error); - dispatch(showToast("danger", "Graph error", error.data)); + var detail = error?.response?.data?.detail; + var str; + if (typeof detail == "string") { + str = detail; + } else if (typeof detail == "object" && typeof detail?.message == "string") { + str = detail.message; + } else { + str = JSON.stringify(detail); + } + dispatch(showFailureToast(str)); } dispatch({ type: TYPES.GRAPH_COMPLETED }); }; @@ -161,11 +186,6 @@ export const checkIlabJobs = (newPage) => (dispatch, getState) => { export const setSelectedMetrics = (id, metrics) => (dispatch, getState) => { const metrics_selected = cloneDeep(getState().ilab.metrics_selected); - // if (id in metrics_selected) { - // metrics_selected[id] = metrics; - // } else { - // metrics_selected[id] = metrics; - // } metrics_selected[id] = metrics; dispatch({ type: TYPES.SET_ILAB_SELECTED_METRICS, diff --git a/frontend/src/actions/toastActions.js b/frontend/src/actions/toastActions.js index 574487f1..a0fddabc 100644 --- a/frontend/src/actions/toastActions.js +++ b/frontend/src/actions/toastActions.js @@ -2,11 +2,11 @@ import * as TYPES from 
"./types"; import { uid } from "@/utils/helper"; -export const showFailureToast = () => async (dispatch) => { +export const showFailureToast = (message = null) => async (dispatch) => { const toast = { variant: "danger", title: "Something went wrong", - message: "Please try again later", + message: message ? message : "Please try again later", }; dispatch(showToast(toast.variant, toast.title, toast.message)); }; diff --git a/frontend/src/actions/types.js b/frontend/src/actions/types.js index e4739422..b4137839 100644 --- a/frontend/src/actions/types.js +++ b/frontend/src/actions/types.js @@ -87,3 +87,4 @@ export const SET_ILAB_PAGE = "SET_ILAB_PAGE"; export const SET_ILAB_PAGE_OPTIONS = "SET_ILAB_PAGE_OPTIONS"; export const SET_ILAB_METRICS = "SET_ILAB_METRICS"; export const SET_ILAB_SELECTED_METRICS = "SET_ILAB_SELECTED_METRICS"; +export const SET_ILAB_PERIODS = "SET_ILAB_PERIODS"; diff --git a/frontend/src/components/templates/ILab/index.jsx b/frontend/src/components/templates/ILab/index.jsx index 7a836563..8085c5f5 100644 --- a/frontend/src/components/templates/ILab/index.jsx +++ b/frontend/src/components/templates/ILab/index.jsx @@ -17,7 +17,7 @@ import { Thead, Tr, } from "@patternfly/react-table"; -import { fetchILabJobs, fetchMetricsInfo } from "@/actions/ilabActions"; +import { fetchILabJobs, fetchMetricsInfo, fetchPeriods } from "@/actions/ilabActions"; import { formatDateTime, uid } from "@/utils/helper"; import { useDispatch, useSelector } from "react-redux"; import { useEffect, useState } from "react"; @@ -58,6 +58,7 @@ const ILab = () => { : otherExpandedRunNames; }); if (isExpanding) { + dispatch(fetchPeriods(run.id)); dispatch(fetchMetricsInfo(run.id)); // dispatch(fetchGraphData(run.id, run?.primary_metrics[0])); } diff --git a/frontend/src/reducers/ilabReducer.js b/frontend/src/reducers/ilabReducer.js index a4e5ec50..1ceaea40 100644 --- a/frontend/src/reducers/ilabReducer.js +++ b/frontend/src/reducers/ilabReducer.js @@ -11,6 +11,7 @@ const initialState = { size: 10, offset: 1, metrics: [], + periods: [], metrics_selected: {}, }; const ILabReducer = (state = initialState, action = {}) => { @@ -40,6 +41,8 @@ const ILabReducer = (state = initialState, action = {}) => { return { ...state, page: payload.page, perPage: payload.perPage }; case TYPES.SET_ILAB_METRICS: return { ...state, metrics: [...state.metrics, payload] }; + case TYPES.SET_ILAB_PERIODS: + return { ...state, periods: [...state.periods, payload] }; case TYPES.SET_ILAB_SELECTED_METRICS: return { ...state, From 5671d7765664417bc188c8c0bcd181259fcfe7a7 Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Fri, 4 Oct 2024 08:44:31 -0400 Subject: [PATCH 08/29] Cleanup OpenSearch connections Add a Crucible `close` method, and use a FastAPI yield dependency to ensure every API connection is closed cleanly. 
--- backend/app/api/v1/endpoints/ilab/ilab.py | 80 ++++++++++++++--------- backend/app/services/crucible_svc.py | 6 ++ 2 files changed, 56 insertions(+), 30 deletions(-) diff --git a/backend/app/api/v1/endpoints/ilab/ilab.py b/backend/app/api/v1/endpoints/ilab/ilab.py index e49b8c6c..d83290a8 100644 --- a/backend/app/api/v1/endpoints/ilab/ilab.py +++ b/backend/app/api/v1/endpoints/ilab/ilab.py @@ -8,7 +8,7 @@ from datetime import datetime, timedelta, timezone from typing import Annotated, Any, Optional -from fastapi import APIRouter, Query +from fastapi import APIRouter, Depends, HTTPException, Query, status from app.services.crucible_svc import CrucibleService, Graph, GraphList @@ -26,6 +26,22 @@ def example_error(message: str) -> dict[str, Any]: return example_response({"message": message}) +def crucible_svc(): + crucible = None + try: + crucible = CrucibleService(CONFIGPATH) + yield crucible + except Exception as e: + print(f"Error opening {CONFIGPATH}: {str(e)!r}") + raise HTTPException( + status.HTTP_502_BAD_GATEWAY, + f"Crucible service is not available: {str(e)!r}", + ) + finally: + if crucible: + crucible.close() + + @router.get( "/api/v1/ilab/runs/filters", summary="Returns possible filters", @@ -66,8 +82,7 @@ def example_error(message: str) -> dict[str, Any]: ) }, ) -async def run_filters(): - crucible = CrucibleService(CONFIGPATH) +async def run_filters(crucible: Annotated[CrucibleService, Depends(crucible_svc)]): return crucible.get_run_filters() @@ -125,6 +140,7 @@ async def run_filters(): }, ) async def runs( + crucible: Annotated[CrucibleService, Depends(crucible_svc)], start_date: Annotated[ Optional[str], Query(description="Start time for search", examples=["2020-11-10"]), @@ -151,7 +167,6 @@ async def runs( Query(description="Page offset to start", examples=[10]), ] = 0, ): - crucible = CrucibleService(CONFIGPATH) if start_date is None and end_date is None: now = datetime.now(timezone.utc) start = now - timedelta(days=30) @@ -174,8 +189,7 @@ async def runs( 400: example_error("Parameter error"), }, ) -async def tags(run: str): - crucible = CrucibleService(CONFIGPATH) +async def tags(crucible: Annotated[CrucibleService, Depends(crucible_svc)], run: str): return crucible.get_tags(run) @@ -209,8 +223,7 @@ async def tags(run: str): 400: example_error("Parameter error"), }, ) -async def params(run: str): - crucible = CrucibleService(CONFIGPATH) +async def params(crucible: Annotated[CrucibleService, Depends(crucible_svc)], run: str): return crucible.get_params(run) @@ -242,8 +255,9 @@ async def params(run: str): 400: example_error("Parameter error"), }, ) -async def iterations(run: str): - crucible = CrucibleService(CONFIGPATH) +async def iterations( + crucible: Annotated[CrucibleService, Depends(crucible_svc)], run: str +): return crucible.get_iterations(run) @@ -275,8 +289,9 @@ async def iterations(run: str): 400: example_error("Parameter error"), }, ) -async def run_samples(run: str): - crucible = CrucibleService(CONFIGPATH) +async def run_samples( + crucible: Annotated[CrucibleService, Depends(crucible_svc)], run: str +): return crucible.get_samples(run) @@ -312,8 +327,9 @@ async def run_samples(run: str): 400: example_error("Parameter error"), }, ) -async def run_periods(run: str): - crucible = CrucibleService(CONFIGPATH) +async def run_periods( + crucible: Annotated[CrucibleService, Depends(crucible_svc)], run: str +): return crucible.get_periods(run) @@ -345,8 +361,9 @@ async def run_periods(run: str): 400: example_error("Parameter error"), }, ) -async def 
iteration_samples(iteration: str): - crucible = CrucibleService(CONFIGPATH) +async def iteration_samples( + crucible: Annotated[CrucibleService, Depends(crucible_svc)], iteration: str +): return crucible.get_samples(iteration=iteration) @@ -393,8 +410,9 @@ async def iteration_samples(iteration: str): 400: example_error("Parameter error"), }, ) -async def timeline(run: str): - crucible = CrucibleService(CONFIGPATH) +async def timeline( + crucible: Annotated[CrucibleService, Depends(crucible_svc)], run: str +): return crucible.get_timeline(run) @@ -432,8 +450,9 @@ async def timeline(run: str): 400: example_error("Parameter error"), }, ) -async def metrics(run: str): - crucible = CrucibleService(CONFIGPATH) +async def metrics( + crucible: Annotated[CrucibleService, Depends(crucible_svc)], run: str +): return crucible.get_metrics_list(run) @@ -455,6 +474,7 @@ async def metrics(run: str): }, ) async def metric_breakouts( + crucible: Annotated[CrucibleService, Depends(crucible_svc)], run: str, metric: str, name: Annotated[ @@ -472,7 +492,6 @@ async def metric_breakouts( ), ] = None, ): - crucible = CrucibleService(CONFIGPATH) return crucible.get_metric_breakouts(run, metric, names=name, periods=period) @@ -512,6 +531,7 @@ async def metric_breakouts( }, ) async def metric_data( + crucible: Annotated[CrucibleService, Depends(crucible_svc)], run: str, metric: str, name: Annotated[ @@ -532,7 +552,6 @@ async def metric_data( bool, Query(description="Allow aggregation of metrics") ] = False, ): - crucible = CrucibleService(CONFIGPATH) return crucible.get_metrics_data( run, metric, names=name, periods=period, aggregate=aggregate ) @@ -567,6 +586,7 @@ async def metric_data( }, ) async def metric_summary( + crucible: Annotated[CrucibleService, Depends(crucible_svc)], run: str, metric: str, name: Annotated[ @@ -584,7 +604,6 @@ async def metric_summary( ), ] = None, ): - crucible = CrucibleService(CONFIGPATH) return crucible.get_metrics_summary(run, metric, names=name, periods=period) @@ -654,8 +673,9 @@ async def metric_summary( ), }, ) -async def metric_graph_body(graphs: GraphList): - crucible = CrucibleService(CONFIGPATH) +async def metric_graph_body( + crucible: Annotated[CrucibleService, Depends(crucible_svc)], graphs: GraphList +): return crucible.get_metrics_graph(graphs) @@ -712,6 +732,7 @@ async def metric_graph_body(graphs: GraphList): }, ) async def metric_graph_param( + crucible: Annotated[CrucibleService, Depends(crucible_svc)], run: str, metric: str, aggregate: Annotated[ @@ -733,7 +754,6 @@ async def metric_graph_param( ] = None, title: Annotated[Optional[str], Query(description="Title for graph")] = None, ): - crucible = CrucibleService(CONFIGPATH) return crucible.get_metrics_graph( GraphList( run=run, @@ -776,8 +796,7 @@ async def metric_graph_param( ), }, ) -async def info(): - crucible = CrucibleService(CONFIGPATH) +async def info(crucible: Annotated[CrucibleService, Depends(crucible_svc)]): return crucible.info @@ -807,6 +826,7 @@ async def info(): 400: example_error("Index name 'foo' doesn't exist"), }, ) -async def fields(index: str): - crucible = CrucibleService(CONFIGPATH) +async def fields( + crucible: Annotated[CrucibleService, Depends(crucible_svc)], index: str +): return crucible.get_fields(index=index) diff --git a/backend/app/services/crucible_svc.py b/backend/app/services/crucible_svc.py index 1edb76f7..8abe157a 100644 --- a/backend/app/services/crucible_svc.py +++ b/backend/app/services/crucible_svc.py @@ -608,6 +608,12 @@ def _search( ) return value + def close(self): + 
"""Close the OpenSearch connection""" + if self.elastic: + self.elastic.close() + self.elastic = None + def search( self, index: str, From a49fc651fdf9775e5ca06827448a2b2f9e276d42 Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Mon, 7 Oct 2024 09:49:13 -0400 Subject: [PATCH 09/29] Try to remove a couple of incidental changes --- backend/app/main.py | 53 +++++++++++++++++---------------- backend/scripts/start-reload.sh | 3 +- 2 files changed, 28 insertions(+), 28 deletions(-) diff --git a/backend/app/main.py b/backend/app/main.py index c68d4c5b..727c9b55 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -1,5 +1,4 @@ import sys -import traceback import typing from fastapi import FastAPI, HTTPException, Request @@ -17,23 +16,24 @@ def render(self, content: typing.Any) -> bytes: return orjson.dumps(content) -origins = ["http://localhost:3000", "localhost:3000"] +origins = [ + "http://localhost:3000", + "localhost:3000" +] -app = FastAPI( - default_response_class=ORJSONResponse, - docs_url="/docs", - redoc_url=None, - title="CPT-Dashboard API Documentation", - version="0.0.1", - contact={ - "name": "OCP PerfScale Jedi", - "url": "https://redhat.enterprise.slack.com/archives/C05CDC19ZKJ", - }, - license_info={ - "name": "Apache 2.0", - "url": "https://www.apache.org/licenses/LICENSE-2.0", - }, -) +app = FastAPI(default_response_class=ORJSONResponse, + docs_url="/docs", + redoc_url=None, + title="CPT-Dashboard API Documentation", + version="0.0.1", + contact={ + "name": "OCP PerfScale Jedi", + "url": "https://redhat.enterprise.slack.com/archives/C05CDC19ZKJ", + }, + license_info={ + "name": "Apache 2.0", + "url": "https://www.apache.org/licenses/LICENSE-2.0", + }) @app.middleware("http") @@ -53,7 +53,10 @@ async def report_exceptions(request: Request, call_next): file=sys.stderr, ) tb = tb.tb_next - return JSONResponse(status_code=500, content={"message": f"Unhandled server error at {where}: {str(e)}"}) + return JSONResponse( + status_code=500, + content={"message": f"Unhandled server error at {where}: {str(e)}"}, + ) app.add_middleware( @@ -64,17 +67,15 @@ async def report_exceptions(request: Request, call_next): allow_headers=["*"], ) -routes_to_reroute = ["/"] - +routes_to_reroute = ['/'] -@app.middleware("http") +@app.middleware('http') async def some_middleware(request: Request, call_next): if request.url.path in routes_to_reroute: - request.scope["path"] = "/docs" - headers = dict(request.scope["headers"]) - headers[b"custom-header"] = b"my custom header" - request.scope["headers"] = [(k, v) for k, v in headers.items()] + request.scope['path'] = '/docs' + headers = dict(request.scope['headers']) + headers[b'custom-header'] = b'my custom header' + request.scope['headers'] = [(k, v) for k, v in headers.items()] return await call_next(request) - app.include_router(router) diff --git a/backend/scripts/start-reload.sh b/backend/scripts/start-reload.sh index 8f1e4eb5..869f2488 100755 --- a/backend/scripts/start-reload.sh +++ b/backend/scripts/start-reload.sh @@ -1,3 +1,2 @@ #!/usr/bin/bash -LOG=${CPT_BACKEND_LOG_LEVEL:-info} -uvicorn --reload --log-level="${LOG}" --host="0.0.0.0" --port=8000 --forwarded-allow-ips='*' --proxy-headers app.main:app +uvicorn --reload --host="0.0.0.0" --port=8000 --forwarded-allow-ips='*' --proxy-headers app.main:app From 1d5783dc70df87aaf6f825cac6f864d8c0dabebe Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Mon, 7 Oct 2024 15:34:33 -0400 Subject: [PATCH 10/29] Undoing a few more ancillary changes --- backend/app/services/crucible_readme.md | 11 
+++++++++++ backend/scripts/start-reload.sh | 1 + frontend/src/actions/ilabActions.js | 16 ++++------------ frontend/src/actions/toastActions.js | 4 ++-- .../src/components/atoms/PlotGraph/index.jsx | 5 ++--- frontend/src/components/templates/ILab/index.jsx | 1 - 6 files changed, 20 insertions(+), 18 deletions(-) diff --git a/backend/app/services/crucible_readme.md b/backend/app/services/crucible_readme.md index 4b61e5eb..e58184ea 100644 --- a/backend/app/services/crucible_readme.md +++ b/backend/app/services/crucible_readme.md @@ -4,6 +4,17 @@ each with a specific document mapping. CDM index names include a "root" name Crucible timestamps are integers in "millisecond-from-the-epoch" format. +The Crucible CDM hierarchy is roughly: + +- RUN (an instrumented benchmark run) + - TAG (metadata) + - ITERATION (a benchmark interval) + - PARAM (execution parameters) + - SAMPLE + - PERIOD (time range where data is recorded) + - METRIC_DESC (description of a specific recorded metric) + - METRIC_DATA (a specific recorded data point) +
RUN
this contains the basic information about a performance run, including a generated UUID, begin and end timestamps, a benchmark name, a user name and diff --git a/backend/scripts/start-reload.sh b/backend/scripts/start-reload.sh index 869f2488..764e707c 100755 --- a/backend/scripts/start-reload.sh +++ b/backend/scripts/start-reload.sh @@ -1,2 +1,3 @@ #!/usr/bin/bash + uvicorn --reload --host="0.0.0.0" --port=8000 --forwarded-allow-ips='*' --proxy-headers app.main:app diff --git a/frontend/src/actions/ilabActions.js b/frontend/src/actions/ilabActions.js index 45275bbc..65d8ec59 100644 --- a/frontend/src/actions/ilabActions.js +++ b/frontend/src/actions/ilabActions.js @@ -100,8 +100,8 @@ export const fetchPeriods = (uid) => async (dispatch) => { }); } } catch (error) { - console.error(error); - dispatch(showFailureToast(error?.response?.data?.detail)); + console.error(`ERROR (${error?.response?.status}): ${JSON.stringify(error?.response?.data)}`); + dispatch(showFailureToast()); } dispatch({ type: TYPES.COMPLETED }); }; @@ -144,16 +144,8 @@ export const fetchGraphData = }); } } catch (error) { - var detail = error?.response?.data?.detail; - var str; - if (typeof detail == "string") { - str = detail; - } else if (typeof detail == "object" && typeof detail?.message == "string") { - str = detail.message; - } else { - str = JSON.stringify(detail); - } - dispatch(showFailureToast(str)); + console.error(`ERROR (${error?.response?.status}): ${JSON.stringify(error?.response?.data)}`); + dispatch(showFailureToast()); } dispatch({ type: TYPES.GRAPH_COMPLETED }); }; diff --git a/frontend/src/actions/toastActions.js b/frontend/src/actions/toastActions.js index a0fddabc..574487f1 100644 --- a/frontend/src/actions/toastActions.js +++ b/frontend/src/actions/toastActions.js @@ -2,11 +2,11 @@ import * as TYPES from "./types"; import { uid } from "@/utils/helper"; -export const showFailureToast = (message = null) => async (dispatch) => { +export const showFailureToast = () => async (dispatch) => { const toast = { variant: "danger", title: "Something went wrong", - message: message ? 
message : "Please try again later", + message: "Please try again later", }; dispatch(showToast(toast.variant, toast.title, toast.message)); }; diff --git a/frontend/src/components/atoms/PlotGraph/index.jsx b/frontend/src/components/atoms/PlotGraph/index.jsx index 2afb0f1c..182496f7 100644 --- a/frontend/src/components/atoms/PlotGraph/index.jsx +++ b/frontend/src/components/atoms/PlotGraph/index.jsx @@ -1,12 +1,12 @@ import Plotly from "react-plotly.js"; import PropTypes from "prop-types"; + const PlotGraph = (props) => { return ( ); }; @@ -14,5 +14,4 @@ export default PlotGraph; PlotGraph.propTypes = { data: PropTypes.arr, - layout: PropTypes.object, }; diff --git a/frontend/src/components/templates/ILab/index.jsx b/frontend/src/components/templates/ILab/index.jsx index 8085c5f5..0b4425fa 100644 --- a/frontend/src/components/templates/ILab/index.jsx +++ b/frontend/src/components/templates/ILab/index.jsx @@ -60,7 +60,6 @@ const ILab = () => { if (isExpanding) { dispatch(fetchPeriods(run.id)); dispatch(fetchMetricsInfo(run.id)); - // dispatch(fetchGraphData(run.id, run?.primary_metrics[0])); } }; From f20e45de71f2a52c9897f510606ff73b156c9190 Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Wed, 9 Oct 2024 15:55:07 -0400 Subject: [PATCH 11/29] Review feedback --- backend/app/services/crucible_svc.py | 30 +++++++++++++++------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/backend/app/services/crucible_svc.py b/backend/app/services/crucible_svc.py index 8abe157a..ef51dee1 100644 --- a/backend/app/services/crucible_svc.py +++ b/backend/app/services/crucible_svc.py @@ -319,8 +319,8 @@ def _hits( Iteratively yields the "_source" of each hit. As a convenience, can yield a sub-object of "_source" ... for example, specifying the - optional "fields" as ["metric_desc"] will yield the equivalent of - hit["_source"]["metric_desc"] + optional "fields" as ["metric_desc", "id"] will yield the equivalent of + hit["_source"]["metric_desc"]["id"] Args: payload: OpenSearch reponse payload @@ -560,7 +560,7 @@ def _build_metric_filters( """Helper for filtering metric descriptions We normally filter by run, metric "label", and optionally by breakout - names and periods. This encapsulates the filter contruction. + names and periods. This encapsulates the filter construction. Args: run: run ID @@ -571,12 +571,12 @@ def _build_metric_filters( Returns: A list of OpenSearch filter expressions """ - source, type = metric.split("::") + msource, mtype = metric.split("::") return ( [ {"term": {"run.id": run}}, - {"term": {"metric_desc.source": source}}, - {"term": {"metric_desc.type": type}}, + {"term": {"metric_desc.source": msource}}, + {"term": {"metric_desc.type": mtype}}, ] + cls._build_name_filters(names) + cls._build_period_filters(periods) @@ -703,13 +703,14 @@ def _get_metric_ids( if len(ids) < 2 or aggregate: return ids - # This probably means we're not filtering well enouch for a useful - # summary. Diagnose how to improve it. + # If we get here, the client asked for breakout data that doesn't + # resolve to a single metric stream, and didn't specify aggregation. + # Offer some help. names = defaultdict(set) periods = set() response = { - "message": f"More than one metric ({len(ids)}) probably means " - "you should add filters" + "message": f"More than one metric ({len(ids)}) means " + "you should add breakout filters or aggregate." 
} for m in self._hits(metrics): if "period" in m: @@ -744,8 +745,8 @@ def _build_timestamp_range_filters( if periods: ps = self._split_list(periods) matches = self.search("period", filters=[{"terms": {"period.id": ps}}]) - start = min([int(h["begin"]) for h in self._hits(matches, ["period"])]) - end = max([int(h["end"]) for h in self._hits(matches, ["period"])]) + start = min([int(h) for h in self._hits(matches, ["period", "begin"])]) + end = max([int(h) for h in self._hits(matches, ["period", "end"])]) return [ {"range": {"metric_data.begin": {"gte": str(start)}}}, {"range": {"metric_data.end": {"lte": str(end)}}}, @@ -817,7 +818,8 @@ def _get_run_ids( filtered = self.search( index, source="run.id", filters=filters, ignore_unavailable=True ) - return set([x["id"] for x in self._hits(filtered, ["run"])]) + print(f"HITS: {filtered['hits']['hits']}") + return set([x for x in self._hits(filtered, ["run", "id"])]) def get_run_filters(self) -> dict[str, dict[str, list[str]]]: """Return possible tag and filter terms @@ -1062,7 +1064,6 @@ def get_runs( for h in self._hits(hits): run = h["run"] rid = run["id"] - runs[rid] = run # Filter the runs by our tag and param queries if param_filters and rid not in paramids: @@ -1073,6 +1074,7 @@ def get_runs( # Collect unique runs: the status is "fail" if any iteration for # that run ID failed. + runs[rid] = run run["tags"] = tags.get(rid, {}) run["iterations"] = [] run["primary_metrics"] = set() From a959fe60381795451fc5c6a8e5cc47fa37d7024b Mon Sep 17 00:00:00 2001 From: MVarshini Date: Thu, 10 Oct 2024 12:33:51 +0530 Subject: [PATCH 12/29] Pagination and Date filter issue --- frontend/src/actions/filterActions.js | 3 +- frontend/src/actions/ilabActions.js | 119 +++++++++++------- frontend/src/actions/paginationActions.js | 8 +- frontend/src/actions/types.js | 1 + .../components/organisms/Pagination/index.jsx | 11 +- .../src/components/templates/ILab/index.jsx | 37 +++++- frontend/src/reducers/ilabReducer.js | 7 +- 7 files changed, 125 insertions(+), 61 deletions(-) diff --git a/frontend/src/actions/filterActions.js b/frontend/src/actions/filterActions.js index 0307bcdf..f8fa5691 100644 --- a/frontend/src/actions/filterActions.js +++ b/frontend/src/actions/filterActions.js @@ -1,3 +1,4 @@ +import { fetchILabJobs, setIlabDateFilter } from "./ilabActions"; import { removeCPTAppliedFilters, setCPTAppliedFilters, @@ -27,7 +28,6 @@ import { setTelcoOtherSummaryFilter, } from "./telcoActions"; -import { setIlabDateFilter } from "./ilabActions"; import store from "@/store/store"; const { dispatch } = store; @@ -79,6 +79,7 @@ export const setDateFilter = (date, key, navigation, currType) => { dispatch(setTelcoDateFilter(date, key, navigation)); } else if (currType === "ilab") { dispatch(setIlabDateFilter(date, key, navigation)); + dispatch(fetchILabJobs()); } }; diff --git a/frontend/src/actions/ilabActions.js b/frontend/src/actions/ilabActions.js index 65d8ec59..1152846b 100644 --- a/frontend/src/actions/ilabActions.js +++ b/frontend/src/actions/ilabActions.js @@ -1,61 +1,74 @@ import * as API_ROUTES from "@/utils/apiConstants"; import * as TYPES from "./types.js"; -import { showFailureToast, showToast } from "@/actions/toastActions"; - import API from "@/utils/axiosInstance"; import { appendQueryString } from "@/utils/helper"; import { cloneDeep } from "lodash"; +import { showFailureToast } from "@/actions/toastActions"; -export const fetchILabJobs = () => async (dispatch, getState) => { - try { - dispatch({ type: TYPES.LOADING }); - const { start_date, 
end_date, size, offset } = getState().ilab; - const response = await API.get(API_ROUTES.ILABS_JOBS_API_V1, { - params: { - ...(start_date && { start_date }), - ...(end_date && { end_date }), - ...(size && { size }), - ...(offset && { offset }), - }, - }); - if (response.status === 200 && response?.data?.results.length > 0) { - const startDate = response.data.startDate, - endDate = response.data.endDate; - dispatch({ - type: TYPES.SET_ILAB_JOBS_DATA, - payload: response.data.results, - }); - - dispatch({ - type: TYPES.SET_ILAB_DATE_FILTER, - payload: { - start_date: startDate, - end_date: endDate, +export const fetchILabJobs = + (shouldStartFresh = false) => + async (dispatch, getState) => { + try { + dispatch({ type: TYPES.LOADING }); + const { start_date, end_date, size, offset, results } = getState().ilab; + const response = await API.get(API_ROUTES.ILABS_JOBS_API_V1, { + params: { + ...(start_date && { start_date }), + ...(end_date && { end_date }), + ...(size && { size }), + ...(offset && { offset }), }, }); + if (response.status === 200 && response?.data?.results.length > 0) { + const startDate = response.data.startDate, + endDate = response.data.endDate; + dispatch({ + type: TYPES.SET_ILAB_JOBS_DATA, + payload: shouldStartFresh + ? response.data.results + : [...results, ...response.data.results], + }); - dispatch({ - type: TYPES.SET_ILAB_TOTAL_ITEMS, - payload: response.data.total, - }); - dispatch({ - type: TYPES.SET_ILAB_OFFSET, - payload: response.data.next_offset, - }); + dispatch({ + type: TYPES.SET_ILAB_DATE_FILTER, + payload: { + start_date: startDate, + end_date: endDate, + }, + }); + + dispatch({ + type: TYPES.SET_ILAB_TOTAL_ITEMS, + payload: response.data.total, + }); + dispatch({ + type: TYPES.SET_ILAB_OFFSET, + payload: response.data.next_offset, + }); + + dispatch(tableReCalcValues()); + } + } catch (error) { + dispatch(showFailureToast()); } - } catch (error) { - dispatch(showFailureToast()); - } - dispatch({ type: TYPES.COMPLETED }); -}; + dispatch({ type: TYPES.COMPLETED }); + }; +export const sliceIlabTableRows = + (startIdx, endIdx) => (dispatch, getState) => { + const results = [...getState().ilab.results]; + dispatch({ + type: TYPES.SET_ILAB_INIT_JOBS, + payload: results.slice(startIdx, endIdx), + }); + }; export const setIlabDateFilter = (start_date, end_date, navigate) => (dispatch, getState) => { - const appliedFilters = getState().cpt.appliedFilters; + const appliedFilters = getState().ilab.appliedFilters; dispatch({ - type: TYPES.SET_CPT_DATE_FILTER, + type: TYPES.SET_ILAB_DATE_FILTER, payload: { start_date, end_date, @@ -63,8 +76,6 @@ export const setIlabDateFilter = }); appendQueryString({ ...appliedFilters, start_date, end_date }, navigate); - - dispatch(fetchILabJobs()); }; export const fetchMetricsInfo = (uid) => async (dispatch) => { @@ -100,7 +111,11 @@ export const fetchPeriods = (uid) => async (dispatch) => { }); } } catch (error) { - console.error(`ERROR (${error?.response?.status}): ${JSON.stringify(error?.response?.data)}`); + console.error( + `ERROR (${error?.response?.status}): ${JSON.stringify( + error?.response?.data + )}` + ); dispatch(showFailureToast()); } dispatch({ type: TYPES.COMPLETED }); @@ -144,7 +159,11 @@ export const fetchGraphData = }); } } catch (error) { - console.error(`ERROR (${error?.response?.status}): ${JSON.stringify(error?.response?.data)}`); + console.error( + `ERROR (${error?.response?.status}): ${JSON.stringify( + error?.response?.data + )}` + ); dispatch(showFailureToast()); } dispatch({ type: TYPES.GRAPH_COMPLETED 
}); @@ -184,3 +203,11 @@ export const setSelectedMetrics = (id, metrics) => (dispatch, getState) => { payload: metrics_selected, }); }; + +export const tableReCalcValues = () => (dispatch, getState) => { + const { page, perPage } = getState().ilab; + + const startIdx = page !== 1 ? (page - 1) * perPage : 0; + const endIdx = page !== 1 ? page * perPage - 1 : perPage; + dispatch(sliceIlabTableRows(startIdx, endIdx)); +}; diff --git a/frontend/src/actions/paginationActions.js b/frontend/src/actions/paginationActions.js index 8cbc9641..1717a82a 100644 --- a/frontend/src/actions/paginationActions.js +++ b/frontend/src/actions/paginationActions.js @@ -3,7 +3,11 @@ import { setCPTPageOptions, sliceCPTTableRows, } from "./homeActions"; -import { setIlabPage, setIlabPageOptions } from "./ilabActions"; +import { + setIlabPage, + setIlabPageOptions, + sliceIlabTableRows, +} from "./ilabActions"; import { setOCPPage, setOCPPageOptions, sliceOCPTableRows } from "./ocpActions"; import { setQuayPage, setQuayPageOptions } from "./quayActions"; import { setTelcoPage, setTelcoPageOptions } from "./telcoActions"; @@ -43,6 +47,8 @@ export const sliceTableRows = (startIdx, endIdx, currType) => (dispatch) => { dispatch(sliceCPTTableRows(startIdx, endIdx)); } else if (currType === "ocp") { dispatch(sliceOCPTableRows(startIdx, endIdx)); + } else if (currType === "ilab") { + dispatch(sliceIlabTableRows(startIdx, endIdx)); } }; diff --git a/frontend/src/actions/types.js b/frontend/src/actions/types.js index b4137839..58d7506f 100644 --- a/frontend/src/actions/types.js +++ b/frontend/src/actions/types.js @@ -88,3 +88,4 @@ export const SET_ILAB_PAGE_OPTIONS = "SET_ILAB_PAGE_OPTIONS"; export const SET_ILAB_METRICS = "SET_ILAB_METRICS"; export const SET_ILAB_SELECTED_METRICS = "SET_ILAB_SELECTED_METRICS"; export const SET_ILAB_PERIODS = "SET_ILAB_PERIODS"; +export const SET_ILAB_INIT_JOBS = "SET_ILAB_INIT_JOBS"; diff --git a/frontend/src/components/organisms/Pagination/index.jsx b/frontend/src/components/organisms/Pagination/index.jsx index 3eb2c706..deb8d8fe 100644 --- a/frontend/src/components/organisms/Pagination/index.jsx +++ b/frontend/src/components/organisms/Pagination/index.jsx @@ -23,18 +23,16 @@ const RenderPagination = (props) => { const onSetPage = useCallback( (_evt, newPage, _perPage, startIdx, endIdx) => { dispatch(setPage(newPage, props.type)); - if (props.type !== "ilab") { - dispatch(sliceTableRows(startIdx, endIdx, props.type)); - } + + dispatch(sliceTableRows(startIdx, endIdx, props.type)); }, [dispatch, props.type] ); const onPerPageSelect = useCallback( (_evt, newPerPage, newPage, startIdx, endIdx) => { dispatch(setPageOptions(newPage, newPerPage, props.type)); - if (props.type !== "ilab") { - dispatch(sliceTableRows(startIdx, endIdx, props.type)); - } + + dispatch(sliceTableRows(startIdx, endIdx, props.type)); }, [dispatch, props.type] ); @@ -55,6 +53,7 @@ const RenderPagination = (props) => { perPageOptions={perPageOptions} onSetPage={onSetPage} onPerPageSelect={onPerPageSelect} + onPageInput={checkAndFetch} /> ); }; diff --git a/frontend/src/components/templates/ILab/index.jsx b/frontend/src/components/templates/ILab/index.jsx index 0b4425fa..d1eb5d62 100644 --- a/frontend/src/components/templates/ILab/index.jsx +++ b/frontend/src/components/templates/ILab/index.jsx @@ -17,10 +17,16 @@ import { Thead, Tr, } from "@patternfly/react-table"; -import { fetchILabJobs, fetchMetricsInfo, fetchPeriods } from "@/actions/ilabActions"; +import { + fetchILabJobs, + fetchMetricsInfo, + fetchPeriods, + 
setIlabDateFilter, +} from "@/actions/ilabActions"; import { formatDateTime, uid } from "@/utils/helper"; import { useDispatch, useSelector } from "react-redux"; import { useEffect, useState } from "react"; +import { useNavigate, useSearchParams } from "react-router-dom"; import ILabGraph from "./ILabGraph"; import MetaRow from "./MetaRow"; @@ -28,13 +34,13 @@ import MetricsSelect from "./MetricsDropdown"; import RenderPagination from "@/components/organisms/Pagination"; import StatusCell from "./StatusCell"; import TableFilter from "@/components/organisms/TableFilters"; -import { useNavigate } from "react-router-dom"; const ILab = () => { const dispatch = useDispatch(); const navigate = useNavigate(); + const [searchParams] = useSearchParams(); - const { results, start_date, end_date } = useSelector((state) => state.ilab); + const { start_date, end_date } = useSelector((state) => state.ilab); const [expandedResult, setExpandedResult] = useState([]); const [expanded, setAccExpanded] = useState(["bordered-toggle1"]); @@ -63,7 +69,27 @@ const ILab = () => { } }; - const { totalItems, page, perPage } = useSelector((state) => state.ilab); + const { totalItems, page, perPage, tableData } = useSelector( + (state) => state.ilab + ); + + useEffect(() => { + if (searchParams.size > 0) { + // date filter is set apart + const startDate = searchParams.get("start_date"); + const endDate = searchParams.get("end_date"); + + searchParams.delete("start_date"); + searchParams.delete("end_date"); + const params = Object.fromEntries(searchParams); + const obj = {}; + for (const key in params) { + obj[key] = params[key].split(","); + } + dispatch(setIlabDateFilter(startDate, endDate, navigate)); + } + }, []); + useEffect(() => { dispatch(fetchILabJobs()); }, [dispatch]); @@ -78,6 +104,7 @@ const ILab = () => { end_date: "End Date", status: "Status", }; + return ( <> { - {results.map((item, rowIndex) => ( + {tableData.map((item, rowIndex) => ( <> { const { type, payload } = action; @@ -20,7 +21,7 @@ const ILabReducer = (state = initialState, action = {}) => { case TYPES.SET_ILAB_JOBS_DATA: return { ...state, - results: [...state.results, ...payload], + results: payload, }; case TYPES.SET_ILAB_DATE_FILTER: return { @@ -50,6 +51,8 @@ const ILabReducer = (state = initialState, action = {}) => { }; case TYPES.SET_ILAB_GRAPH_DATA: return { ...state, graphData: payload }; + case TYPES.SET_ILAB_INIT_JOBS: + return { ...state, tableData: payload }; default: return state; } From 79151ea62c32b90fa8450ff24efdb3e22e18cdd0 Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Thu, 10 Oct 2024 12:00:09 -0400 Subject: [PATCH 13/29] Rewrite param consolidation + other review feedback --- backend/app/services/crucible_readme.md | 46 +++++----- backend/app/services/crucible_svc.py | 115 ++++++++++++++++-------- 2 files changed, 100 insertions(+), 61 deletions(-) diff --git a/backend/app/services/crucible_readme.md b/backend/app/services/crucible_readme.md index e58184ea..0ac2e71f 100644 --- a/backend/app/services/crucible_readme.md +++ b/backend/app/services/crucible_readme.md @@ -15,6 +15,14 @@ The Crucible CDM hierarchy is roughly: - METRIC_DESC (description of a specific recorded metric) - METRIC_DATA (a specific recorded data point) +OpenSearch doesn't support the concept of a SQL "join", but many of the indices +contain documents that could be considered a static "join" with parent documents +for convenience. 
For example, each `iteration` document contains a copy of its +parent `run` document, while the `period` document contains copies of its parent +`sample`, `iteration`, and `run` documents. This means, for example, that it's +possible to make a single query returning all `period` documents for a specific +iteration number of a specific run. +
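As a sketch of what that single query could look like (the host, index name, and run UUID below are placeholders, not values defined by this document):

```python
# Sketch only: fetch every "period" document for iteration 1 of one run in a
# single query, relying on the parent copies embedded in each period document.
from opensearchpy import OpenSearch

client = OpenSearch(hosts=["http://localhost:9200"])
periods = client.search(
    index="cdm-period",  # placeholder; substitute the real CDM period index
    body={
        "query": {
            "bool": {
                "filter": [
                    {"term": {"run.id": "<run-uuid>"}},
                    {"term": {"iteration.num": "1"}},
                ]
            }
        }
    },
)
for hit in periods["hits"]["hits"]:
    print(hit["_source"]["period"]["id"], hit["_source"]["period"]["name"])
```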
RUN
this contains the basic information about a performance run, including a generated UUID, begin and end timestamps, a benchmark name, a user name and @@ -24,31 +32,25 @@ The Crucible CDM hierarchy is roughly: arbitrary context with a run, for example software versions, hardware, or other metadata. This can be considered a SQL JOIN with the run document, adding a tag UUID, name, and value.
-
ITERATION
this contains basic information about a performance run iteration. - This is a JOIN with the RUN document, duplicating the run fields while - adding an iteration UUID, number, the primary (benchmark) metric associated +
ITERATION
this contains basic information about a performance run iteration, + including the iteration UUID, number, the primary (benchmark) metric associated with the iteration, plus the primary "period" of the iteration, and the iteration status.
-
PARAM
this contains information about a benchmark parameter value affecting - the behavior of an iteration. This is a JOIN with the run and iteration - data, adding a parameter ID, argument, and value. While parameters are - iteration-specific, parameters that don't vary between iterations are often - represented as run parameters.
-
SAMPLE
this contains basic information about a sample of an iteration. This is - effectively a JOIN against iteration and run, adding a sample UUID and - number, along with a "path" for sample data and a sample status.
+
PARAM
this defines a key/value pair specifying behavior of the benchmark + script for an iteration. Parameters are iteration-specific, but parameters that + don't vary between iterations are often represented as run parameters.
+
SAMPLE
this contains basic information about a sample of an iteration, + including a sample UUID and sample number, along with a "path" for sample data + and a sample status.
PERIOD
this contains basic information about a period during which data is - collected within a sample. This is a JOIN against sample, iteration, and - period, adding the period UUID, name, and begin and end timestamps. A set - of periods can be "linked" through a "prev_id" field.
-
METRIC_DESC
this contains descriptive data about a specific set of benchmark - metric data. This is another JOIN, containing the associated period, - sample, iteration, and run data while adding information specific to a - sequence of metric data values. These include the metric UUID, a class, - type, and source, and a set of "names" which define breakouts that narrow - down a specific source and type. For example source:mpstat, type:Busy-CPU - data is broken down by package, cpu, core, and other breakouts which can - be isolated or aggregated for data reporting.
+ collected within a sample, including the period UUID, name, and begin and end + timestamps. A set of periods can be "linked" through a "prev_id" field.
+
METRIC_DESC
this contains descriptive data about a specific series + of metric values within a specific period of a run, including the metric UUID, + the metric "class", type, and source, along with a set of "names" (key/value + pairs) defining the metric breakout details that narrow down a specific source and + type. For example source:mpstat, type:Busy-CPU data is broken down by package, cpu, + core, and other breakouts which can be isolated or aggregated for data reporting.
METRIC_DATA
this describes a specific data point, sampled over a specified duration with a fixed begin and end timestamp, plus a floating point value. Each is tied to a specific metric_desc UUID value. Depending on the varied diff --git a/backend/app/services/crucible_svc.py b/backend/app/services/crucible_svc.py index ef51dee1..bf00d75b 100644 --- a/backend/app/services/crucible_svc.py +++ b/backend/app/services/crucible_svc.py @@ -203,6 +203,33 @@ class Quote: return (token, next_char) if not first_quote else (token[1:-1], next_char) +class CommonParams: + """Help with sorting out parameters + + Parameter values are associated with iterations, but often a set of + parameters is common across all iterations of a run, and that set can + provide useful context. + + This helps to filter out identical parameters across a set of + iterations. + """ + + def __init__(self): + self.common: dict[str, Any] = {} + self.removed = set() + + def add(self, params: dict[str, Any]): + if not self.common: + self.common.update(params) + else: + for k, v in self.common.items(): + if k not in self.removed and (k not in params or v != params[k]): + self.removed.add(k) + + def render(self) -> dict[str, Any]: + return {k: v for k, v in self.common.items() if k not in self.removed} + + class CrucibleService: """Support convenient generalized access to Crucible data @@ -213,12 +240,29 @@ class CrucibleService: # OpenSearch massive limit on hits in a single query BIGQUERY = 262144 - # 'run' document fields that support general `?filter=:` + # Define the 'run' document fields that support general filtering via + # `?filter=:` # # TODO: this excludes 'desc', which isn't used by the ilab runs, and needs - # different treatment as its a text field rather than a term. + # different treatment as it's a text field rather than a term. It's not an + # immediate priority for ilab, but may be important for general use. RUN_FILTERS = ("benchmark", "email", "name", "source", "harness", "host") + # Define the keywords for sorting. + DIRECTIONS = ("asc", "desc") + FIELDS = ( + "begin", + "benchmark", + "desc", + "email", + "end", + "harness", + "host", + "id", + "name", + "source", + ) + def __init__(self, configpath: str = "crucible"): """Initialize a Crucible CDM (OpenSearch) connection. @@ -441,7 +485,27 @@ def _build_filter_options(cls, filter: Optional[list[str]] = None) -> Tuple[ filter: list of filter terms like "param:key=value" Returns: - An OpenSearch filter list to apply the filters + A set of OpenSearch filter object lists to detect missing + and matching documents for params, tags, and run fields. 
For + example, to select param:batch-size=12 results in the + following param filter list: + + [ + {' + dis_max': { + 'queries': [ + { + 'bool': { + 'must': [ + {'term': {'param.arg': 'batch-size'}}, + {'term': {'param.val': '12'}} + ] + } + } + ] + } + } + ] """ terms = defaultdict(list) for term in cls._split_list(filter): @@ -771,26 +835,13 @@ def _build_sort_terms(cls, sorters: Optional[list[str]]) -> list[dict[str, str]] if sorters: sort_terms = [] for s in sorters: - DIRECTIONS = ("asc", "desc") - FIELDS = ( - "begin", - "benchmark", - "desc", - "email", - "end", - "harness", - "host", - "id", - "name", - "source", - ) key, dir = s.split(":", maxsplit=1) - if dir not in DIRECTIONS: + if dir not in cls.DIRECTIONS: raise HTTPException( status.HTTP_400_BAD_REQUEST, f"Sort direction {dir!r} must be one of {','.join(DIRECTIONS)}", ) - if key not in FIELDS: + if key not in cls.FIELDS: raise HTTPException( status.HTTP_400_BAD_REQUEST, f"Sort key {key!r} must be one of {','.join(FIELDS)}", @@ -1078,6 +1129,7 @@ def get_runs( run["tags"] = tags.get(rid, {}) run["iterations"] = [] run["primary_metrics"] = set() + common = CommonParams() for i in iterations.get(rid, []): iparams = params.get(i["id"], {}) if "status" not in run: @@ -1085,16 +1137,7 @@ def get_runs( else: if i["status"] != "pass": run["status"] = i["status"] - if "params" not in run: - run["params"] = iparams.copy() - else: - # Iteration-specific parameter names or values are factored out - # of the run summary. (NOTE: listify the keys first so Python - # doesn't complain about deletion during the traversal.) - p = run["params"] - for k in list(p.keys()): - if k not in iparams or p[k] != iparams[k]: - del p[k] + common.add(iparams) run["primary_metrics"].add(i["primary-metric"]) run["iterations"].append( { @@ -1105,6 +1148,7 @@ def get_runs( "params": iparams, } ) + run["params"] = common.render() try: run["begin_date"] = self._format_timestamp(run["begin"]) run["end_date"] = self._format_timestamp(run["end"]) @@ -1185,17 +1229,10 @@ def get_params( # Filter out all parameter values that don't exist in all or which have # different values. if run: - common = {} - for iter, params in response.items(): - if not common: - common = dict(params) - else: - # We can't change a dict during iteration, so iterate over - # a list of the param keys. 
- for param in list(common.keys()): - if param not in params or params[param] != common[param]: - del common[param] - response["common"] = common + common = CommonParams() + for params in response.values(): + common.add(params) + response["common"] = common.render() return response def get_iterations(self, run: str, **kwargs) -> list[dict[str, Any]]: From 8b75d5f5e13e8c292a569b44e831de8be2a10d1e Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Wed, 9 Oct 2024 11:24:03 -0400 Subject: [PATCH 14/29] Add framework for UI multi-run comparison --- frontend/src/actions/ilabActions.js | 54 +++++++++++++++++++ frontend/src/actions/types.js | 1 + .../organisms/TableFilters/index.jsx | 2 +- .../src/components/templates/ILab/index.jsx | 40 +++++++++++--- frontend/src/reducers/ilabReducer.js | 3 ++ 5 files changed, 93 insertions(+), 7 deletions(-) diff --git a/frontend/src/actions/ilabActions.js b/frontend/src/actions/ilabActions.js index 1152846b..fcdeb286 100644 --- a/frontend/src/actions/ilabActions.js +++ b/frontend/src/actions/ilabActions.js @@ -169,6 +169,60 @@ export const fetchGraphData = dispatch({ type: TYPES.GRAPH_COMPLETED }); }; +export const fetchMultiGraphData = + (uids, metric) => async (dispatch, getState) => { + try { + const graphData = cloneDeep(getState().ilab.multiGraphData); + const filterData = graphData.filter((i) => !isEqual(i.uids, uids)); + dispatch({ + type: TYPES.SET_ILAB_MULTIGRAPH_DATA, + payload: filterData, + }); + const copyData = cloneDeep(filterData); + dispatch({ type: TYPES.GRAPH_LOADING }); + uids.forEach((uid) => { + const periods = getState().ilab.periods.find((i) => i.uid == uid); + let graphs = []; + periods?.periods?.forEach((p) => { + graphs.push({ + run: uid, + metric: p.primary_metric, + periods: [p.id], + }); + graphs.push({ + run: uid, + metric, + aggregate: true, + periods: [p.id], + }); + }); + }); + const response = await API.post(`/api/v1/ilab/runs/multigraph`, { + name: "comparison", + graphs, + }); + if (response.status === 200) { + copyData.push({ + uids, + data: response.data.data, + layout: response.data.layout, + }); + dispatch({ + type: TYPES.SET_ILAB_MULTIGRAPH_DATA, + payload: copyData, + }); + } + } catch (error) { + console.error( + `ERROR (${error?.response?.status}): ${JSON.stringify( + error?.response?.data + )}` + ); + dispatch(showFailureToast()); + } + dispatch({ type: TYPES.GRAPH_COMPLETED }); + }; + export const setIlabPage = (pageNo) => ({ type: TYPES.SET_ILAB_PAGE, payload: pageNo, diff --git a/frontend/src/actions/types.js b/frontend/src/actions/types.js index 58d7506f..cc1d04c3 100644 --- a/frontend/src/actions/types.js +++ b/frontend/src/actions/types.js @@ -81,6 +81,7 @@ export const SET_TELCO_GRAPH_DATA = "SET_TELCO_GRAPH_DATA"; export const SET_ILAB_JOBS_DATA = "SET_ILAB_JOBS_DATA"; export const SET_ILAB_DATE_FILTER = "SET_ILAB_DATE_FILTER"; export const SET_ILAB_GRAPH_DATA = "SET_ILAB_GRAPH_DATA"; +export const SET_ILAB_MULTIGRAPH_DATA = "SET_ILAB_MULTIGRAPH_DATA"; export const SET_ILAB_TOTAL_ITEMS = "SET_ILAB_TOTAL_ITEMS"; export const SET_ILAB_OFFSET = "SET_ILAB_OFFSET"; export const SET_ILAB_PAGE = "SET_ILAB_PAGE"; diff --git a/frontend/src/components/organisms/TableFilters/index.jsx b/frontend/src/components/organisms/TableFilters/index.jsx index 0dd5885d..0d93df09 100644 --- a/frontend/src/components/organisms/TableFilters/index.jsx +++ b/frontend/src/components/organisms/TableFilters/index.jsx @@ -66,7 +66,7 @@ const TableFilter = (props) => { setDateFilter(date, key, navigation, type); }; const 
endDateChangeHandler = (date, key) => { - setDateFilter(key, date, navigation, type); + setDateFilter(date, key, navigation, type); }; return ( diff --git a/frontend/src/components/templates/ILab/index.jsx b/frontend/src/components/templates/ILab/index.jsx index d1eb5d62..a5ef4c0d 100644 --- a/frontend/src/components/templates/ILab/index.jsx +++ b/frontend/src/components/templates/ILab/index.jsx @@ -5,6 +5,7 @@ import { AccordionContent, AccordionItem, AccordionToggle, + Button, Card, CardBody, } from "@patternfly/react-core"; @@ -49,9 +50,9 @@ const ILab = () => { const newExpanded = index >= 0 ? [ - ...expanded.slice(0, index), - ...expanded.slice(index + 1, expanded.length), - ] + ...expanded.slice(0, index), + ...expanded.slice(index + 1, expanded.length), + ] : [...expanded, id]; setAccExpanded(newExpanded); }; @@ -73,6 +74,17 @@ const ILab = () => { (state) => state.ilab ); + const [selectedRuns, setSelectedRuns] = useState([]); + const setRunSelected = (run, isSelecting = true) => + setSelectedRuns((prevSelected) => { + const others = prevSelected.filter((r) => r != run.id); + return isSelecting ? [...others, run.id] : others; + }); + const selectAllRuns = (isSelecting = true) => + setSelectedRuns(isSelecting ? tableData.map((r) => r.id) : []); + const areAllRunsSelected = selectedRuns.length === tableData.length; + const isRunSelected = (run) => selectedRuns.includes(run.id); + useEffect(() => { if (searchParams.size > 0) { // date filter is set apart @@ -107,6 +119,7 @@ const ILab = () => { return ( <> + {selectedRuns.length > 1 && } { showColumnMenu={false} navigation={navigate} /> - +
- + + @@ -128,6 +148,14 @@ const ILab = () => { {tableData.map((item, rowIndex) => ( <> + - +
selectAllRuns(isSelecting), + isSelected: areAllRunsSelected, + }} + /> {columnNames.metric} {columnNames.begin_date}
+ setRunSelected(item, isSelecting), + isSelected: isRunSelected(item), + }} + /> {
diff --git a/frontend/src/reducers/ilabReducer.js b/frontend/src/reducers/ilabReducer.js index 2f1bcd91..9f71d337 100644 --- a/frontend/src/reducers/ilabReducer.js +++ b/frontend/src/reducers/ilabReducer.js @@ -5,6 +5,7 @@ const initialState = { start_date: "", end_date: "", graphData: [], + multiGraphData: [], totalItems: 0, page: 1, perPage: 10, @@ -53,6 +54,8 @@ const ILabReducer = (state = initialState, action = {}) => { return { ...state, graphData: payload }; case TYPES.SET_ILAB_INIT_JOBS: return { ...state, tableData: payload }; + case TYPES.SET_ILAB_MULTIGRAPH_DATA: + return { ...state, multiGraphData: payload }; default: return state; } From c8cd5970c37a530e99497a3e0c78bdeb5d156a45 Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Thu, 10 Oct 2024 12:01:26 -0400 Subject: [PATCH 15/29] Some UI cleanup --- frontend/src/actions/ilabActions.js | 32 +++++++++++-------- .../templates/ILab/MetricsDropdown.jsx | 2 +- 2 files changed, 19 insertions(+), 15 deletions(-) diff --git a/frontend/src/actions/ilabActions.js b/frontend/src/actions/ilabActions.js index fcdeb286..8db54208 100644 --- a/frontend/src/actions/ilabActions.js +++ b/frontend/src/actions/ilabActions.js @@ -122,7 +122,7 @@ export const fetchPeriods = (uid) => async (dispatch) => { }; export const fetchGraphData = - (uid, metric, primary_metric) => async (dispatch, getState) => { + (uid, metric = null) => async (dispatch, getState) => { try { const periods = getState().ilab.periods.find((i) => i.uid == uid); const graphData = cloneDeep(getState().ilab.graphData); @@ -136,15 +136,17 @@ export const fetchGraphData = let graphs = []; periods?.periods?.forEach((p) => { graphs.push({ metric: p.primary_metric, periods: [p.id] }); - graphs.push({ - metric, - aggregate: true, - periods: [p.id], - }); + if (metric) { + graphs.push({ + metric, + aggregate: true, + periods: [p.id], + }); + } }); const response = await API.post(`/api/v1/ilab/runs/multigraph`, { run: uid, - name: primary_metric, + name: `graph ${uid}`, graphs, }); if (response.status === 200) { @@ -170,7 +172,7 @@ export const fetchGraphData = }; export const fetchMultiGraphData = - (uids, metric) => async (dispatch, getState) => { + (uids, metric = null) => async (dispatch, getState) => { try { const graphData = cloneDeep(getState().ilab.multiGraphData); const filterData = graphData.filter((i) => !isEqual(i.uids, uids)); @@ -189,12 +191,14 @@ export const fetchMultiGraphData = metric: p.primary_metric, periods: [p.id], }); - graphs.push({ - run: uid, - metric, - aggregate: true, - periods: [p.id], - }); + if (metric) { + graphs.push({ + run: uid, + metric, + aggregate: true, + periods: [p.id], + }); + } }); }); const response = await API.post(`/api/v1/ilab/runs/multigraph`, { diff --git a/frontend/src/components/templates/ILab/MetricsDropdown.jsx b/frontend/src/components/templates/ILab/MetricsDropdown.jsx index f301953d..54424ed4 100644 --- a/frontend/src/components/templates/ILab/MetricsDropdown.jsx +++ b/frontend/src/components/templates/ILab/MetricsDropdown.jsx @@ -41,7 +41,7 @@ const MetricsSelect = (props) => { //setSelected(run[1].trim()); dispatch(setSelectedMetrics(run[0].trim(), run[1].trim())); setIsOpen(false); - dispatch(fetchGraphData(run[0].trim(), run[1].trim(), run[2].trim())); + dispatch(fetchGraphData(run[0].trim(), run[1].trim())); }; const metricsDataCopy = cloneDeep(metrics); From 3b0b19087298884102907e8982ed6be0ff0a45bb Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Fri, 11 Oct 2024 08:25:42 -0400 Subject: [PATCH 16/29] Debug unhandled 
exceptions + add some method documentation + misc review feedback --- backend/app/api/v1/endpoints/ilab/ilab.py | 6 --- backend/app/main.py | 46 +++++++++++------------ backend/app/services/crucible_svc.py | 20 +++++++--- 3 files changed, 36 insertions(+), 36 deletions(-) diff --git a/backend/app/api/v1/endpoints/ilab/ilab.py b/backend/app/api/v1/endpoints/ilab/ilab.py index d83290a8..9a4c7bbc 100644 --- a/backend/app/api/v1/endpoints/ilab/ilab.py +++ b/backend/app/api/v1/endpoints/ilab/ilab.py @@ -31,12 +31,6 @@ def crucible_svc(): try: crucible = CrucibleService(CONFIGPATH) yield crucible - except Exception as e: - print(f"Error opening {CONFIGPATH}: {str(e)!r}") - raise HTTPException( - status.HTTP_502_BAD_GATEWAY, - f"Crucible service is not available: {str(e)!r}", - ) finally: if crucible: crucible.close() diff --git a/backend/app/main.py b/backend/app/main.py index 727c9b55..90a41de0 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -21,6 +21,25 @@ def render(self, content: typing.Any) -> bytes: "localhost:3000" ] + +async def report_exceptions(request: Request, e: Exception): + if isinstance(e, HTTPException): + raise + tb = e.__traceback__ + print(f"Unhandled exception {e.__class__.__name__}: {str(e)}") + where = "unknown" + while tb is not None: + where = f"{tb.tb_frame.f_code.co_filename}:{tb.tb_lineno}" + print( + f" {where} {tb.tb_frame.f_code.co_name}", + file=sys.stderr, + ) + tb = tb.tb_next + return JSONResponse( + status_code=500, + content={"detail": f"Unhandled server error at {where}: {str(e)}"}, + ) + app = FastAPI(default_response_class=ORJSONResponse, docs_url="/docs", redoc_url=None, @@ -33,31 +52,8 @@ def render(self, content: typing.Any) -> bytes: license_info={ "name": "Apache 2.0", "url": "https://www.apache.org/licenses/LICENSE-2.0", - }) - - -@app.middleware("http") -async def report_exceptions(request: Request, call_next): - try: - return await call_next(request) - except Exception as e: - if isinstance(e, HTTPException): - raise - tb = e.__traceback__ - print(f"Unhandled exception {e.__class__.__name__}: {str(e)}") - where = "unknown" - while tb is not None: - where = f"{tb.tb_frame.f_code.co_filename}:{tb.tb_lineno}" - print( - f" {where} {tb.tb_frame.f_code.co_name}", - file=sys.stderr, - ) - tb = tb.tb_next - return JSONResponse( - status_code=500, - content={"message": f"Unhandled server error at {where}: {str(e)}"}, - ) - + }, + exception_handlers={Exception: report_exceptions}) app.add_middleware( CORSMiddleware, diff --git a/backend/app/services/crucible_svc.py b/backend/app/services/crucible_svc.py index bf00d75b..9acf20ea 100644 --- a/backend/app/services/crucible_svc.py +++ b/backend/app/services/crucible_svc.py @@ -216,18 +216,28 @@ class CommonParams: def __init__(self): self.common: dict[str, Any] = {} - self.removed = set() + self.omit = set() def add(self, params: dict[str, Any]): + """Add a new iteration into the param set + + Mark all parameter keys which don't appear in all iterations, or which + have different values in at least one iteration, to be omitted from the + merged "common" param set. 
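+
+        For example (a minimal illustration; the values are made up):
+
+            cp = CommonParams()
+            cp.add({"gpus": "4", "batch-size": "12"})
+            cp.add({"gpus": "4", "batch-size": "16"})
+            cp.render()  # {"gpus": "4"}; "batch-size" differs, so it is omitted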
+ + Args: + params: the param dictionary of an iteration + """ if not self.common: self.common.update(params) else: for k, v in self.common.items(): - if k not in self.removed and (k not in params or v != params[k]): - self.removed.add(k) + if k not in self.omit and (k not in params or v != params[k]): + self.omit.add(k) def render(self) -> dict[str, Any]: - return {k: v for k, v in self.common.items() if k not in self.removed} + """Return a new param set with only common params""" + return {k: v for k, v in self.common.items() if k not in self.omit} class CrucibleService: @@ -783,7 +793,7 @@ def _get_metric_ids( names[n].add(v) # We want to help filter a consistent summary, so only show those - # names with more than one value. + # breakout names with more than one value. response["names"] = {n: sorted(v) for n, v in names.items() if v and len(v) > 1} response["periods"] = list(periods) raise HTTPException( From b32a6064960c4da16c1d66e8bd18523ae49e2cc2 Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Fri, 11 Oct 2024 12:04:53 -0400 Subject: [PATCH 17/29] Fix multigraph bug Multigraph API failed if more than one `Graph` element specified the same run; fix to be smarter about missing run IDs. This also contains experimental code to expose per-iteration param values, which doesn't quite work but doesn't seem to hurt anything. --- backend/app/services/crucible_svc.py | 10 +++++++--- frontend/src/components/templates/ILab/index.jsx | 12 +++++++++++- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/backend/app/services/crucible_svc.py b/backend/app/services/crucible_svc.py index 9acf20ea..f5098306 100644 --- a/backend/app/services/crucible_svc.py +++ b/backend/app/services/crucible_svc.py @@ -1804,11 +1804,15 @@ def get_metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: run_id_list = [] if default_run_id: run_id_list.append(default_run_id) + run_id_missing = False for g in graphdata.graphs: - if g.run and g.run not in run_id_list: - run_id_list.append(g.run) + if g.run: + if g.run not in run_id_list: + run_id_list.append(g.run) + else: + run_id_missing = True - if len(run_id_list) < len(graphdata.graphs) and not default_run_id: + if run_id_missing and not default_run_id: raise HTTPException( status.HTTP_400_BAD_REQUEST, "each graph request must have a run ID" ) diff --git a/frontend/src/components/templates/ILab/index.jsx b/frontend/src/components/templates/ILab/index.jsx index a5ef4c0d..7949eb1a 100644 --- a/frontend/src/components/templates/ILab/index.jsx +++ b/frontend/src/components/templates/ILab/index.jsx @@ -220,9 +220,19 @@ const ILab = () => { + {item?.iterations && + item.iterations.forEach((i) => { + i?.params && ( + + ); + })} From 2148311b81fe3b74300683d55c08867f10dfa42a Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Fri, 11 Oct 2024 15:40:00 -0400 Subject: [PATCH 18/29] Support for per-iteration parameters. (And `/api/v1/ilab/runs` reports iterations in numerical order.) 
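
The intended flow, roughly (an illustrative Python sketch using the CommonParams
helper added earlier in this series, not the exact frontend code; the variable
names are made up):

    iterations.sort(key=lambda i: i["iteration"])  # numerical, not lexical, order
    common = CommonParams()
    for i in iterations:
        common.add(i["params"])
    run_params = common.render()   # parameters shared by every iteration
    unique = {
        i["iteration"]: {k: v for k, v in i["params"].items() if k not in run_params}
        for i in iterations
    }                              # what the UI lists per iteration
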
--- backend/app/services/crucible_svc.py | 1 + .../src/components/templates/ILab/index.jsx | 54 +++++++++++++++---- 2 files changed, 44 insertions(+), 11 deletions(-) diff --git a/backend/app/services/crucible_svc.py b/backend/app/services/crucible_svc.py index f5098306..a50632bd 100644 --- a/backend/app/services/crucible_svc.py +++ b/backend/app/services/crucible_svc.py @@ -1158,6 +1158,7 @@ def get_runs( "params": iparams, } ) + run["iterations"].sort(key=lambda i: i["iteration"]) run["params"] = common.render() try: run["begin_date"] = self._format_timestamp(run["begin"]) diff --git a/frontend/src/components/templates/ILab/index.jsx b/frontend/src/components/templates/ILab/index.jsx index 7949eb1a..35025350 100644 --- a/frontend/src/components/templates/ILab/index.jsx +++ b/frontend/src/components/templates/ILab/index.jsx @@ -220,19 +220,51 @@ const ILab = () => { - {item?.iterations && - item.iterations.forEach((i) => { - i?.params && ( - - ); - })} + {item.iterations.length > 1 && ( +
+ + + { + onToggle("bordered-toggle3"); + }} + isExpanded={expanded.includes( + "bordered-toggle3" + )} + id="bordered-toggle3" + > + {`Unique parameters for ${item.iterations.length} Iterations`} + + + {item.iterations.map((i) => ( + !(i[0] in item.params) + )} + /> + ))} + + + +
+ )}
From 312eebf4a85e5d43a73a8fb9b1c5612f42255d7a Mon Sep 17 00:00:00 2001 From: MVarshini Date: Fri, 11 Oct 2024 21:05:46 +0530 Subject: [PATCH 19/29] comparison --- frontend/src/actions/ilabActions.js | 114 +++---- frontend/src/actions/types.js | 1 + .../organisms/TableFilters/index.jsx | 17 + .../organisms/TableFilters/index.less | 4 + .../templates/ILab/IlabCompareComponent.jsx | 86 +++++ .../templates/ILab/IlabExpandedRow.jsx | 111 +++++++ .../templates/ILab/MetricsDropdown.jsx | 4 + .../src/components/templates/ILab/index.jsx | 294 ++++++------------ .../src/components/templates/ILab/index.less | 13 + frontend/src/reducers/ilabReducer.js | 3 + 10 files changed, 389 insertions(+), 258 deletions(-) create mode 100644 frontend/src/components/templates/ILab/IlabCompareComponent.jsx create mode 100644 frontend/src/components/templates/ILab/IlabExpandedRow.jsx diff --git a/frontend/src/actions/ilabActions.js b/frontend/src/actions/ilabActions.js index 8db54208..3c02fbab 100644 --- a/frontend/src/actions/ilabActions.js +++ b/frontend/src/actions/ilabActions.js @@ -1,9 +1,10 @@ import * as API_ROUTES from "@/utils/apiConstants"; import * as TYPES from "./types.js"; +import { cloneDeep, isEqual } from "lodash"; + import API from "@/utils/axiosInstance"; import { appendQueryString } from "@/utils/helper"; -import { cloneDeep } from "lodash"; import { showFailureToast } from "@/actions/toastActions"; export const fetchILabJobs = @@ -122,7 +123,8 @@ export const fetchPeriods = (uid) => async (dispatch) => { }; export const fetchGraphData = - (uid, metric = null) => async (dispatch, getState) => { + (uid, metric = null) => + async (dispatch, getState) => { try { const periods = getState().ilab.periods.find((i) => i.uid == uid); const graphData = cloneDeep(getState().ilab.graphData); @@ -171,61 +173,64 @@ export const fetchGraphData = dispatch({ type: TYPES.GRAPH_COMPLETED }); }; -export const fetchMultiGraphData = - (uids, metric = null) => async (dispatch, getState) => { - try { - const graphData = cloneDeep(getState().ilab.multiGraphData); - const filterData = graphData.filter((i) => !isEqual(i.uids, uids)); - dispatch({ - type: TYPES.SET_ILAB_MULTIGRAPH_DATA, - payload: filterData, - }); - const copyData = cloneDeep(filterData); - dispatch({ type: TYPES.GRAPH_LOADING }); - uids.forEach((uid) => { - const periods = getState().ilab.periods.find((i) => i.uid == uid); - let graphs = []; - periods?.periods?.forEach((p) => { - graphs.push({ - run: uid, - metric: p.primary_metric, - periods: [p.id], - }); - if (metric) { - graphs.push({ - run: uid, - metric, - aggregate: true, - periods: [p.id], - }); - } +export const fetchMultiGraphData = (uids) => async (dispatch, getState) => { + try { + const graphData = cloneDeep(getState().ilab.multiGraphData); + const filterData = graphData.filter((i) => !isEqual(i.uids, uids)); + dispatch({ + type: TYPES.SET_ILAB_MULTIGRAPH_DATA, + payload: filterData, + }); + const copyData = cloneDeep(filterData); + dispatch({ type: TYPES.GRAPH_LOADING }); + + let graphs = []; + uids.forEach(async (uid) => { + // if (!periods) { + await dispatch(fetchPeriods(uid)); + //} + + const periods = getState().ilab.periods.find((i) => i.uid == uid); + periods?.periods?.forEach((p) => { + graphs.push({ + run: uid, + metric: p.primary_metric, + periods: [p.id], }); + // graphs.push({ + // run: uid, + // metric, + // aggregate: true, + // periods: [p.id], + // }); }); - const response = await API.post(`/api/v1/ilab/runs/multigraph`, { - name: "comparison", - graphs, + }); + 
console.log(graphs); + const response = await API.post(`/api/v1/ilab/runs/multigraph`, { + name: "comparison", + graphs, + }); + if (response.status === 200) { + copyData.push({ + uids, + data: response.data.data, + layout: response.data.layout, + }); + dispatch({ + type: TYPES.SET_ILAB_MULTIGRAPH_DATA, + payload: copyData, }); - if (response.status === 200) { - copyData.push({ - uids, - data: response.data.data, - layout: response.data.layout, - }); - dispatch({ - type: TYPES.SET_ILAB_MULTIGRAPH_DATA, - payload: copyData, - }); - } - } catch (error) { - console.error( - `ERROR (${error?.response?.status}): ${JSON.stringify( - error?.response?.data - )}` - ); - dispatch(showFailureToast()); } - dispatch({ type: TYPES.GRAPH_COMPLETED }); - }; + } catch (error) { + console.error( + `ERROR (${error?.response?.status}): ${JSON.stringify( + error?.response?.data + )}` + ); + dispatch(showFailureToast()); + } + dispatch({ type: TYPES.GRAPH_COMPLETED }); +}; export const setIlabPage = (pageNo) => ({ type: TYPES.SET_ILAB_PAGE, @@ -269,3 +274,6 @@ export const tableReCalcValues = () => (dispatch, getState) => { const endIdx = page !== 1 ? page * perPage - 1 : perPage; dispatch(sliceIlabTableRows(startIdx, endIdx)); }; +export const toggleComparisonSwitch = () => ({ + type: TYPES.TOGGLE_COMPARISON_SWITCH, +}); diff --git a/frontend/src/actions/types.js b/frontend/src/actions/types.js index cc1d04c3..7afeb6b5 100644 --- a/frontend/src/actions/types.js +++ b/frontend/src/actions/types.js @@ -90,3 +90,4 @@ export const SET_ILAB_METRICS = "SET_ILAB_METRICS"; export const SET_ILAB_SELECTED_METRICS = "SET_ILAB_SELECTED_METRICS"; export const SET_ILAB_PERIODS = "SET_ILAB_PERIODS"; export const SET_ILAB_INIT_JOBS = "SET_ILAB_INIT_JOBS"; +export const TOGGLE_COMPARISON_SWITCH = "TOGGLE_COMPARISON_SWITCH"; diff --git a/frontend/src/components/organisms/TableFilters/index.jsx b/frontend/src/components/organisms/TableFilters/index.jsx index 0d93df09..2f702a97 100644 --- a/frontend/src/components/organisms/TableFilters/index.jsx +++ b/frontend/src/components/organisms/TableFilters/index.jsx @@ -5,6 +5,7 @@ import "./index.less"; import { Chip, ChipGroup, + Switch, Toolbar, ToolbarContent, ToolbarItem, @@ -39,6 +40,8 @@ const TableFilter = (props) => { setColumns, selectedFilters, updateSelectedFilter, + onSwitchChange, + isSwitchChecked, } = props; const category = @@ -123,6 +126,18 @@ const TableFilter = (props) => { )} + {type === "ilab" && ( + + + + + + )} {appliedFilters && Object.keys(appliedFilters).length > 0 && @@ -154,5 +169,7 @@ TableFilter.propTypes = { selectedFilters: PropTypes.array, updateSelectedFilter: PropTypes.func, navigation: PropTypes.func, + isSwitchChecked: PropTypes.bool, + onSwitchChange: PropTypes.func, }; export default TableFilter; diff --git a/frontend/src/components/organisms/TableFilters/index.less b/frontend/src/components/organisms/TableFilters/index.less index b100a012..1a479703 100644 --- a/frontend/src/components/organisms/TableFilters/index.less +++ b/frontend/src/components/organisms/TableFilters/index.less @@ -11,4 +11,8 @@ .to-text { padding: 5px 0; } + #comparison-switch { + margin-left: auto; + align-content: center; + } } \ No newline at end of file diff --git a/frontend/src/components/templates/ILab/IlabCompareComponent.jsx b/frontend/src/components/templates/ILab/IlabCompareComponent.jsx new file mode 100644 index 00000000..efaf739c --- /dev/null +++ b/frontend/src/components/templates/ILab/IlabCompareComponent.jsx @@ -0,0 +1,86 @@ +import { + Button, + Menu, + 
MenuContent, + MenuItem, + MenuList, + Title, +} from "@patternfly/react-core"; + +import PropTypes from "prop-types"; +import { fetchMultiGraphData } from "@/actions/ilabActions.js"; +import { uid } from "@/utils/helper"; +import { useDispatch } from "react-redux"; +import { useState } from "react"; + +const IlabCompareComponent = (props) => { + const { data } = props; + + const dispatch = useDispatch(); + const [selectedItems, setSelectedItems] = useState([]); + const onSelect = (_event, itemId) => { + const item = itemId; + if (selectedItems.includes(item)) { + setSelectedItems(selectedItems.filter((id) => id !== item)); + } else { + setSelectedItems([...selectedItems, item]); + } + }; + return ( +
+
+ + Metrics + + + {/* + {data.map((item) => { + return ( + // + + ); + })} + + */} + + + + {data.map((item) => { + return ( + + {item.primary_metrics[0]} + + ); + })} + + + +
+ + + Chart + +
+ ); +}; + +IlabCompareComponent.propTypes = { + data: PropTypes.array, +}; +export default IlabCompareComponent; diff --git a/frontend/src/components/templates/ILab/IlabExpandedRow.jsx b/frontend/src/components/templates/ILab/IlabExpandedRow.jsx new file mode 100644 index 00000000..7c09fc71 --- /dev/null +++ b/frontend/src/components/templates/ILab/IlabExpandedRow.jsx @@ -0,0 +1,111 @@ +import { + Accordion, + AccordionContent, + AccordionItem, + AccordionToggle, + Card, + CardBody, +} from "@patternfly/react-core"; + +import ILabGraph from "./ILabGraph"; +import MetaRow from "./MetaRow"; +import MetricsSelect from "./MetricsDropdown"; +import PropTypes from "prop-types"; +import { uid } from "@/utils/helper"; +import { useState } from "react"; + +const IlabRowContent = (props) => { + const { item } = props; + const [expanded, setAccExpanded] = useState(["bordered-toggle1"]); + const onToggle = (id) => { + const index = expanded.indexOf(id); + const newExpanded = + index >= 0 + ? [ + ...expanded.slice(0, index), + ...expanded.slice(index + 1, expanded.length), + ] + : [...expanded, id]; + setAccExpanded(newExpanded); + }; + + return ( + + + { + onToggle("bordered-toggle1"); + }} + isExpanded={expanded.includes("bordered-toggle1")} + id="bordered-toggle1" + > + Metadata + + + +
+ + + + + + + + + + + + + + + +
+
+
+ + { + onToggle("bordered-toggle2"); + }} + isExpanded={expanded.includes("bordered-toggle2")} + id="bordered-toggle2" + > + Metrics & Graph + + + Metrics: +
+ +
+
+
+
+ ); +}; +IlabRowContent.propTypes = { + item: PropTypes.object, +}; +export default IlabRowContent; diff --git a/frontend/src/components/templates/ILab/MetricsDropdown.jsx b/frontend/src/components/templates/ILab/MetricsDropdown.jsx index 54424ed4..2cdebb79 100644 --- a/frontend/src/components/templates/ILab/MetricsDropdown.jsx +++ b/frontend/src/components/templates/ILab/MetricsDropdown.jsx @@ -7,6 +7,7 @@ import { import { fetchGraphData, setSelectedMetrics } from "@/actions/ilabActions"; import { useDispatch, useSelector } from "react-redux"; +import PropTypes from "prop-types"; import { cloneDeep } from "lodash"; import { uid } from "@/utils/helper"; import { useState } from "react"; @@ -83,4 +84,7 @@ const MetricsSelect = (props) => { ); }; +MetricsSelect.propTypes = { + item: PropTypes.object, +}; export default MetricsSelect; diff --git a/frontend/src/components/templates/ILab/index.jsx b/frontend/src/components/templates/ILab/index.jsx index 35025350..2e446a30 100644 --- a/frontend/src/components/templates/ILab/index.jsx +++ b/frontend/src/components/templates/ILab/index.jsx @@ -1,14 +1,5 @@ import "./index.less"; -import { - Accordion, - AccordionContent, - AccordionItem, - AccordionToggle, - Button, - Card, - CardBody, -} from "@patternfly/react-core"; import { ExpandableRowContent, Table, @@ -23,15 +14,16 @@ import { fetchMetricsInfo, fetchPeriods, setIlabDateFilter, + toggleComparisonSwitch, } from "@/actions/ilabActions"; import { formatDateTime, uid } from "@/utils/helper"; import { useDispatch, useSelector } from "react-redux"; import { useEffect, useState } from "react"; import { useNavigate, useSearchParams } from "react-router-dom"; -import ILabGraph from "./ILabGraph"; -import MetaRow from "./MetaRow"; -import MetricsSelect from "./MetricsDropdown"; +// import { Button } from "@patternfly/react-core"; +import IlabCompareComponent from "./IlabCompareComponent"; +import IlabRowContent from "./IlabExpandedRow"; import RenderPagination from "@/components/organisms/Pagination"; import StatusCell from "./StatusCell"; import TableFilter from "@/components/organisms/TableFilters"; @@ -41,21 +33,18 @@ const ILab = () => { const navigate = useNavigate(); const [searchParams] = useSearchParams(); - const { start_date, end_date } = useSelector((state) => state.ilab); + const { + results, + start_date, + end_date, + comparisonSwitch, + tableData, + page, + perPage, + totalItems, + } = useSelector((state) => state.ilab); const [expandedResult, setExpandedResult] = useState([]); - const [expanded, setAccExpanded] = useState(["bordered-toggle1"]); - const onToggle = (id) => { - const index = expanded.indexOf(id); - const newExpanded = - index >= 0 - ? [ - ...expanded.slice(0, index), - ...expanded.slice(index + 1, expanded.length), - ] - : [...expanded, id]; - setAccExpanded(newExpanded); - }; const isResultExpanded = (res) => expandedResult?.includes(res); const setExpanded = async (run, isExpanding = true) => { setExpandedResult((prevExpanded) => { @@ -70,20 +59,17 @@ const ILab = () => { } }; - const { totalItems, page, perPage, tableData } = useSelector( - (state) => state.ilab - ); - - const [selectedRuns, setSelectedRuns] = useState([]); - const setRunSelected = (run, isSelecting = true) => - setSelectedRuns((prevSelected) => { - const others = prevSelected.filter((r) => r != run.id); - return isSelecting ? [...others, run.id] : others; - }); - const selectAllRuns = (isSelecting = true) => - setSelectedRuns(isSelecting ? 
tableData.map((r) => r.id) : []); - const areAllRunsSelected = selectedRuns.length === tableData.length; - const isRunSelected = (run) => selectedRuns.includes(run.id); + // const [selectedRuns, setSelectedRuns] = useState([]); + // const setRunSelected = (run, isSelecting = true) => + // setSelectedRuns((prevSelected) => { + // console.log(`Adding ${run}, ${isSelecting}`); + // const others = prevSelected.filter((r) => r != run.id); + // return isSelecting ? [...others, run.id] : others; + // }); + // const selectAllRuns = (isSelecting = true) => + // setSelectedRuns(isSelecting ? results.map((r) => r.id) : []); + // const areAllRunsSelected = selectedRuns.length === results.length; + // const isRunSelected = (run) => selectedRuns.includes(run.id); useEffect(() => { if (searchParams.size > 0) { @@ -117,193 +103,91 @@ const ILab = () => { status: "Status", }; + const onSwitchChange = () => { + dispatch(toggleComparisonSwitch()); + }; return ( <> - {selectedRuns.length > 1 && } + {/* {selectedRuns.length > 1 && } */} + - - - - + + + + + + + + + ))} + +
+ ) : ( + <> + + + + {/* - - - - - - - {tableData.map((item, rowIndex) => ( - <> - - + + + + + + + {tableData.map((item, rowIndex) => ( + <> + + {/* - - - - - - - - - ))} - -
selectAllRuns(isSelecting), isSelected: areAllRunsSelected, }} - /> - - {columnNames.metric}{columnNames.begin_date}{columnNames.end_date}{columnNames.status}
*/} + + {columnNames.metric}{columnNames.begin_date}{columnNames.end_date}{columnNames.status}
setRunSelected(item, isSelecting), isSelected: isRunSelected(item), }} - /> - - setExpanded(item, !isResultExpanded(item.id)), - expandId: `expandId-${uid()}`, - }} - /> - - {item.primary_metrics[0]}{formatDateTime(item.begin_date)}{formatDateTime(item.end_date)} - -
- - - - { - onToggle("bordered-toggle1"); - }} - isExpanded={expanded.includes("bordered-toggle1")} - id="bordered-toggle1" - > - Metadata - + /> */} + + setExpanded(item, !isResultExpanded(item.id)), + expandId: `expandId-${uid()}`, + }} + /> - -
- - - - - - - - - - - - - - {item.iterations.length > 1 && ( -
- - - { - onToggle("bordered-toggle3"); - }} - isExpanded={expanded.includes( - "bordered-toggle3" - )} - id="bordered-toggle3" - > - {`Unique parameters for ${item.iterations.length} Iterations`} - - - {item.iterations.map((i) => ( - !(i[0] in item.params) - )} - /> - ))} - - - -
- )} -
-
-
-
- - - { - onToggle("bordered-toggle2"); - }} - isExpanded={expanded.includes("bordered-toggle2")} - id="bordered-toggle2" - > - Metrics & Graph - - - Metrics: -
- -
-
-
- - -
- +
{item.primary_metrics[0]}{formatDateTime(item.begin_date)}{formatDateTime(item.end_date)} + +
+ + + +
+ + + )} ); }; diff --git a/frontend/src/components/templates/ILab/index.less b/frontend/src/components/templates/ILab/index.less index 02cd02cb..ad9b41ba 100644 --- a/frontend/src/components/templates/ILab/index.less +++ b/frontend/src/components/templates/ILab/index.less @@ -11,3 +11,16 @@ margin-right: 1.5vw; } } +.comparison-container { + display: flex; + .metrics-container { + width: 20vw; + padding: 10px; + .compare-btn { + margin: 2vh 0; + } + } + .title { + margin-bottom: 2vh; + } +} \ No newline at end of file diff --git a/frontend/src/reducers/ilabReducer.js b/frontend/src/reducers/ilabReducer.js index 9f71d337..582679f8 100644 --- a/frontend/src/reducers/ilabReducer.js +++ b/frontend/src/reducers/ilabReducer.js @@ -15,6 +15,7 @@ const initialState = { periods: [], metrics_selected: {}, tableData: [], + comparisonSwitch: false, }; const ILabReducer = (state = initialState, action = {}) => { const { type, payload } = action; @@ -56,6 +57,8 @@ const ILabReducer = (state = initialState, action = {}) => { return { ...state, tableData: payload }; case TYPES.SET_ILAB_MULTIGRAPH_DATA: return { ...state, multiGraphData: payload }; + case TYPES.TOGGLE_COMPARISON_SWITCH: + return { ...state, comparisonSwitch: !state.comparisonSwitch }; default: return state; } From 0d7592a6730ec5412961157e92c141644c492eea Mon Sep 17 00:00:00 2001 From: MVarshini Date: Tue, 15 Oct 2024 17:56:27 +0530 Subject: [PATCH 20/29] render graph --- frontend/src/actions/ilabActions.js | 58 +++++++++++++------ .../templates/ILab/IlabCompareComponent.jsx | 49 +++++++++------- .../src/components/templates/ILab/index.less | 10 +++- 3 files changed, 76 insertions(+), 41 deletions(-) diff --git a/frontend/src/actions/ilabActions.js b/frontend/src/actions/ilabActions.js index 3c02fbab..41504686 100644 --- a/frontend/src/actions/ilabActions.js +++ b/frontend/src/actions/ilabActions.js @@ -1,10 +1,9 @@ import * as API_ROUTES from "@/utils/apiConstants"; import * as TYPES from "./types.js"; -import { cloneDeep, isEqual } from "lodash"; - import API from "@/utils/axiosInstance"; import { appendQueryString } from "@/utils/helper"; +import { cloneDeep } from "lodash"; import { showFailureToast } from "@/actions/toastActions"; export const fetchILabJobs = @@ -173,24 +172,40 @@ export const fetchGraphData = dispatch({ type: TYPES.GRAPH_COMPLETED }); }; +export const handleMultiGraph = (uids) => async (dispatch, getState) => { + try { + const periods = getState().ilab.periods; + + const missingPeriods = periods.filter((item) => !uids.includes(item.uid)); + + const missingUids = + periods.length > 0 ? 
missingPeriods.map((item) => item.uid) : uids; + + await Promise.all( + missingUids.map(async (uid) => { + await dispatch(fetchPeriods(uid)); // Dispatch each item + }) + ); + + dispatch(fetchMultiGraphData(uids)); + } catch (error) { + console.error( + `ERROR (${error?.response?.status}): ${JSON.stringify( + error?.response?.data + )}` + ); + dispatch(showFailureToast()); + } +}; export const fetchMultiGraphData = (uids) => async (dispatch, getState) => { try { - const graphData = cloneDeep(getState().ilab.multiGraphData); - const filterData = graphData.filter((i) => !isEqual(i.uids, uids)); - dispatch({ - type: TYPES.SET_ILAB_MULTIGRAPH_DATA, - payload: filterData, - }); - const copyData = cloneDeep(filterData); - dispatch({ type: TYPES.GRAPH_LOADING }); + dispatch({ type: TYPES.LOADING }); + const periods = getState().ilab.periods; + const filterPeriods = periods.filter((item) => uids.includes(item.uid)); let graphs = []; uids.forEach(async (uid) => { - // if (!periods) { - await dispatch(fetchPeriods(uid)); - //} - - const periods = getState().ilab.periods.find((i) => i.uid == uid); + const periods = filterPeriods.find((i) => i.uid == uid); periods?.periods?.forEach((p) => { graphs.push({ run: uid, @@ -211,14 +226,19 @@ export const fetchMultiGraphData = (uids) => async (dispatch, getState) => { graphs, }); if (response.status === 200) { - copyData.push({ - uids, + response.data.layout["showlegend"] = true; + //response.data.layout["width"] = "1200px"; + response.data.layout["responsive"] = "true"; + response.data.layout["autosize"] = "true"; + response.data.layout["legend"] = { x: 0, y: 1 }; + const graphData = []; + graphData.push({ data: response.data.data, layout: response.data.layout, }); dispatch({ type: TYPES.SET_ILAB_MULTIGRAPH_DATA, - payload: copyData, + payload: graphData, }); } } catch (error) { @@ -229,7 +249,7 @@ export const fetchMultiGraphData = (uids) => async (dispatch, getState) => { ); dispatch(showFailureToast()); } - dispatch({ type: TYPES.GRAPH_COMPLETED }); + dispatch({ type: TYPES.COMPLETED }); }; export const setIlabPage = (pageNo) => ({ diff --git a/frontend/src/components/templates/ILab/IlabCompareComponent.jsx b/frontend/src/components/templates/ILab/IlabCompareComponent.jsx index efaf739c..32a654e8 100644 --- a/frontend/src/components/templates/ILab/IlabCompareComponent.jsx +++ b/frontend/src/components/templates/ILab/IlabCompareComponent.jsx @@ -1,3 +1,5 @@ +import "./index.less"; + import { Button, Menu, @@ -6,11 +8,13 @@ import { MenuList, Title, } from "@patternfly/react-core"; +import { useDispatch, useSelector } from "react-redux"; +import Plot from "react-plotly.js"; import PropTypes from "prop-types"; -import { fetchMultiGraphData } from "@/actions/ilabActions.js"; +import { cloneDeep } from "lodash"; +import { handleMultiGraph } from "@/actions/ilabActions.js"; import { uid } from "@/utils/helper"; -import { useDispatch } from "react-redux"; import { useState } from "react"; const IlabCompareComponent = (props) => { @@ -18,6 +22,10 @@ const IlabCompareComponent = (props) => { const dispatch = useDispatch(); const [selectedItems, setSelectedItems] = useState([]); + const { multiGraphData } = useSelector((state) => state.ilab); + const isGraphLoading = useSelector((state) => state.loading.isGraphLoading); + const graphDataCopy = cloneDeep(multiGraphData); + const onSelect = (_event, itemId) => { const item = itemId; if (selectedItems.includes(item)) { @@ -26,6 +34,9 @@ const IlabCompareComponent = (props) => { setSelectedItems([...selectedItems, 
item]); } }; + const dummy = () => { + dispatch(handleMultiGraph(selectedItems)); + }; return (
@@ -36,23 +47,10 @@ const IlabCompareComponent = (props) => { className="compare-btn" isDisabled={selectedItems.length < 2} isBlock - onClick={() => dispatch(fetchMultiGraphData(selectedItems))} + onClick={dummy} > Comapre - {/* - {data.map((item) => { - return ( - // - - ); - })} - - */} @@ -72,10 +70,21 @@ const IlabCompareComponent = (props) => {
- - - Chart - +
+ {isGraphLoading ? ( +
+ ) : ( + graphDataCopy?.length > 0 && ( +
+ +
+ ) + )} +
); }; diff --git a/frontend/src/components/templates/ILab/index.less b/frontend/src/components/templates/ILab/index.less index ad9b41ba..0e4051db 100644 --- a/frontend/src/components/templates/ILab/index.less +++ b/frontend/src/components/templates/ILab/index.less @@ -12,14 +12,20 @@ } } .comparison-container { - display: flex; + display: flex; .metrics-container { - width: 20vw; + width: 20%; padding: 10px; .compare-btn { margin: 2vh 0; } } + .chart-conatiner { + width: 80%; + .js-plotly-plot { + // width: 350vw; + } + } .title { margin-bottom: 2vh; } From e056d62afb8b1924f010e5606de42e06e755ab48 Mon Sep 17 00:00:00 2001 From: MVarshini Date: Tue, 15 Oct 2024 19:54:50 +0530 Subject: [PATCH 21/29] Pagination for Graphs --- frontend/src/actions/filterActions.js | 2 +- frontend/src/actions/ilabActions.js | 1 - .../templates/ILab/IlabCompareComponent.jsx | 30 ++++++++++++------- .../templates/ILab/IlabExpandedRow.jsx | 27 +++++++++++++++++ .../src/components/templates/ILab/index.jsx | 2 -- .../src/components/templates/ILab/index.less | 11 ++++++- 6 files changed, 57 insertions(+), 16 deletions(-) diff --git a/frontend/src/actions/filterActions.js b/frontend/src/actions/filterActions.js index f8fa5691..6385b0bc 100644 --- a/frontend/src/actions/filterActions.js +++ b/frontend/src/actions/filterActions.js @@ -79,7 +79,7 @@ export const setDateFilter = (date, key, navigation, currType) => { dispatch(setTelcoDateFilter(date, key, navigation)); } else if (currType === "ilab") { dispatch(setIlabDateFilter(date, key, navigation)); - dispatch(fetchILabJobs()); + dispatch(fetchILabJobs(true)); } }; diff --git a/frontend/src/actions/ilabActions.js b/frontend/src/actions/ilabActions.js index 41504686..795c0910 100644 --- a/frontend/src/actions/ilabActions.js +++ b/frontend/src/actions/ilabActions.js @@ -227,7 +227,6 @@ export const fetchMultiGraphData = (uids) => async (dispatch, getState) => { }); if (response.status === 200) { response.data.layout["showlegend"] = true; - //response.data.layout["width"] = "1200px"; response.data.layout["responsive"] = "true"; response.data.layout["autosize"] = "true"; response.data.layout["legend"] = { x: 0, y: 1 }; diff --git a/frontend/src/components/templates/ILab/IlabCompareComponent.jsx b/frontend/src/components/templates/ILab/IlabCompareComponent.jsx index 32a654e8..80781ef9 100644 --- a/frontend/src/components/templates/ILab/IlabCompareComponent.jsx +++ b/frontend/src/components/templates/ILab/IlabCompareComponent.jsx @@ -12,6 +12,7 @@ import { useDispatch, useSelector } from "react-redux"; import Plot from "react-plotly.js"; import PropTypes from "prop-types"; +import RenderPagination from "@/components/organisms/Pagination"; import { cloneDeep } from "lodash"; import { handleMultiGraph } from "@/actions/ilabActions.js"; import { uid } from "@/utils/helper"; @@ -19,7 +20,7 @@ import { useState } from "react"; const IlabCompareComponent = (props) => { const { data } = props; - + const { page, perPage, totalItems } = useSelector((state) => state.ilab); const dispatch = useDispatch(); const [selectedItems, setSelectedItems] = useState([]); const { multiGraphData } = useSelector((state) => state.ilab); @@ -49,7 +50,7 @@ const IlabCompareComponent = (props) => { isBlock onClick={dummy} > - Comapre + Compare @@ -69,20 +70,27 @@ const IlabCompareComponent = (props) => { +
{isGraphLoading ? (
+ ) : graphDataCopy?.length > 0 && + graphDataCopy?.[0]?.data?.length > 0 ? ( +
+ +
) : ( - graphDataCopy?.length > 0 && ( -
- -
- ) +
No data to compare
)}
diff --git a/frontend/src/components/templates/ILab/IlabExpandedRow.jsx b/frontend/src/components/templates/ILab/IlabExpandedRow.jsx index 7c09fc71..01f008da 100644 --- a/frontend/src/components/templates/ILab/IlabExpandedRow.jsx +++ b/frontend/src/components/templates/ILab/IlabExpandedRow.jsx @@ -102,6 +102,33 @@ const IlabRowContent = (props) => { + {item.iterations.length > 1 && ( + + { + onToggle("bordered-toggle3"); + }} + isExpanded={expanded.includes("bordered-toggle3")} + id="bordered-toggle3" + > + {`Unique parameters for ${item.iterations.length} Iterations`} + + + {item.iterations.map((i) => ( + !(i[0] in item.params) + )} + /> + ))} + + + )}
); }; diff --git a/frontend/src/components/templates/ILab/index.jsx b/frontend/src/components/templates/ILab/index.jsx index 2e446a30..ed362e16 100644 --- a/frontend/src/components/templates/ILab/index.jsx +++ b/frontend/src/components/templates/ILab/index.jsx @@ -108,8 +108,6 @@ const ILab = () => { }; return ( <> - {/* {selectedRuns.length > 1 && } */} - Date: Tue, 15 Oct 2024 11:35:28 -0400 Subject: [PATCH 22/29] Support relative timescale graphs --- backend/app/services/crucible_svc.py | 22 +++++++++++++++++++--- frontend/src/actions/ilabActions.js | 1 + 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/backend/app/services/crucible_svc.py b/backend/app/services/crucible_svc.py index a50632bd..0d374d08 100644 --- a/backend/app/services/crucible_svc.py +++ b/backend/app/services/crucible_svc.py @@ -64,14 +64,21 @@ class GraphList(BaseModel): omitted if all Graph objects specify a run ID. (This is most useful to select a set of graphs all for a single run ID.) + Normally the X axis will be the actual sample timestamp values; if you + specify relative=True, the X axis will be the duration from the first + timestamp of the metric series. This allows graphs of similar runs started + at different times to be overlaid. + Fields: run: Specify the (default) run ID name: Specify a name for the set of graphs + relative: True for relative timescale graphs: a list of Graph objects """ run: Optional[str] = None name: str + relative: bool = False graphs: list[Graph] @@ -1894,10 +1901,19 @@ def get_metrics_graph(self, graphdata: GraphList) -> dict[str, Any]: x = [] y = [] + first = None + for p in sorted(points, key=lambda a: a.begin): - x.extend( - [self._format_timestamp(p.begin), self._format_timestamp(p.end)] - ) + if graphdata.relative: + if not first: + first = p.begin + s = (p.begin - first) / 1000.0 + e = (p.end - first) / 1000.0 + x.extend([s, e]) + else: + x.extend( + [self._format_timestamp(p.begin), self._format_timestamp(p.end)] + ) y.extend([p.value, p.value]) y_max = max(y_max, p.value) diff --git a/frontend/src/actions/ilabActions.js b/frontend/src/actions/ilabActions.js index 795c0910..49c80888 100644 --- a/frontend/src/actions/ilabActions.js +++ b/frontend/src/actions/ilabActions.js @@ -223,6 +223,7 @@ export const fetchMultiGraphData = (uids) => async (dispatch, getState) => { console.log(graphs); const response = await API.post(`/api/v1/ilab/runs/multigraph`, { name: "comparison", + relative: true, graphs, }); if (response.status === 200) { From e25b82e1f71f9c69b83507ff422d3c85adadd660 Mon Sep 17 00:00:00 2001 From: MVarshini Date: Tue, 15 Oct 2024 23:23:39 +0530 Subject: [PATCH 23/29] pagination data --- .../components/templates/ILab/IlabCompareComponent.jsx | 10 ++++++---- frontend/src/components/templates/ILab/index.jsx | 3 +-- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/frontend/src/components/templates/ILab/IlabCompareComponent.jsx b/frontend/src/components/templates/ILab/IlabCompareComponent.jsx index 80781ef9..c7c61971 100644 --- a/frontend/src/components/templates/ILab/IlabCompareComponent.jsx +++ b/frontend/src/components/templates/ILab/IlabCompareComponent.jsx @@ -18,9 +18,11 @@ import { handleMultiGraph } from "@/actions/ilabActions.js"; import { uid } from "@/utils/helper"; import { useState } from "react"; -const IlabCompareComponent = (props) => { - const { data } = props; - const { page, perPage, totalItems } = useSelector((state) => state.ilab); +const IlabCompareComponent = () => { + // const { data } = props; + const { page, perPage, 
totalItems, tableData } = useSelector( + (state) => state.ilab + ); const dispatch = useDispatch(); const [selectedItems, setSelectedItems] = useState([]); const { multiGraphData } = useSelector((state) => state.ilab); @@ -55,7 +57,7 @@ const IlabCompareComponent = (props) => { - {data.map((item) => { + {tableData.map((item) => { return ( { const [searchParams] = useSearchParams(); const { - results, start_date, end_date, comparisonSwitch, @@ -118,7 +117,7 @@ const ILab = () => { onSwitchChange={onSwitchChange} /> {comparisonSwitch ? ( - + ) : ( <> From 993a4fe2380c6694a46b797ecbd22c994638882d Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Thu, 17 Oct 2024 10:06:39 -0400 Subject: [PATCH 24/29] Multi run comparison adjustments --- backend/app/main.py | 24 +-------- .../templates/ILab/IlabCompareComponent.jsx | 2 +- .../templates/ILab/IlabExpandedRow.jsx | 54 +++++++++---------- .../src/components/templates/ILab/index.less | 10 ++-- 4 files changed, 35 insertions(+), 55 deletions(-) diff --git a/backend/app/main.py b/backend/app/main.py index 90a41de0..8c72feb8 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -1,7 +1,7 @@ import sys import typing -from fastapi import FastAPI, HTTPException, Request +from fastapi import FastAPI, Request from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import JSONResponse import orjson @@ -21,25 +21,6 @@ def render(self, content: typing.Any) -> bytes: "localhost:3000" ] - -async def report_exceptions(request: Request, e: Exception): - if isinstance(e, HTTPException): - raise - tb = e.__traceback__ - print(f"Unhandled exception {e.__class__.__name__}: {str(e)}") - where = "unknown" - while tb is not None: - where = f"{tb.tb_frame.f_code.co_filename}:{tb.tb_lineno}" - print( - f" {where} {tb.tb_frame.f_code.co_name}", - file=sys.stderr, - ) - tb = tb.tb_next - return JSONResponse( - status_code=500, - content={"detail": f"Unhandled server error at {where}: {str(e)}"}, - ) - app = FastAPI(default_response_class=ORJSONResponse, docs_url="/docs", redoc_url=None, @@ -52,8 +33,7 @@ async def report_exceptions(request: Request, e: Exception): license_info={ "name": "Apache 2.0", "url": "https://www.apache.org/licenses/LICENSE-2.0", - }, - exception_handlers={Exception: report_exceptions}) + }) app.add_middleware( CORSMiddleware, diff --git a/frontend/src/components/templates/ILab/IlabCompareComponent.jsx b/frontend/src/components/templates/ILab/IlabCompareComponent.jsx index c7c61971..12fc1d81 100644 --- a/frontend/src/components/templates/ILab/IlabCompareComponent.jsx +++ b/frontend/src/components/templates/ILab/IlabCompareComponent.jsx @@ -65,7 +65,7 @@ const IlabCompareComponent = () => { itemId={item.id} isSelected={selectedItems.includes(item.id)} > - {item.primary_metrics[0]} + {`${new Date(item.begin_date).toLocaleDateString()} ${item.primary_metrics[0]}`} ); })} diff --git a/frontend/src/components/templates/ILab/IlabExpandedRow.jsx b/frontend/src/components/templates/ILab/IlabExpandedRow.jsx index 01f008da..5c8a1324 100644 --- a/frontend/src/components/templates/ILab/IlabExpandedRow.jsx +++ b/frontend/src/components/templates/ILab/IlabExpandedRow.jsx @@ -77,6 +77,33 @@ const IlabRowContent = (props) => { heading={"Common Parameters"} metadata={Object.entries(item.params)} /> + {item.iterations.length > 1 && ( + + { + onToggle("bordered-toggle3"); + }} + isExpanded={expanded.includes("bordered-toggle3")} + id="bordered-toggle3" + > + {`Unique parameters for ${item.iterations.length} Iterations`} + + + 
{item.iterations.map((i) => ( + !(i[0] in item.params) + )} + /> + ))} + + + )} @@ -102,33 +129,6 @@ const IlabRowContent = (props) => { - {item.iterations.length > 1 && ( - - { - onToggle("bordered-toggle3"); - }} - isExpanded={expanded.includes("bordered-toggle3")} - id="bordered-toggle3" - > - {`Unique parameters for ${item.iterations.length} Iterations`} - - - {item.iterations.map((i) => ( - !(i[0] in item.params) - )} - /> - ))} - - - )} ); }; diff --git a/frontend/src/components/templates/ILab/index.less b/frontend/src/components/templates/ILab/index.less index 53a26a93..399c6c77 100644 --- a/frontend/src/components/templates/ILab/index.less +++ b/frontend/src/components/templates/ILab/index.less @@ -6,17 +6,17 @@ flex-direction: row; margin-bottom: 1vw; .metadata-card { - flex: 1; /* additionally, equal width */ + flex: 1; /* additionally, equal width */ padding: 1em; margin-right: 1.5vw; } } .comparison-container { - display: flex; + display: flex; width: 100%; height: 80%; .metrics-container { - width: 20%; + width: 40%; padding: 10px; .compare-btn { margin: 2vh 0; @@ -26,7 +26,7 @@ box-shadow: unset; } } - .chart-conatiner { + .chart-container { width: 80%; .js-plotly-plot { width: 100%; @@ -38,4 +38,4 @@ .title { margin-bottom: 2vh; } -} \ No newline at end of file +} From de387386ed7a1c790d395ca5b5e8cde64c6056d0 Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Thu, 17 Oct 2024 15:57:43 -0400 Subject: [PATCH 25/29] A few tweaks Move the legend up off the graph (although 1.5 is arbitrary and maybe not ideal -- is there a more dynamic way to do this). Also, I fixed the name of a `.less` class earlier, but just happened to notice the corresponding use... --- frontend/src/actions/ilabActions.js | 2 +- frontend/src/components/templates/ILab/IlabCompareComponent.jsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/actions/ilabActions.js b/frontend/src/actions/ilabActions.js index 49c80888..6a073080 100644 --- a/frontend/src/actions/ilabActions.js +++ b/frontend/src/actions/ilabActions.js @@ -230,7 +230,7 @@ export const fetchMultiGraphData = (uids) => async (dispatch, getState) => { response.data.layout["showlegend"] = true; response.data.layout["responsive"] = "true"; response.data.layout["autosize"] = "true"; - response.data.layout["legend"] = { x: 0, y: 1 }; + response.data.layout["legend"] = { x: 0, y: 1.5 }; const graphData = []; graphData.push({ data: response.data.data, diff --git a/frontend/src/components/templates/ILab/IlabCompareComponent.jsx b/frontend/src/components/templates/ILab/IlabCompareComponent.jsx index 12fc1d81..161d39f7 100644 --- a/frontend/src/components/templates/ILab/IlabCompareComponent.jsx +++ b/frontend/src/components/templates/ILab/IlabCompareComponent.jsx @@ -79,7 +79,7 @@ const IlabCompareComponent = () => { type={"ilab"} /> -
+
{isGraphLoading ? (
) : graphDataCopy?.length > 0 && From 25fe58f9939a7d9d0adaf3ca9097875ee383c5da Mon Sep 17 00:00:00 2001 From: MVarshini Date: Fri, 18 Oct 2024 20:00:48 +0530 Subject: [PATCH 26/29] closing of graph accordion --- frontend/src/actions/ilabActions.js | 12 ++++ frontend/src/actions/types.js | 1 + .../templates/ILab/IlabExpandedRow.jsx | 71 ++++++++++++++----- .../templates/ILab/MetricsDropdown.jsx | 7 +- .../src/components/templates/ILab/index.jsx | 28 -------- frontend/src/reducers/ilabReducer.js | 3 + 6 files changed, 73 insertions(+), 49 deletions(-) diff --git a/frontend/src/actions/ilabActions.js b/frontend/src/actions/ilabActions.js index 6a073080..82cee57d 100644 --- a/frontend/src/actions/ilabActions.js +++ b/frontend/src/actions/ilabActions.js @@ -293,7 +293,19 @@ export const tableReCalcValues = () => (dispatch, getState) => { const startIdx = page !== 1 ? (page - 1) * perPage : 0; const endIdx = page !== 1 ? page * perPage - 1 : perPage; dispatch(sliceIlabTableRows(startIdx, endIdx)); + dispatch(getMetaRowdId()); +}; + +export const getMetaRowdId = () => (dispatch, getState) => { + const tableData = getState().ilab.tableData; + const metaId = tableData.map((item) => `metadata-toggle-${item.id}`); + dispatch(setMetaRowExpanded(metaId)); }; export const toggleComparisonSwitch = () => ({ type: TYPES.TOGGLE_COMPARISON_SWITCH, }); + +export const setMetaRowExpanded = (expandedItems) => ({ + type: TYPES.SET_EXPANDED_METAROW, + payload: expandedItems, +}); diff --git a/frontend/src/actions/types.js b/frontend/src/actions/types.js index 7afeb6b5..f7e21fec 100644 --- a/frontend/src/actions/types.js +++ b/frontend/src/actions/types.js @@ -91,3 +91,4 @@ export const SET_ILAB_SELECTED_METRICS = "SET_ILAB_SELECTED_METRICS"; export const SET_ILAB_PERIODS = "SET_ILAB_PERIODS"; export const SET_ILAB_INIT_JOBS = "SET_ILAB_INIT_JOBS"; export const TOGGLE_COMPARISON_SWITCH = "TOGGLE_COMPARISON_SWITCH"; +export const SET_EXPANDED_METAROW = "SET_EXPANDED_METAROW"; diff --git a/frontend/src/components/templates/ILab/IlabExpandedRow.jsx b/frontend/src/components/templates/ILab/IlabExpandedRow.jsx index 5c8a1324..68a8cfa5 100644 --- a/frontend/src/components/templates/ILab/IlabExpandedRow.jsx +++ b/frontend/src/components/templates/ILab/IlabExpandedRow.jsx @@ -6,45 +6,48 @@ import { Card, CardBody, } from "@patternfly/react-core"; +import { useDispatch, useSelector } from "react-redux"; import ILabGraph from "./ILabGraph"; import MetaRow from "./MetaRow"; import MetricsSelect from "./MetricsDropdown"; import PropTypes from "prop-types"; +import { setMetaRowExpanded } from "@/actions/ilabActions"; import { uid } from "@/utils/helper"; -import { useState } from "react"; const IlabRowContent = (props) => { const { item } = props; - const [expanded, setAccExpanded] = useState(["bordered-toggle1"]); + const dispatch = useDispatch(); + const { metaRowExpanded } = useSelector((state) => state.ilab); + const onToggle = (id) => { - const index = expanded.indexOf(id); + const index = metaRowExpanded.indexOf(id); const newExpanded = index >= 0 ? 
[ - ...expanded.slice(0, index), - ...expanded.slice(index + 1, expanded.length), + ...metaRowExpanded.slice(0, index), + ...metaRowExpanded.slice(index + 1, metaRowExpanded.length), ] - : [...expanded, id]; - setAccExpanded(newExpanded); - }; + : [...metaRowExpanded, id]; + dispatch(setMetaRowExpanded(newExpanded)); + }; return ( { - onToggle("bordered-toggle1"); + onToggle(`metadata-toggle-${item.id}`); }} - isExpanded={expanded.includes("bordered-toggle1")} - id="bordered-toggle1" + isExpanded={metaRowExpanded.includes(`metadata-toggle-${item.id}`)} + id={`metadata-toggle-${item.id}`} > Metadata
@@ -112,23 +115,53 @@ const IlabRowContent = (props) => { { - onToggle("bordered-toggle2"); + onToggle(`graph-toggle-${item.id}`); }} - isExpanded={expanded.includes("bordered-toggle2")} - id="bordered-toggle2" + isExpanded={metaRowExpanded.includes(`graph-toggle-${item.id}`)} + id={`graph-toggle-${item.id}`} > Metrics & Graph - Metrics: +
Metrics:
+
+ {item.iterations.length > 1 && ( + + { + onToggle(`iterations-toggle-${item.id}`); + }} + isExpanded={metaRowExpanded.includes( + `iterations-toggle-${item.id}` + )} + id={`iterations-toggle-${item.id}`} + > + {`Unique parameters for ${item.iterations.length} Iterations`} + + + {item.iterations.map((i) => ( + !(i[0] in item.params) + )} + /> + ))} + + + )} ); }; diff --git a/frontend/src/components/templates/ILab/MetricsDropdown.jsx b/frontend/src/components/templates/ILab/MetricsDropdown.jsx index 2cdebb79..04568f0f 100644 --- a/frontend/src/components/templates/ILab/MetricsDropdown.jsx +++ b/frontend/src/components/templates/ILab/MetricsDropdown.jsx @@ -3,6 +3,7 @@ import { Select, SelectList, SelectOption, + Skeleton } from "@patternfly/react-core"; import { fetchGraphData, setSelectedMetrics } from "@/actions/ilabActions"; import { useDispatch, useSelector } from "react-redux"; @@ -58,7 +59,7 @@ const MetricsSelect = (props) => { /* Metrics select */ return ( <> - {hasMetricsData(item.id) && ( + {hasMetricsData(item.id) ? ( - )} + ): + + } ); }; diff --git a/frontend/src/components/templates/ILab/index.jsx b/frontend/src/components/templates/ILab/index.jsx index 3c4fc102..d728b44a 100644 --- a/frontend/src/components/templates/ILab/index.jsx +++ b/frontend/src/components/templates/ILab/index.jsx @@ -21,7 +21,6 @@ import { useDispatch, useSelector } from "react-redux"; import { useEffect, useState } from "react"; import { useNavigate, useSearchParams } from "react-router-dom"; -// import { Button } from "@patternfly/react-core"; import IlabCompareComponent from "./IlabCompareComponent"; import IlabRowContent from "./IlabExpandedRow"; import RenderPagination from "@/components/organisms/Pagination"; @@ -58,18 +57,6 @@ const ILab = () => { } }; - // const [selectedRuns, setSelectedRuns] = useState([]); - // const setRunSelected = (run, isSelecting = true) => - // setSelectedRuns((prevSelected) => { - // console.log(`Adding ${run}, ${isSelecting}`); - // const others = prevSelected.filter((r) => r != run.id); - // return isSelecting ? [...others, run.id] : others; - // }); - // const selectAllRuns = (isSelecting = true) => - // setSelectedRuns(isSelecting ? results.map((r) => r.id) : []); - // const areAllRunsSelected = selectedRuns.length === results.length; - // const isRunSelected = (run) => selectedRuns.includes(run.id); - useEffect(() => { if (searchParams.size > 0) { // date filter is set apart @@ -123,13 +110,6 @@ const ILab = () => {
- {/* @@ -141,14 +121,6 @@ const ILab = () => { {tableData.map((item, rowIndex) => ( <> - {/*
selectAllRuns(isSelecting), - isSelected: areAllRunsSelected, - }} - /> */} {columnNames.metric} {columnNames.begin_date}
- setRunSelected(item, isSelecting), - isSelected: isRunSelected(item), - }} - /> */} { const { type, payload } = action; @@ -59,6 +60,8 @@ const ILabReducer = (state = initialState, action = {}) => { return { ...state, multiGraphData: payload }; case TYPES.TOGGLE_COMPARISON_SWITCH: return { ...state, comparisonSwitch: !state.comparisonSwitch }; + case TYPES.SET_EXPANDED_METAROW: + return { ...state, metaRowExpanded: payload }; default: return state; } From eed488f80370f6ac2dc36833ab9044899a26cddf Mon Sep 17 00:00:00 2001 From: David Butenhof Date: Fri, 18 Oct 2024 11:42:22 -0400 Subject: [PATCH 27/29] Adjustments Move "unique parameters" accordion back up with parameters using the new expansion toggles. --- .../templates/ILab/IlabExpandedRow.jsx | 39 +++---------------- 1 file changed, 5 insertions(+), 34 deletions(-) diff --git a/frontend/src/components/templates/ILab/IlabExpandedRow.jsx b/frontend/src/components/templates/ILab/IlabExpandedRow.jsx index 68a8cfa5..41bbe67b 100644 --- a/frontend/src/components/templates/ILab/IlabExpandedRow.jsx +++ b/frontend/src/components/templates/ILab/IlabExpandedRow.jsx @@ -84,16 +84,16 @@ const IlabRowContent = (props) => { { - onToggle("bordered-toggle3"); + onToggle(`iterations-toggle-${item.id}`); }} - isExpanded={expanded.includes("bordered-toggle3")} - id="bordered-toggle3" + isExpanded={metaRowExpanded.includes(`iterations-toggle-${item.id}`)} + id={`iterations-toggle-${item.id}`} > {`Unique parameters for ${item.iterations.length} Iterations`} {item.iterations.map((i) => ( { - {item.iterations.length > 1 && ( - - { - onToggle(`iterations-toggle-${item.id}`); - }} - isExpanded={metaRowExpanded.includes( - `iterations-toggle-${item.id}` - )} - id={`iterations-toggle-${item.id}`} - > - {`Unique parameters for ${item.iterations.length} Iterations`} - - - {item.iterations.map((i) => ( - !(i[0] in item.params) - )} - /> - ))} - - - )} ); }; From 606f468c5822765cf4f299b82f5f8cbbdb432a26 Mon Sep 17 00:00:00 2001 From: MVarshini Date: Fri, 18 Oct 2024 22:09:53 +0530 Subject: [PATCH 28/29] conflict resolve --- .../components/templates/ILab/IlabExpandedRow.jsx | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/frontend/src/components/templates/ILab/IlabExpandedRow.jsx b/frontend/src/components/templates/ILab/IlabExpandedRow.jsx index 41bbe67b..bcdbcc33 100644 --- a/frontend/src/components/templates/ILab/IlabExpandedRow.jsx +++ b/frontend/src/components/templates/ILab/IlabExpandedRow.jsx @@ -86,14 +86,20 @@ const IlabRowContent = (props) => { onClick={() => { onToggle(`iterations-toggle-${item.id}`); }} - isExpanded={metaRowExpanded.includes(`iterations-toggle-${item.id}`)} + isExpanded={metaRowExpanded.includes( + `iterations-toggle-${item.id}` + )} id={`iterations-toggle-${item.id}`} > {`Unique parameters for ${item.iterations.length} Iterations`} {item.iterations.map((i) => ( Date: Fri, 18 Oct 2024 23:09:51 +0530 Subject: [PATCH 29/29] multiple APIs to fetch periods and icon to display more info --- frontend/src/actions/ilabActions.js | 10 +++++----- .../templates/ILab/IlabCompareComponent.jsx | 14 +++++++++++++- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/frontend/src/actions/ilabActions.js b/frontend/src/actions/ilabActions.js index 82cee57d..c3782735 100644 --- a/frontend/src/actions/ilabActions.js +++ b/frontend/src/actions/ilabActions.js @@ -175,14 +175,14 @@ export const fetchGraphData = export const handleMultiGraph = (uids) => async (dispatch, getState) => { try { const periods = 
getState().ilab.periods; + const pUids = periods.map((i) => i.uid); - const missingPeriods = periods.filter((item) => !uids.includes(item.uid)); - - const missingUids = - periods.length > 0 ? missingPeriods.map((item) => item.uid) : uids; + const missingPeriods = uids.filter(function (x) { + return pUids.indexOf(x) < 0; + }); await Promise.all( - missingUids.map(async (uid) => { + missingPeriods.map(async (uid) => { await dispatch(fetchPeriods(uid)); // Dispatch each item }) ); diff --git a/frontend/src/components/templates/ILab/IlabCompareComponent.jsx b/frontend/src/components/templates/ILab/IlabCompareComponent.jsx index 161d39f7..c062be13 100644 --- a/frontend/src/components/templates/ILab/IlabCompareComponent.jsx +++ b/frontend/src/components/templates/ILab/IlabCompareComponent.jsx @@ -5,11 +5,13 @@ import { Menu, MenuContent, MenuItem, + MenuItemAction, MenuList, Title, } from "@patternfly/react-core"; import { useDispatch, useSelector } from "react-redux"; +import { InfoCircleIcon } from "@patternfly/react-icons"; import Plot from "react-plotly.js"; import PropTypes from "prop-types"; import RenderPagination from "@/components/organisms/Pagination"; @@ -64,8 +66,18 @@ const IlabCompareComponent = () => { hasCheckbox itemId={item.id} isSelected={selectedItems.includes(item.id)} + actions={ + } + actionId="code" + onClick={() => console.log("clicked on code icon")} + aria-label="Code" + /> + } > - {`${new Date(item.begin_date).toLocaleDateString()} ${item.primary_metrics[0]}`} + {`${new Date(item.begin_date).toLocaleDateString()} ${ + item.primary_metrics[0] + }`} ); })}
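// Reviewer sketch (not part of the patch above): the handleMultiGraph change in
// ilabActions.js computes "uids whose periods are not yet in state" before
// dispatching fetchPeriods for each. A Set-based set difference expresses the same
// de-duplication without repeated indexOf scans. This assumes the state shape used
// above (state.ilab.periods entries carrying a `uid` field) and reuses the existing
// fetchPeriods thunk from this file; treat it as an illustrative alternative only.
export const fetchMissingPeriods = (uids) => async (dispatch, getState) => {
  // uids whose periods have already been fetched
  const have = new Set(getState().ilab.periods.map((p) => p.uid));
  // requested uids still missing from state
  const missing = uids.filter((uid) => !have.has(uid));
  await Promise.all(missing.map((uid) => dispatch(fetchPeriods(uid))));
};
// Set membership checks keep this O(n + m) instead of O(n * m) with indexOf.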