Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Project access refactoring #481

Merged
merged 16 commits into from
Sep 12, 2024
Merged
6 changes: 3 additions & 3 deletions examples/upload_and_predict_from_numpy.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ def rotate_image(image: np.ndarray, angle: float) -> np.ndarray:
rotated_image = rotate_image(image=numpy_image, angle=20)

# Make sure that the project exists
ensure_trained_example_project(geti=geti, project_name=PROJECT_NAME)
project = ensure_trained_example_project(geti=geti, project_name=PROJECT_NAME)

print(
"Uploading and predicting example image now... The prediction results will be "
Expand All @@ -71,7 +71,7 @@ def rotate_image(image: np.ndarray, angle: float) -> np.ndarray:

# We can upload and predict the resulting array directly:
sc_image, image_prediction = geti.upload_and_predict_image(
project_name=PROJECT_NAME,
project=project,
image=rotated_image,
visualise_output=False,
delete_after_prediction=DELETE_AFTER_PREDICTION,
Expand Down Expand Up @@ -100,7 +100,7 @@ def rotate_image(image: np.ndarray, angle: float) -> np.ndarray:
print("Video generated, retrieving predictions...")
# Create video, upload and predict from the list of frames
sc_video, video_frames, frame_predictions = geti.upload_and_predict_video(
project_name=PROJECT_NAME,
project=project,
video=rotation_video,
frame_stride=1,
visualise_output=False,
Expand Down
4 changes: 2 additions & 2 deletions examples/upload_and_predict_media_from_folder.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,11 +38,11 @@
# --------------------------------------------------

# Make sure that the specified project exists on the server
ensure_trained_example_project(geti=geti, project_name=PROJECT_NAME)
project = ensure_trained_example_project(geti=geti, project_name=PROJECT_NAME)

# Upload the media in the folder and generate predictions
geti.upload_and_predict_media_folder(
project_name=PROJECT_NAME,
project=project,
media_folder=FOLDER_WITH_MEDIA,
delete_after_prediction=DELETE_AFTER_PREDICTION,
output_folder=OUTPUT_FOLDER,
Expand Down
13 changes: 5 additions & 8 deletions geti_sdk/benchmarking/benchmarker.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ class Benchmarker:
def __init__(
self,
geti: Geti,
project: Union[str, Project],
project: Project,
precision_levels: Optional[Sequence[str]] = None,
models: Optional[Sequence[Model]] = None,
algorithms: Optional[Sequence[str]] = None,
Expand Down Expand Up @@ -83,7 +83,7 @@ def __init__(
be called after initialization.

:param geti: Geti instance on which the project to use for benchmarking lives
:param project: Project or project name to use for the benchmarking. The
:param project: Project to use for the benchmarking. The
project must exist on the specified Geti instance
:param precision_levels: List of model precision levels to run the
benchmarking for. Throughput will be measured for each precision level
Expand Down Expand Up @@ -111,11 +111,8 @@ def __init__(
on.
"""
self.geti = geti
if isinstance(project, str):
project_name = project
else:
project_name = project.name
self.project = geti.get_project(project_name)
# Update project object to get the latest project details
self.project = self.geti.get_project(project_id=project.id)
logging.info(
f"Setting up Benchmarker for Intel® Geti™ project `{self.project.name}`."
)
Expand Down Expand Up @@ -501,7 +498,7 @@ def prepare_benchmark(self, working_directory: os.PathLike = "."):
output_folder = os.path.join(working_directory, f"deployment_{index}")
with suppress_log_output():
self.geti.deploy_project(
project_name=self.project.name,
project=self.project,
output_folder=output_folder,
models=opt_models,
)
Expand Down
103 changes: 45 additions & 58 deletions geti_sdk/geti.py
Original file line number Diff line number Diff line change
Expand Up @@ -252,19 +252,28 @@ def credit_balance(self) -> Optional[int]:
return balance.available if balance is not None else None

def get_project(
self, project_name: str, project_id: Optional[str] = None
self,
project_name: Optional[str] = None,
project_id: Optional[str] = None,
project: Optional[Project] = None,
) -> Project:
"""
Return the Intel® Geti™ project named `project_name`, if any. If no project by
that name is found on the Intel® Geti™ server, this method will raise a
KeyError.

:param project_name: Name of the project to retrieve
:raises: KeyError if project named `project_name` is not found on the server
:return: Project identified by `project_name`
Return the Intel® Geti™ project by name or ID, if any.
If a project object is passed, the method will return the updated object.
If no project by that name is found on the Intel® Geti™ server,
this method will raise a KeyError.

:param project_name: Name of the project to retrieve.
:param project_id: ID of the project to retrieve. If not specified, the
project with name `project_name` will be retrieved.
:param project: Project object to update. If provided, the associated `project_id`
will be used to update the project object.
:raises: KeyError if the project identified by one of the arguments is not found on the server
:raises: ValueError if there are several projects on the server named `project_name`
:return: Project identified by one of the arguments.
"""
project = self.project_client.get_project_by_name(
project_name=project_name, project_id=project_id
project = self.project_client.get_project(
project_name=project_name, project_id=project_id, project=project
)
if project is None:
raise KeyError(
Expand All @@ -275,8 +284,7 @@ def get_project(

def download_project_data(
self,
project_name: str,
project_id: Optional[str] = None,
project: Project,
target_folder: Optional[str] = None,
include_predictions: bool = False,
include_active_models: bool = False,
Expand Down Expand Up @@ -332,7 +340,7 @@ def download_project_data(
Downloading a project may take a substantial amount of time if the project
dataset is large.

:param project_name: Name of the project to download
:param project: Project object to download
:param target_folder: Path to the local folder in which the project data
should be saved. If not specified, a new directory will be created inside
the current working directory. The name of the resulting directory will be
Expand All @@ -354,7 +362,7 @@ def download_project_data(
regarding the downloaded project
"""
project = self.import_export_module.download_project_data(
project=self.get_project(project_name=project_name, project_id=project_id),
project=project,
target_folder=target_folder,
include_predictions=include_predictions,
include_active_models=include_active_models,
Expand All @@ -363,7 +371,7 @@ def download_project_data(
# Download deployment
if include_deployment:
logging.info("Creating deployment for project...")
self.deploy_project(project.name, output_folder=target_folder)
self.deploy_project(project, output_folder=target_folder)

logging.info(f"Project '{project.name}' was downloaded successfully.")
return project
Expand Down Expand Up @@ -459,28 +467,23 @@ def upload_all_projects(self, target_folder: str) -> List[Project]:
def export_project(
self,
filepath: os.PathLike,
project_name: str,
project_id: Optional[str] = None,
project: Project,
) -> None:
"""
Export the given project to the file specified by `filepath`.
The project will be saved in a .zip file format, containing all project data
and metadata required for project import to another instance of the Intel® Geti™ platform.

:param filepath: Path to the file to save the project to
:param project_name: Name of the project to export
:param project_id: Optional ID of the project to export. If not specified, the
project with name `project_name` will be exported.
:param project: Project object to export
"""
if project_id is None:
project_id = self.get_project(project_name=project_name).id
if project_id is None:
if project.id is None:
raise ValueError(
f"Could not retrieve project ID for project '{project_name}'."
"Please specify the project ID explicitly."
f"Could not retrieve project ID for project '{project.name}'."
"Please reinitialize the project object."
)
self.import_export_module.export_project(
project_id=project_id, filepath=filepath
project_id=project.id, filepath=filepath
)

def import_project(
Expand Down Expand Up @@ -523,7 +526,7 @@ def export_dataset(
in the dataset, False to only include media with annotations. Defaults to
False.
"""
if type(export_format) is str:
if isinstance(export_format, str):
export_format = DatasetFormat[export_format]
self.import_export_module.export_dataset(
project=project,
Expand Down Expand Up @@ -858,7 +861,7 @@ def create_task_chain_project_from_dataset(

def upload_and_predict_media_folder(
self,
project_name: str,
project: Project,
media_folder: str,
output_folder: Optional[str] = None,
delete_after_prediction: bool = False,
Expand All @@ -867,7 +870,7 @@ def upload_and_predict_media_folder(
) -> bool:
"""
Upload a folder with media (images, videos or both) from local disk at path
`target_folder` to the project with name `project_name` on the Intel® Geti™
`media_folder` to the project provided with the `project` argument on the Intel® Geti™
server.
After the media upload is complete, predictions will be downloaded for all
media in the folder. This method will create a 'predictions' directory in
Expand All @@ -877,7 +880,7 @@ def upload_and_predict_media_folder(
removed from the project on the Intel® Geti™ server after the predictions have
been downloaded.

:param project_name: Name of the project to upload media to
:param project: Project object to upload the media to
:param media_folder: Path to the folder to upload media from
:param output_folder: Path to save the predictions to. If not specified, this
method will create a folder named '<media_folder_name>_predictions' on
Expand All @@ -892,16 +895,6 @@ def upload_and_predict_media_folder(
:return: True if all media was uploaded, and predictions for all media were
successfully downloaded. False otherwise
"""
# Obtain project details from cluster
try:
project = self.get_project(project_name=project_name)
except ValueError:
logging.info(
f"Project '{project_name}' was not found on the cluster. Aborting "
f"media upload."
)
return False

# Upload images
image_client = ImageClient(
session=self.session, workspace_id=self.workspace_id, project=project
Expand All @@ -927,7 +920,7 @@ def upload_and_predict_media_folder(
)
if not prediction_client.ready_to_predict:
logging.info(
f"Project '{project_name}' is not ready to make predictions, likely "
f"Project '{project.name}' is not ready to make predictions, likely "
f"because one of the tasks in the task chain does not have a "
f"trained model yet. Aborting prediction."
)
Expand Down Expand Up @@ -965,17 +958,17 @@ def upload_and_predict_media_folder(

def upload_and_predict_image(
self,
project_name: str,
project: Project,
image: Union[np.ndarray, Image, VideoFrame, str, os.PathLike],
visualise_output: bool = True,
delete_after_prediction: bool = False,
dataset_name: Optional[str] = None,
) -> Tuple[Image, Prediction]:
"""
Upload a single image to a project named `project_name` on the Intel® Geti™
Upload a single image to a project on the Intel® Geti™
server, and return a prediction for it.

:param project_name: Name of the project to upload the image to
:param project: Project object to upload the image to
:param image: Image, numpy array representing an image, or filepath to an
image to upload and get a prediction for
:param visualise_output: True to show the resulting prediction, overlayed on
Expand All @@ -989,8 +982,6 @@ def upload_and_predict_image(
- Image object representing the image that was uploaded
- Prediction for the image
"""
project = self.get_project(project_name=project_name)

# Get the dataset to upload to
dataset: Optional[Dataset] = None
if dataset_name is not None:
Expand Down Expand Up @@ -1030,7 +1021,7 @@ def upload_and_predict_image(
)
if not prediction_client.ready_to_predict:
raise ValueError(
f"Project '{project_name}' is not ready to make predictions. At least "
f"Project '{project.name}' is not ready to make predictions. At least "
f"one of the tasks in the task chain does not have any models trained."
)
prediction = prediction_client.get_image_prediction(uploaded_image)
Expand All @@ -1048,21 +1039,21 @@ def upload_and_predict_image(

def upload_and_predict_video(
self,
project_name: str,
project: Project,
video: Union[Video, str, os.PathLike, Union[Sequence[np.ndarray], np.ndarray]],
frame_stride: Optional[int] = None,
visualise_output: bool = True,
delete_after_prediction: bool = False,
) -> Tuple[Video, MediaList[VideoFrame], List[Prediction]]:
"""
Upload a single video to a project named `project_name` on the Intel® Geti™
Upload a single video to a project on the Intel® Geti™
server, and return a list of predictions for the frames in the video.

The parameter 'frame_stride' is used to control the stride for frame
extraction. Predictions are only generated for the extracted frames. So to
get predictions for all frames, `frame_stride=1` can be passed.

:param project_name: Name of the project to upload the image to
:param project: Project to upload the video to
:param video: Video or filepath to a video to upload and get predictions for.
Can also be a 4D numpy array or a list of 3D numpy arrays, shaped such
that the array dimensions represent `frames x width x height x channels`,
Expand All @@ -1081,8 +1072,6 @@ def upload_and_predict_video(
have been generated
- List of Predictions for the Video
"""
project = self.get_project(project_name=project_name)

# Upload the video
video_client = VideoClient(
session=self.session, workspace_id=self.workspace_id, project=project
Expand All @@ -1105,7 +1094,7 @@ def upload_and_predict_video(
else:
video_data = video
if needs_upload:
logging.info(f"Uploading video to project '{project_name}'...")
logging.info(f"Uploading video to project '{project.name}'...")
uploaded_video = video_client.upload_video(video=video_data)
else:
uploaded_video = video
Expand All @@ -1116,7 +1105,7 @@ def upload_and_predict_video(
)
if not prediction_client.ready_to_predict:
raise ValueError(
f"Project '{project_name}' is not ready to make predictions. At least "
f"Project '{project.name}' is not ready to make predictions. At least "
f"one of the tasks in the task chain does not have any models trained."
)
if frame_stride is None:
Expand All @@ -1141,7 +1130,7 @@ def upload_and_predict_video(

def deploy_project(
self,
project_name: str,
project: Project,
output_folder: Optional[Union[str, os.PathLike]] = None,
models: Optional[Sequence[BaseModel]] = None,
enable_explainable_ai: bool = False,
Expand All @@ -1156,7 +1145,7 @@ def deploy_project(
for each task in the project. However, it is possible to specify a particular
model to use, by passing it in the list of `models` as input to this method.

:param project_name: Name of the project to deploy
:param project: Project object to deploy
:param output_folder: Path to a folder on local disk to which the Deployment
should be downloaded. If no path is specified, the deployment will not be
saved.
Expand All @@ -1174,8 +1163,6 @@ def deploy_project(
launch an OVMS container serving the models.
:return: Deployment for the project
"""
project = self.get_project(project_name=project_name)

deployment_client = self._deployment_clients.get(project.id, None)
if deployment_client is None:
# Create deployment client and add to cache.
Expand Down
Loading
Loading