Skip to content

Commit

Permalink
Implement all image processing with gcp files & updated task table to…
Browse files Browse the repository at this point in the history
… maintain task areas and so on. (#416)

* feat: update the gcp files for all project images

* feat: update the new fields in tasks tables

* feat: update task areas from  db to instead  of postgis

* feat: update task flight time, flight distance & task areas

* fix: import errors in project routes

* fix: waypoints & waylines counts

* fix: only get unique task id based on task events when all image processing..

* fix: issues resolved in user task out lists in dashboard

* fixup! fix: issues resolved in user task out lists in dashboard

* feat: update assets_url in task tables instead of searching in s3

* fix: run pre-commit to format migrations file

* fix: process assets from odm, download issues

* feat: update the gcp files for all project images

* feat: update the new fields in tasks tables

* feat: update task areas from  db to instead  of postgis

* feat: update task flight time, flight distance & task areas

* fix: import errors in project routes

* fix: waypoints & waylines counts

* fix: only get unique task id based on task events when all image processing..

* fix: issues resolved in user task out lists in dashboard

* fixup! fix: issues resolved in user task out lists in dashboard

* feat: update assets_url in task tables instead of searching in s3

* fix: run pre-commit to format migrations file

* fix: process assets from odm, download issues

* feat: add dem file on task split api payload

* fix: dem data upload section is visible although the terrain follow option is false

* fix: project creation fails if no-fly zone is []

remove the no-fly-zone key if no-fly-zone data is not available

* feat: remove unused api service `getAllAssetsUrl`

* feat: add remove project assets api fetch and display data from project description api

* feat: add action and slice for storing assets information of task

* feat(task-description-map-section): remove extra call for task information and use data from redux state

* feat: remove task-assets-information api and display data from task description api

store assets info on redux state on api call success and remove on component unmount

update keys as per data information

* refactor: comment task assets information services

* refactor: remove comment

* feat: implement dummy api for upload the task table

* fix: remove flight data from waypoints routes

---------

Co-authored-by: Sujit <[email protected]>
  • Loading branch information
Pradip-p and suzit-10 authored Dec 31, 2024
1 parent aefebf7 commit e6b2759
Show file tree
Hide file tree
Showing 29 changed files with 875 additions and 290 deletions.
4 changes: 2 additions & 2 deletions src/backend/app/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from pydantic_settings import BaseSettings
from typing import Annotated, Optional, Union, Any
from pydantic.networks import HttpUrl, PostgresDsn

from loguru import logger as log

HttpUrlStr = Annotated[
str,
Expand Down Expand Up @@ -122,7 +122,7 @@ def get_settings():
"""Cache settings when accessed throughout app."""
_settings = Settings()
if _settings.DEBUG:
print(f"Loaded settings: {_settings.model_dump()}")
log.info(f"Loaded settings: {_settings.model_dump()}")
return _settings


Expand Down
7 changes: 7 additions & 0 deletions src/backend/app/db/db_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,13 @@ class DbTask(Base):
take_off_point = cast(
WKBElement, Column(Geometry("POINT", srid=4326), nullable=True)
)
total_area_sqkm = cast(float, Column(Float, nullable=True))
flight_time_minutes = cast(int, Column(Float, nullable=True))
flight_distance_km = cast(float, Column(Float, nullable=True))
total_image_uploaded = cast(int, Column(SmallInteger, nullable=True))
assets_url = cast(
str, Column(String, nullable=True)
) # download link for assets of images(orthophoto)


class DbProject(Base):
Expand Down
12 changes: 6 additions & 6 deletions src/backend/app/gcp/gcp_routes.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import uuid
from app.config import settings
from app.projects import project_schemas
from fastapi import APIRouter, Depends
from app.waypoints import waypoint_schemas
from app.gcp import gcp_crud
Expand All @@ -21,15 +22,15 @@
async def find_images(
project_id: uuid.UUID,
task_id: uuid.UUID,
db: Annotated[Connection, Depends(database.get_db)],
point: waypoint_schemas.PointField = None,
) -> List[str]:
"""Find images that contain a specified point."""

fov_degree = 82.1 # For DJI Mini 4 Pro
altitude = 100 # TODO: Get this from db

result = await project_schemas.DbProject.one(db, project_id)
return await gcp_crud.find_images_in_a_task_for_point(
project_id, task_id, point, fov_degree, altitude
project_id, task_id, point, fov_degree, result.altitude
)


Expand All @@ -42,11 +43,10 @@ async def find_images_for_a_project(
"""Find images that contain a specified point in a project."""

fov_degree = 82.1 # For DJI Mini 4 Pro
altitude = 100 # TODO: Get this from db

result = await project_schemas.DbProject.one(db, project_id)
# Get all task IDs for the project from database
task_id_list = await list_task_id_for_project(db, project_id)

return await gcp_crud.find_images_in_a_project_for_point(
project_id, task_id_list, point, fov_degree, altitude
project_id, task_id_list, point, fov_degree, result.altitude
)
79 changes: 79 additions & 0 deletions src/backend/app/migrations/versions/b18103ac4ab7_.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
"""
Revision ID: b18103ac4ab7
Revises: e23c05f21542
Create Date: 2024-12-30 11:36:29.762485
"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = "b18103ac4ab7"
down_revision: Union[str, None] = "e23c05f21542"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Apply the schema migration.

    Makes ``task_events.state`` non-nullable (the enum values must match the
    existing ``state`` type already present in the database) and adds the new
    per-task metric columns plus the processed-assets download link to
    ``tasks``.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE: this ENUM listing mirrors the existing "state" type in the DB;
    # it is passed as existing_type only and does not create a new type.
    state_enum = postgresql.ENUM(
        "REQUEST_FOR_MAPPING",
        "UNLOCKED_TO_MAP",
        "LOCKED_FOR_MAPPING",
        "UNLOCKED_TO_VALIDATE",
        "LOCKED_FOR_VALIDATION",
        "UNLOCKED_DONE",
        "UNFLYABLE_TASK",
        "IMAGE_UPLOADED",
        "IMAGE_PROCESSING_FAILED",
        "IMAGE_PROCESSING_STARTED",
        "IMAGE_PROCESSING_FINISHED",
        name="state",
    )
    op.alter_column(
        "task_events",
        "state",
        existing_type=state_enum,
        nullable=False,
    )

    # New task metrics and the assets (orthophoto) download URL.
    new_task_columns = (
        sa.Column("total_area_sqkm", sa.Float(), nullable=True),
        sa.Column("flight_time_minutes", sa.Float(), nullable=True),
        sa.Column("flight_distance_km", sa.Float(), nullable=True),
        sa.Column("total_image_uploaded", sa.SmallInteger(), nullable=True),
        sa.Column("assets_url", sa.String(), nullable=True),
    )
    for column in new_task_columns:
        op.add_column("tasks", column)
    # ### end Alembic commands ###


def downgrade() -> None:
    """Revert the schema migration.

    Drops the columns added to ``tasks`` (in reverse order of creation) and
    restores ``task_events.state`` to nullable.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the added task columns in the reverse order they were created.
    for column_name in (
        "assets_url",
        "total_image_uploaded",
        "flight_distance_km",
        "flight_time_minutes",
        "total_area_sqkm",
    ):
        op.drop_column("tasks", column_name)

    # NOTE: this ENUM listing mirrors the existing "state" type in the DB;
    # it is passed as existing_type only and does not create a new type.
    state_enum = postgresql.ENUM(
        "REQUEST_FOR_MAPPING",
        "UNLOCKED_TO_MAP",
        "LOCKED_FOR_MAPPING",
        "UNLOCKED_TO_VALIDATE",
        "LOCKED_FOR_VALIDATION",
        "UNLOCKED_DONE",
        "UNFLYABLE_TASK",
        "IMAGE_UPLOADED",
        "IMAGE_PROCESSING_FAILED",
        "IMAGE_PROCESSING_STARTED",
        "IMAGE_PROCESSING_FINISHED",
        name="state",
    )
    op.alter_column(
        "task_events",
        "state",
        existing_type=state_enum,
        nullable=True,
    )
    # ### end Alembic commands ###
2 changes: 1 addition & 1 deletion src/backend/app/models/enums.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ class DroneType(IntEnum):
DJI_MINI_4_PRO = 1


class UserRole(IntEnum, Enum):
class UserRole(int, Enum):
PROJECT_CREATOR = 1
DRONE_PILOT = 2
REGULATOR = 3
Expand Down
44 changes: 34 additions & 10 deletions src/backend/app/projects/image_processing.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from app.models.enums import State
from app.utils import timestamp
from app.db import database
from app.projects import project_logic
from pyodm import Node
from app.s3 import get_file_from_bucket, list_objects_from_bucket, add_file_to_bucket
from loguru import logger as log
Expand Down Expand Up @@ -165,6 +166,13 @@ async def _process_images(
self.download_images_from_s3(bucket_name, temp_dir, self.task_id)
images_list = self.list_images(temp_dir)
else:
gcp_list_file = f"dtm-data/projects/{self.project_id}/gcp/gcp_list.txt"
gcp_file_path = os.path.join(temp_dir, "gcp_list.txt")

# Check and add the GCP file to the images list if it exists
if get_file_from_bucket(bucket_name, gcp_list_file, gcp_file_path):
images_list.append(gcp_file_path)

for task_id in self.task_ids:
self.download_images_from_s3(bucket_name, temp_dir, task_id)
images_list.extend(self.list_images(temp_dir))
Expand Down Expand Up @@ -355,13 +363,19 @@ async def process_assets_from_odm(
"""
log.info(f"Starting processing for project {dtm_project_id}")
node = Node.from_url(node_odm_url)
output_file_path = f"/tmp/{dtm_project_id}"
output_file_path = f"/tmp/{uuid.uuid4()}"

try:
os.makedirs(output_file_path, exist_ok=True)
task = node.get_task(odm_task_id)
log.info(f"Downloading results for task {dtm_project_id} to {output_file_path}")
log.info(f"Downloading results for task {odm_task_id} to {output_file_path}")

assets_path = task.download_zip(output_file_path)
if not os.path.exists(assets_path):
log.error(f"Downloaded file not found: {assets_path}")
raise
log.info(f"Successfully downloaded ZIP to {assets_path}")

s3_path = f"dtm-data/projects/{dtm_project_id}/{dtm_task_id if dtm_task_id else ''}/assets.zip".strip(
"/"
)
Expand All @@ -387,14 +401,16 @@ async def process_assets_from_odm(
add_file_to_bucket(settings.S3_BUCKET_NAME, orthophoto_path, s3_ortho_path)

images_json_path = os.path.join(output_file_path, "images.json")
s3_images_json_path = f"dtm-data/projects/{dtm_project_id}/{dtm_task_id if dtm_task_id else ''}/images.json".strip(
"/"
)

log.info(f"Uploading images.json to S3 path: {s3_images_json_path}")
add_file_to_bucket(
settings.S3_BUCKET_NAME, images_json_path, s3_images_json_path
)
if os.path.exists(images_json_path):
s3_images_json_path = f"dtm-data/projects/{dtm_project_id}/{dtm_task_id if dtm_task_id else ''}/images.json".strip(
"/"
)
log.info(f"Uploading images.json to S3 path: {s3_images_json_path}")
add_file_to_bucket(
settings.S3_BUCKET_NAME, images_json_path, s3_images_json_path
)
else:
log.warning(f"images.json not found in {output_file_path}")

log.info(f"Processing complete for project {dtm_project_id}")

Expand All @@ -418,6 +434,14 @@ async def process_assets_from_odm(
f"Task {dtm_task_id} state updated to IMAGE_PROCESSING_FINISHED in the database."
)

s3_path_url = (
f"dtm-data/projects/{dtm_project_id}/{dtm_task_id}/assets.zip"
)
# update the task table
await project_logic.update_task_field(
conn, dtm_project_id, dtm_task_id, "assets_url", s3_path_url
)

except Exception as e:
log.error(f"Error during processing for project {dtm_project_id}: {e}")

Expand Down
Loading

0 comments on commit e6b2759

Please sign in to comment.