diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index 9279822fa10..b0fbcedabbe 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -3,19 +3,15 @@ package controllers import org.apache.pekko.util.Timeout import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} -import com.scalableminds.util.geometry.BoundingBox import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType.AnnotationLayerType import com.scalableminds.webknossos.datastore.models.annotation.{ AnnotationLayer, AnnotationLayerStatistics, AnnotationLayerType } -import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis -import com.scalableminds.webknossos.datastore.rpc.RPC -import com.scalableminds.webknossos.tracingstore.tracings.volume.MagRestrictions -import com.scalableminds.webknossos.tracingstore.tracings.{TracingIds, TracingType} +import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters +import com.scalableminds.webknossos.tracingstore.tracings.{TracingId, TracingType} import mail.{MailchimpClient, MailchimpTag} import models.analytics.{AnalyticsService, CreateAnnotationEvent, OpenAnnotationEvent} import models.annotation.AnnotationState.Cancelled @@ -27,9 +23,7 @@ import models.task.TaskDAO import models.team.{TeamDAO, TeamService} import models.user.time._ import models.user.{User, UserDAO, UserService} -import net.liftweb.common.Box import play.api.i18n.{Messages, MessagesProvider} -import play.api.libs.json.Json.WithDefaultValues import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.{URLSharing, UserAwareRequestLogging, WkEnv} @@ -40,18 +34,6 @@ import javax.inject.Inject import 
scala.concurrent.ExecutionContext import scala.concurrent.duration._ -case class AnnotationLayerParameters(typ: AnnotationLayerType, - fallbackLayerName: Option[String], - autoFallbackLayer: Boolean = false, - mappingName: Option[String] = None, - magRestrictions: Option[MagRestrictions], - name: Option[String], - additionalAxes: Option[Seq[AdditionalAxis]]) -object AnnotationLayerParameters { - implicit val jsonFormat: OFormat[AnnotationLayerParameters] = - Json.using[WithDefaultValues].format[AnnotationLayerParameters] -} - class AnnotationController @Inject()( annotationDAO: AnnotationDAO, annotationLayerDAO: AnnotationLayerDAO, @@ -59,7 +41,6 @@ class AnnotationController @Inject()( userDAO: UserDAO, organizationDAO: OrganizationDAO, datasetDAO: DatasetDAO, - tracingStoreDAO: TracingStoreDAO, datasetService: DatasetService, annotationService: AnnotationService, annotationMutexService: AnnotationMutexService, @@ -75,9 +56,7 @@ class AnnotationController @Inject()( analyticsService: AnalyticsService, slackNotificationService: SlackNotificationService, mailchimpClient: MailchimpClient, - tracingDataSourceTemporaryStore: TracingDataSourceTemporaryStore, conf: WkConf, - rpc: RPC, sil: Silhouette[WkEnv])(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with UserAwareRequestLogging @@ -91,7 +70,7 @@ class AnnotationController @Inject()( // For Task and Explorational annotations, id is an annotation id. For CompoundTask, id is a task id. For CompoundProject, id is a project id. 
For CompoundTaskType, id is a task type id id: String, // Timestamp in milliseconds (time at which the request is sent) - timestamp: Long): Action[AnyContent] = sil.UserAwareAction.async { implicit request => + timestamp: Option[Long]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => log() { val notFoundMessage = if (request.identity.isEmpty) "annotation.notFound.considerLoggingIn" else "annotation.notFound" @@ -104,10 +83,14 @@ class AnnotationController @Inject()( js <- annotationService .publicWrites(annotation, request.identity, Some(restrictions)) ?~> "annotation.write.failed" _ <- Fox.runOptional(request.identity) { user => - if (typedTyp == AnnotationType.Task || typedTyp == AnnotationType.Explorational) { - timeSpanService - .logUserInteractionIfTheyArePotentialContributor(Instant(timestamp), user, annotation) // log time when a user starts working - } else Fox.successful(()) + Fox.runOptional(timestamp) { timestampDefined => + if (typedTyp == AnnotationType.Task || typedTyp == AnnotationType.Explorational) { + timeSpanService.logUserInteractionIfTheyArePotentialContributor( + Instant(timestampDefined), + user, + annotation) // log time when a user starts working + } else Fox.successful(()) + } } _ = Fox.runOptional(request.identity)(user => userDAO.updateLastActivity(user._id)) _ = request.identity.foreach { user => @@ -119,7 +102,7 @@ class AnnotationController @Inject()( def infoWithoutType(id: String, // Timestamp in milliseconds (time at which the request is sent - timestamp: Long): Action[AnyContent] = sil.UserAwareAction.async { implicit request => + timestamp: Option[Long]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => log() { for { annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND @@ -194,54 +177,6 @@ class AnnotationController @Inject()( } yield JsonOk(json, Messages("annotation.isLockedByOwner.success")) } - def addAnnotationLayer(typ: String, id: String): 
Action[AnnotationLayerParameters] = - sil.SecuredAction.async(validateJson[AnnotationLayerParameters]) { implicit request => - for { - _ <- bool2Fox(AnnotationType.Explorational.toString == typ) ?~> "annotation.addLayer.explorationalsOnly" - restrictions <- provider.restrictionsFor(typ, id) ?~> "restrictions.notFound" ~> NOT_FOUND - _ <- restrictions.allowUpdate(request.identity) ?~> "notAllowed" ~> FORBIDDEN - annotation <- provider.provideAnnotation(typ, id, request.identity) - newLayerName = request.body.name.getOrElse(AnnotationLayer.defaultNameForType(request.body.typ)) - _ <- bool2Fox(!annotation.annotationLayers.exists(_.name == newLayerName)) ?~> "annotation.addLayer.nameInUse" - organization <- organizationDAO.findOne(request.identity._organization) - _ <- annotationService.addAnnotationLayer(annotation, organization._id, request.body) - updated <- provider.provideAnnotation(typ, id, request.identity) - json <- annotationService.publicWrites(updated, Some(request.identity)) ?~> "annotation.write.failed" - } yield JsonOk(json) - } - - def addAnnotationLayerWithoutType(id: String): Action[AnnotationLayerParameters] = - sil.SecuredAction.async(validateJson[AnnotationLayerParameters]) { implicit request => - for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND - result <- addAnnotationLayer(annotation.typ.toString, id)(request) - } yield result - } - - def deleteAnnotationLayer(typ: String, id: String, layerName: String): Action[AnyContent] = - sil.SecuredAction.async { implicit request => - for { - _ <- bool2Fox(AnnotationType.Explorational.toString == typ) ?~> "annotation.deleteLayer.explorationalsOnly" - annotation <- provider.provideAnnotation(typ, id, request.identity) - _ <- bool2Fox(annotation._user == request.identity._id) ?~> "notAllowed" ~> FORBIDDEN - layer <- annotation.annotationLayers.find(annotationLayer => annotationLayer.name == layerName) ?~> Messages( - "annotation.layer.notFound", - layerName) - _ <- 
bool2Fox(annotation.annotationLayers.length != 1) ?~> "annotation.deleteLayer.onlyLayer" - _ = logger.info( - s"Deleting annotation layer $layerName (tracing id ${layer.tracingId}, typ ${layer.typ}) for annotation $id") - _ <- annotationService.deleteAnnotationLayer(annotation, layerName) - } yield Ok - } - - def deleteAnnotationLayerWithoutType(id: String, layerName: String): Action[AnyContent] = - sil.SecuredAction.async { implicit request => - for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND - result <- deleteAnnotationLayer(annotation.typ.toString, id, layerName)(request) - } yield result - } - def createExplorational(organizationId: String, datasetName: String): Action[List[AnnotationLayerParameters]] = sil.SecuredAction.async(validateJson[List[AnnotationLayerParameters]]) { implicit request => for { @@ -284,7 +219,7 @@ class AnnotationController @Inject()( ObjectId.dummyId, ObjectId.dummyId, List( - AnnotationLayer(TracingIds.dummyTracingId, + AnnotationLayer(TracingId.dummy, AnnotationLayerType.Skeleton, AnnotationLayer.defaultSkeletonLayerName, AnnotationLayerStatistics.unknown)) @@ -293,116 +228,6 @@ class AnnotationController @Inject()( } yield JsonOk(json) } - def makeHybrid(typ: String, id: String, fallbackLayerName: Option[String]): Action[AnyContent] = - sil.SecuredAction.async { implicit request => - for { - _ <- bool2Fox(AnnotationType.Explorational.toString == typ) ?~> "annotation.addLayer.explorationalsOnly" - restrictions <- provider.restrictionsFor(typ, id) ?~> "restrictions.notFound" ~> NOT_FOUND - _ <- restrictions.allowUpdate(request.identity) ?~> "notAllowed" ~> FORBIDDEN - annotation <- provider.provideAnnotation(typ, id, request.identity) - organization <- organizationDAO.findOne(request.identity._organization) - _ <- annotationService.makeAnnotationHybrid(annotation, organization._id, fallbackLayerName) ?~> "annotation.makeHybrid.failed" - updated <- provider.provideAnnotation(typ, id, request.identity) 
- json <- annotationService.publicWrites(updated, Some(request.identity)) ?~> "annotation.write.failed" - } yield JsonOk(json) - } - - def makeHybridWithoutType(id: String, fallbackLayerName: Option[String]): Action[AnyContent] = - sil.SecuredAction.async { implicit request => - for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND - result <- makeHybrid(annotation.typ.toString, id, fallbackLayerName)(request) - } yield result - } - - def downsample(typ: String, id: String, tracingId: String): Action[AnyContent] = sil.SecuredAction.async { - implicit request => - for { - _ <- bool2Fox(AnnotationType.Explorational.toString == typ) ?~> "annotation.downsample.explorationalsOnly" - restrictions <- provider.restrictionsFor(typ, id) ?~> "restrictions.notFound" ~> NOT_FOUND - _ <- restrictions.allowUpdate(request.identity) ?~> "notAllowed" ~> FORBIDDEN - annotation <- provider.provideAnnotation(typ, id, request.identity) - annotationLayer <- annotation.annotationLayers - .find(_.tracingId == tracingId) - .toFox ?~> "annotation.downsample.layerNotFound" - _ <- annotationService.downsampleAnnotation(annotation, annotationLayer) ?~> "annotation.downsample.failed" - updated <- provider.provideAnnotation(typ, id, request.identity) - json <- annotationService.publicWrites(updated, Some(request.identity)) ?~> "annotation.write.failed" - } yield JsonOk(json) - } - - def downsampleWithoutType(id: String, tracingId: String): Action[AnyContent] = sil.SecuredAction.async { - implicit request => - for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND - result <- downsample(annotation.typ.toString, id, tracingId)(request) - } yield result - } - - def addSegmentIndicesToAll(parallelBatchCount: Int, - dryRun: Boolean, - skipTracings: Option[String]): Action[AnyContent] = - sil.SecuredAction.async { implicit request => - { - for { - _ <- userService.assertIsSuperUser(request.identity._multiUser) ?~> "notAllowed" ~> FORBIDDEN - _ 
= logger.info("Running migration to add segment index to all volume annotation layers...") - skipTracingsSet = skipTracings.map(_.split(",").toSet).getOrElse(Set()) - _ = if (skipTracingsSet.nonEmpty) { - logger.info(f"Skipping these tracings: ${skipTracingsSet.mkString(",")}") - } - _ = logger.info("Gathering list of volume tracings...") - annotationLayers <- annotationLayerDAO.findAllVolumeLayers - annotationLayersFiltered = annotationLayers.filter(l => !skipTracingsSet.contains(l.tracingId)) - totalCount = annotationLayersFiltered.length - batches = batch(annotationLayersFiltered, parallelBatchCount) - _ = logger.info(f"Processing $totalCount tracings in ${batches.length} batches") - before = Instant.now - results: Seq[List[Box[Unit]]] <- Fox.combined(batches.zipWithIndex.map { - case (batch, index) => addSegmentIndicesToBatch(batch, index, dryRun) - }) - failures = results.flatMap(_.filter(_.isEmpty)) - failureCount: Int = failures.length - successCount: Int = results.map(_.count(_.isDefined)).sum - msg = s"All done (dryRun=$dryRun)! Processed $totalCount tracings in ${batches.length} batches. Took ${Instant - .since(before)}. $failureCount failures, $successCount successes." - _ = if (failures.nonEmpty) { - failures.foreach { failedBox => - logger.info(f"Failed: $failedBox") - } - } - _ = logger.info(msg) - } yield JsonOk(msg) - } - } - - private def addSegmentIndicesToBatch(annotationLayerBatch: List[AnnotationLayer], batchIndex: Int, dryRun: Boolean)( - implicit ec: ExecutionContext) = { - var processedCount = 0 - for { - tracingStore <- tracingStoreDAO.findFirst(GlobalAccessContext) ?~> "tracingStore.notFound" - client = new WKRemoteTracingStoreClient(tracingStore, null, rpc, tracingDataSourceTemporaryStore) - batchCount = annotationLayerBatch.length - results <- Fox.serialSequenceBox(annotationLayerBatch) { annotationLayer => - processedCount += 1 - logger.info( - f"Processing tracing ${annotationLayer.tracingId}. 
$processedCount of $batchCount in batch $batchIndex (${percent(processedCount, batchCount)})...") - client.addSegmentIndex(annotationLayer.tracingId, dryRun) ?~> s"add segment index failed for ${annotationLayer.tracingId}" - } - _ = logger.info(f"Batch $batchIndex is done. Processed ${annotationLayerBatch.length} tracings.") - } yield results - } - - private def batch[T](allItems: List[T], batchCount: Int): List[List[T]] = { - val batchSize: Int = Math.max(Math.min(allItems.length / batchCount, allItems.length), 1) - allItems.grouped(batchSize).toList - } - - private def percent(done: Int, pending: Int) = { - val value = done.toDouble / pending.toDouble * 100 - f"$value%1.1f %%" - } - private def finishAnnotation(typ: String, id: String, issuingUser: User, timestamp: Instant)( implicit ctx: DBAccessContext): Fox[(Annotation, String)] = for { @@ -607,33 +432,24 @@ class AnnotationController @Inject()( datasetService.dataSourceFor(dataset).flatMap(_.toUsable).map(Some(_)) else Fox.successful(None) tracingStoreClient <- tracingStoreService.clientFor(dataset) - newAnnotationLayers <- Fox.serialCombined(annotation.annotationLayers) { annotationLayer => - duplicateAnnotationLayer(annotationLayer, - annotation._task.isDefined, - dataSource.map(_.boundingBox), - tracingStoreClient) - } + newAnnotationId = ObjectId.generate + newAnnotationProto <- tracingStoreClient.duplicateAnnotation( + annotation._id, + newAnnotationId, + version = None, + isFromTask = annotation._task.isDefined, + datasetBoundingBox = dataSource.map(_.boundingBox) + ) + newAnnotationLayers = newAnnotationProto.annotationLayers.map(AnnotationLayer.fromProto) clonedAnnotation <- annotationService.createFrom(user, dataset, newAnnotationLayers, AnnotationType.Explorational, None, - annotation.description) ?~> Messages("annotation.create.failed") + annotation.description, + newAnnotationId) ?~> Messages("annotation.create.failed") } yield clonedAnnotation - private def 
duplicateAnnotationLayer(annotationLayer: AnnotationLayer, - isFromTask: Boolean, - datasetBoundingBox: Option[BoundingBox], - tracingStoreClient: WKRemoteTracingStoreClient): Fox[AnnotationLayer] = - for { - - newTracingId <- if (annotationLayer.typ == AnnotationLayerType.Skeleton) { - tracingStoreClient.duplicateSkeletonTracing(annotationLayer.tracingId, None, isFromTask) ?~> "Failed to duplicate skeleton tracing." - } else { - tracingStoreClient.duplicateVolumeTracing(annotationLayer.tracingId, isFromTask, datasetBoundingBox) ?~> "Failed to duplicate volume tracing." - } - } yield annotationLayer.copy(tracingId = newTracingId) - def tryAcquiringAnnotationMutex(id: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => logTime(slackNotificationService.noticeSlowRequest, durationThreshold = 1 second) { diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 8f183d84494..42a17b90a74 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -32,7 +32,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFo import com.scalableminds.webknossos.tracingstore.tracings.volume.{ VolumeDataZipFormat, VolumeTracingDefaults, - VolumeTracingDownsampling + VolumeTracingMags } import com.typesafe.scalalogging.LazyLogging @@ -79,6 +79,7 @@ class AnnotationIOController @Inject()( extends Controller with FoxImplicits with ProtoGeometryImplicits + with AnnotationLayerPrecedence with LazyLogging { implicit val actorSystem: ActorSystem = ActorSystem() @@ -145,7 +146,8 @@ class AnnotationIOController @Inject()( mergedSkeletonLayers ::: mergedVolumeLayers, AnnotationType.Explorational, name, - description) + description, + ObjectId.generate) _ = analyticsService.track(UploadAnnotationEvent(request.identity, annotation)) } yield JsonOk( @@ -332,10 +334,9 @@ class AnnotationIOController @Inject()( boundingBox = bbox, elementClass = 
elementClass, fallbackLayer = fallbackLayerOpt.map(_.name), - largestSegmentId = - annotationService.combineLargestSegmentIdsByPrecedence(volumeTracing.largestSegmentId, - fallbackLayerOpt.map(_.largestSegmentId)), - mags = VolumeTracingDownsampling.magsForVolumeTracing(dataSource, fallbackLayerOpt).map(vec3IntToProto), + largestSegmentId = combineLargestSegmentIdsByPrecedence(volumeTracing.largestSegmentId, + fallbackLayerOpt.map(_.largestSegmentId)), + mags = VolumeTracingMags.magsForVolumeTracing(dataSource, fallbackLayerOpt).map(vec3IntToProto), hasSegmentIndex = Some(tracingCanHaveSegmentIndex) ) } diff --git a/app/controllers/LegacyApiController.scala b/app/controllers/LegacyApiController.scala index 418e8285bac..e0a35d7ca66 100644 --- a/app/controllers/LegacyApiController.scala +++ b/app/controllers/LegacyApiController.scala @@ -5,6 +5,7 @@ import play.silhouette.api.actions.SecuredRequest import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.models.VoxelSize import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerType} +import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.scalableminds.webknossos.tracingstore.tracings.volume.MagRestrictions import models.dataset.DatasetService import models.organization.OrganizationDAO @@ -128,7 +129,7 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, } yield adaptedResult } - def annotationInfoV4(typ: String, id: String, timestamp: Long): Action[AnyContent] = sil.SecuredAction.async { + def annotationInfoV4(typ: String, id: String, timestamp: Option[Long]): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { _ <- Fox.successful(logVersioned(request)) diff --git a/app/controllers/UserTokenController.scala b/app/controllers/UserTokenController.scala index 83dd2c2175b..22127329f93 100644 --- a/app/controllers/UserTokenController.scala +++ 
b/app/controllers/UserTokenController.scala @@ -11,7 +11,7 @@ import com.scalableminds.webknossos.datastore.services.{ UserAccessAnswer, UserAccessRequest } -import com.scalableminds.webknossos.tracingstore.tracings.TracingIds +import com.scalableminds.webknossos.tracingstore.tracings.TracingId import javax.inject.Inject import models.annotation._ @@ -39,7 +39,6 @@ object RpcTokenHolder { class UserTokenController @Inject()(datasetDAO: DatasetDAO, datasetService: DatasetService, - annotationDAO: AnnotationDAO, annotationPrivateLinkDAO: AnnotationPrivateLinkDAO, userService: UserService, organizationDAO: OrganizationDAO, @@ -98,6 +97,8 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, handleDataSourceAccess(accessRequest.resourceId, accessRequest.mode, userBox)(sharingTokenAccessCtx) case AccessResourceType.tracing => handleTracingAccess(accessRequest.resourceId.name, accessRequest.mode, userBox, token) + case AccessResourceType.annotation => + handleAnnotationAccess(accessRequest.resourceId.name, accessRequest.mode, userBox, token) case AccessResourceType.jobExport => handleJobExportAccess(accessRequest.resourceId.name, accessRequest.mode, userBox) case _ => @@ -159,7 +160,19 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, private def handleTracingAccess(tracingId: String, mode: AccessMode, userBox: Box[User], - token: Option[String]): Fox[UserAccessAnswer] = { + token: Option[String]): Fox[UserAccessAnswer] = + if (tracingId == TracingId.dummy) + Fox.successful(UserAccessAnswer(granted = true)) + else + for { + annotation <- annotationInformationProvider.annotationForTracing(tracingId)(GlobalAccessContext) ?~> "annotation.notFound" + result <- handleAnnotationAccess(annotation._id.toString, mode, userBox, token) + } yield result + + private def handleAnnotationAccess(annotationId: String, + mode: AccessMode, + userBox: Box[User], + token: Option[String]): Fox[UserAccessAnswer] = { // Access is explicitly checked by userBox, not by 
DBAccessContext, as there is no token sharing for annotations // Optionally, an accessToken can be provided which explicitly looks up the read right the private link table @@ -170,16 +183,15 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, case _ => Fox.successful(false) } - if (tracingId == TracingIds.dummyTracingId) + if (annotationId == ObjectId.dummyId.toString) { Fox.successful(UserAccessAnswer(granted = true)) - else { + } else { for { - annotation <- annotationInformationProvider.annotationForTracing(tracingId)(GlobalAccessContext) ?~> "annotation.notFound" + annotation <- annotationInformationProvider.provideAnnotation(annotationId, userBox)(GlobalAccessContext) ?~> "annotation.notFound" annotationAccessByToken <- token .map(annotationPrivateLinkDAO.findOneByAccessToken) .getOrElse(Fox.empty) .futureBox - allowedByToken = annotationAccessByToken.exists(annotation._id == _._annotation) restrictions <- annotationInformationProvider.restrictionsFor( AnnotationIdentifier(annotation.typ, annotation._id))(GlobalAccessContext) ?~> "restrictions.notFound" @@ -201,7 +213,7 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, jobBox <- jobDAO.findOne(jobIdValidated)(DBAccessContext(userBox)).futureBox answer = jobBox match { case Full(_) => UserAccessAnswer(granted = true) - case _ => UserAccessAnswer(granted = false, Some(s"No ${mode} access to job export")) + case _ => UserAccessAnswer(granted = false, Some(s"No $mode access to job export")) } } yield answer } diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 99b14f82c45..4dbef690f7b 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -3,8 +3,14 @@ package controllers import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, 
FoxImplicits} +import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId -import com.scalableminds.webknossos.tracingstore.TracingUpdatesReport +import com.scalableminds.webknossos.tracingstore.AnnotationUpdatesReport +import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters +import com.scalableminds.webknossos.tracingstore.tracings.TracingId import javax.inject.Inject import models.analytics.{AnalyticsService, UpdateAnnotationEvent, UpdateAnnotationViewOnlyEvent} @@ -12,8 +18,10 @@ import models.annotation.AnnotationState._ import models.annotation.{ Annotation, AnnotationDAO, + AnnotationDefaults, AnnotationInformationProvider, AnnotationLayerDAO, + AnnotationService, TracingDataSourceTemporaryStore, TracingStoreService } @@ -24,8 +32,9 @@ import models.user.time.TimeSpanService import play.api.i18n.Messages import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} +import scalapb.GeneratedMessage import security.{WebknossosBearerTokenAuthenticatorService, WkSilhouetteEnvironment} -import utils.WkConf +import utils.{ObjectId, WkConf} import scala.concurrent.ExecutionContext @@ -37,6 +46,7 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore userDAO: UserDAO, annotationInformationProvider: AnnotationInformationProvider, analyticsService: AnalyticsService, + annotationService: AnnotationService, datasetDAO: DatasetDAO, annotationDAO: AnnotationDAO, annotationLayerDAO: AnnotationLayerDAO, @@ -50,18 +60,47 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore val bearerTokenService: WebknossosBearerTokenAuthenticatorService = 
wkSilhouetteEnvironment.combinedAuthenticatorService.tokenAuthenticatorService - def handleTracingUpdateReport(name: String, key: String): Action[TracingUpdatesReport] = - Action.async(validateJson[TracingUpdatesReport]) { implicit request => + def updateAnnotation(name: String, key: String, annotationId: String): Action[AnnotationProto] = + Action.async(validateProto[AnnotationProto]) { implicit request => + // tracingstore only sends this request after ensuring write access + implicit val ctx: DBAccessContext = GlobalAccessContext + for { + annotationIdValidated <- ObjectId.fromString(annotationId) + existingLayers <- annotationLayerDAO.findAnnotationLayersFor(annotationIdValidated) + newLayersProto = request.body.annotationLayers + existingLayerIds = existingLayers.map(_.tracingId).toSet + newLayerIds = newLayersProto.map(_.tracingId).toSet + layerIdsToDelete = existingLayerIds.diff(newLayerIds) + layerIdsToUpdate = existingLayerIds.intersect(newLayerIds) + layerIdsToInsert = newLayerIds.diff(existingLayerIds) + _ <- Fox.serialCombined(layerIdsToDelete.toList)( + annotationLayerDAO.deleteOneByTracingId(annotationIdValidated, _)) + _ <- Fox.serialCombined(newLayersProto.filter(l => layerIdsToInsert.contains(l.tracingId))) { layerProto => + annotationLayerDAO.insertOne(annotationIdValidated, AnnotationLayer.fromProto(layerProto)) + } + _ <- Fox.serialCombined(newLayersProto.filter(l => layerIdsToUpdate.contains(l.tracingId)))(l => + annotationLayerDAO.updateName(annotationIdValidated, l.tracingId, l.name)) + // Layer stats are ignored here, they are sent eagerly when saving updates + _ <- annotationDAO.updateName(annotationIdValidated, + request.body.name.getOrElse(AnnotationDefaults.defaultName)) + _ <- annotationDAO.updateDescription(annotationIdValidated, + request.body.description.getOrElse(AnnotationDefaults.defaultDescription)) + } yield Ok + } + + def handleTracingUpdateReport(name: String, key: String): Action[AnnotationUpdatesReport] = + 
Action.async(validateJson[AnnotationUpdatesReport]) { implicit request => implicit val ctx: DBAccessContext = GlobalAccessContext tracingStoreService.validateAccess(name, key) { _ => val report = request.body for { - annotation <- annotationDAO.findOneByTracingId(report.tracingId) + annotationId <- ObjectId.fromString(report.annotationId) + annotation <- annotationDAO.findOne(annotationId) _ <- ensureAnnotationNotFinished(annotation) _ <- annotationDAO.updateModified(annotation._id, Instant.now) - _ <- Fox.runOptional(report.statistics) { statistics => - annotationLayerDAO.updateStatistics(annotation._id, report.tracingId, statistics) - } + /*_ <- Fox.runOptional(report.statistics) { statistics => + annotationLayerDAO.updateStatistics(annotation._id, annotationId, statistics) + }*/ // TODO stats per tracing id. note: they might arrive before the layer is created. skip them then. userBox <- bearerTokenService.userForTokenOpt(report.userToken).futureBox trackTime = report.significantChangesCount > 0 || !wkConf.WebKnossos.User.timeTrackingOnlyWithSignificantChanges _ <- Fox.runOptional(userBox)(user => @@ -113,6 +152,20 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore } } + def annotationIdForTracing(name: String, key: String, tracingId: String): Action[AnyContent] = + Action.async { implicit request => + tracingStoreService.validateAccess(name, key) { _ => + implicit val ctx: DBAccessContext = GlobalAccessContext + if (tracingId == TracingId.dummy) { + Fox.successful(Ok(Json.toJson(ObjectId.dummyId))) + } else { + for { + annotation <- annotationInformationProvider.annotationForTracing(tracingId) ?~> s"No annotation for tracing $tracingId" + } yield Ok(Json.toJson(annotation._id)) + } + } + } + def dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], @@ -131,4 +184,28 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore } yield Ok(Json.toJson(dataStore.url)) } } + + def 
createTracing(name: String, + key: String, + annotationId: String, + previousVersion: Long): Action[AnnotationLayerParameters] = + Action.async(validateJson[AnnotationLayerParameters]) { implicit request => + tracingStoreService.validateAccess(name, key) { _ => + implicit val ctx: DBAccessContext = GlobalAccessContext + for { + annotationIdValidated <- ObjectId.fromString(annotationId) + annotation <- annotationDAO.findOne(annotationIdValidated) ?~> "annotation.notFound" + dataset <- datasetDAO.findOne(annotation._dataset) + tracingEither <- annotationService.createTracingForExplorational(dataset, + request.body, + annotation.annotationLayers, + Some(previousVersion)) + tracing: GeneratedMessage = tracingEither match { + case Left(s: SkeletonTracing) => s + case Right(v: VolumeTracing) => v + } + } yield Ok(tracing.toByteArray).as(protobufMimeType) + } + } + } diff --git a/app/models/analytics/AnalyticsService.scala b/app/models/analytics/AnalyticsService.scala index 86b2cec45c1..83aba641736 100644 --- a/app/models/analytics/AnalyticsService.scala +++ b/app/models/analytics/AnalyticsService.scala @@ -55,6 +55,7 @@ class AnalyticsService @Inject()(rpc: RPC, } val wrappedJson = Json.obj("api_key" -> conf.key, "events" -> List(analyticsEventJson)) rpc(conf.uri).silent.postJson(wrappedJson) + () } Fox.successful(()) } diff --git a/app/models/annotation/Annotation.scala b/app/models/annotation/Annotation.scala index 887bf7439c9..f4eccd723db 100755 --- a/app/models/annotation/Annotation.scala +++ b/app/models/annotation/Annotation.scala @@ -22,6 +22,11 @@ import javax.inject.Inject import scala.concurrent.ExecutionContext import scala.concurrent.duration.FiniteDuration +object AnnotationDefaults { + val defaultName: String = "" + val defaultDescription: String = "" +} + case class Annotation( _id: ObjectId, _dataset: ObjectId, @@ -29,9 +34,9 @@ case class Annotation( _team: ObjectId, _user: ObjectId, annotationLayers: List[AnnotationLayer], - description: String = "", + 
description: String = AnnotationDefaults.defaultDescription, visibility: AnnotationVisibility.Value = AnnotationVisibility.Internal, - name: String = "", + name: String = AnnotationDefaults.defaultName, viewConfiguration: Option[JsObject] = None, state: AnnotationState.Value = Active, isLockedByOwner: Boolean = false, @@ -140,13 +145,20 @@ class AnnotationLayerDAO @Inject()(SQLClient: SqlClient)(implicit ec: ExecutionC q"""INSERT INTO webknossos.annotation_layers(_annotation, tracingId, typ, name, statistics) VALUES($annotationId, ${a.tracingId}, ${a.typ}, ${a.name}, ${a.stats})""".asUpdate - def deleteOne(annotationId: ObjectId, layerName: String): Fox[Unit] = + def deleteOneByName(annotationId: ObjectId, layerName: String): Fox[Unit] = for { _ <- run(q"""DELETE FROM webknossos.annotation_layers WHERE _annotation = $annotationId AND name = $layerName""".asUpdate) } yield () + def deleteOneByTracingId(annotationId: ObjectId, tracingId: String): Fox[Unit] = + for { + _ <- run(q"""DELETE FROM webknossos.annotation_layers + WHERE _annotation = $annotationId + AND tracingId = $tracingId""".asUpdate) + } yield () + def findAnnotationIdByTracingId(tracingId: String): Fox[ObjectId] = for { rList <- run(q"SELECT _annotation FROM webknossos.annotation_layers WHERE tracingId = $tracingId".as[ObjectId]) @@ -503,6 +515,18 @@ class AnnotationDAO @Inject()(sqlClient: SqlClient, annotationLayerDAO: Annotati AND a.typ = ${AnnotationType.Task} """.as[ObjectId]) } yield r.toList + def findBaseIdForTask(taskId: ObjectId)(implicit ctx: DBAccessContext): Fox[ObjectId] = + for { + accessQuery <- readAccessQuery + r <- run(q"""SELECT _id + FROM $existingCollectionName + WHERE _task = $taskId + AND typ = ${AnnotationType.TracingBase} + AND state != ${AnnotationState.Cancelled} + AND $accessQuery""".as[ObjectId]) + firstRow <- r.headOption + } yield firstRow + def findAllByTaskIdAndType(taskId: ObjectId, typ: AnnotationType)( implicit ctx: DBAccessContext): Fox[List[Annotation]] = for { 
diff --git a/app/models/annotation/AnnotationLayerPrecedence.scala b/app/models/annotation/AnnotationLayerPrecedence.scala new file mode 100644 index 00000000000..96ac3ec77fd --- /dev/null +++ b/app/models/annotation/AnnotationLayerPrecedence.scala @@ -0,0 +1,147 @@ +package models.annotation + +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.geometry.{ + AdditionalCoordinateProto, + NamedBoundingBoxProto, + Vec3DoubleProto, + Vec3IntProto +} +import com.scalableminds.webknossos.datastore.models.annotation.{ + AnnotationLayer, + AnnotationLayerType, + FetchedAnnotationLayer +} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{VolumeDataZipFormat, VolumeTracingDefaults} +import models.dataset.Dataset + +import scala.concurrent.ExecutionContext + +// Used to pass duplicate properties when creating a new tracing to avoid masking them. +case class RedundantTracingProperties( + editPosition: Vec3IntProto, + editRotation: Vec3DoubleProto, + zoomLevel: Double, + userBoundingBoxes: Seq[NamedBoundingBoxProto], + editPositionAdditionalCoordinates: Seq[AdditionalCoordinateProto], +) + +trait AnnotationLayerPrecedence { + + protected def combineLargestSegmentIdsByPrecedence(fromNml: Option[Long], + fromFallbackLayer: Option[Option[Long]]): Option[Long] = + if (fromNml.nonEmpty) + // This was called for an NML upload. The NML had an explicit largestSegmentId. Use that. + fromNml + else if (fromFallbackLayer.nonEmpty) + // There is a fallback layer. Use its largestSegmentId, even if it is None. + // Some tracing functionality will be disabled until a segment id is set by the user. + fromFallbackLayer.flatten + else { + // There is no fallback layer. 
Start at default segment id for fresh volume layers + VolumeTracingDefaults.largestSegmentId + } + + protected def adaptSkeletonTracing( + skeletonTracing: SkeletonTracing, + oldPrecedenceLayerProperties: Option[RedundantTracingProperties]): SkeletonTracing = + oldPrecedenceLayerProperties.map { p: RedundantTracingProperties => + skeletonTracing.copy( + editPosition = p.editPosition, + editRotation = p.editRotation, + zoomLevel = p.zoomLevel, + userBoundingBoxes = p.userBoundingBoxes, + editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates + ) + }.getOrElse(skeletonTracing) + + protected def adaptVolumeTracing(volumeTracing: VolumeTracing, + oldPrecedenceLayerProperties: Option[RedundantTracingProperties]): VolumeTracing = + oldPrecedenceLayerProperties.map { p: RedundantTracingProperties => + volumeTracing.copy( + editPosition = p.editPosition, + editRotation = p.editRotation, + zoomLevel = p.zoomLevel, + userBoundingBoxes = p.userBoundingBoxes, + editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates + ) + }.getOrElse(volumeTracing) + + protected def getOldPrecedenceLayerProperties(existingAnnotationLayers: List[AnnotationLayer], + previousVersion: Option[Long], + dataset: Dataset, + tracingStoreClient: WKRemoteTracingStoreClient)( + implicit ec: ExecutionContext): Fox[Option[RedundantTracingProperties]] = + for { + oldPrecedenceLayer <- fetchOldPrecedenceLayer(existingAnnotationLayers, + previousVersion, + dataset, + tracingStoreClient) + oldPrecedenceLayerProperties: Option[RedundantTracingProperties] = oldPrecedenceLayer.map( + extractPrecedenceProperties) + } yield oldPrecedenceLayerProperties + + // If there is more than one tracing, select the one that has precedence for the parameters (they should be identical anyway) + protected def selectLayerWithPrecedenceFetched( + skeletonLayers: List[FetchedAnnotationLayer], + volumeLayers: List[FetchedAnnotationLayer])(implicit ec: ExecutionContext): Fox[FetchedAnnotationLayer] = + 
if (skeletonLayers.nonEmpty) { + Fox.successful(skeletonLayers.minBy(_.tracingId)) + } else if (volumeLayers.nonEmpty) { + Fox.successful(volumeLayers.minBy(_.tracingId)) + } else Fox.failure("annotation.download.noLayers") + + private def selectLayerWithPrecedence(annotationLayers: List[AnnotationLayer])( + implicit ec: ExecutionContext): Fox[AnnotationLayer] = { + val skeletonLayers = annotationLayers.filter(_.typ == AnnotationLayerType.Skeleton) + val volumeLayers = annotationLayers.filter(_.typ == AnnotationLayerType.Volume) + if (skeletonLayers.nonEmpty) { + Fox.successful(skeletonLayers.minBy(_.tracingId)) + } else if (volumeLayers.nonEmpty) { + Fox.successful(volumeLayers.minBy(_.tracingId)) + } else Fox.failure("Trying to select precedence layer from empty layer list.") + } + + private def fetchOldPrecedenceLayer(existingAnnotationLayers: List[AnnotationLayer], + previousVersion: Option[Long], + dataset: Dataset, + tracingStoreClient: WKRemoteTracingStoreClient)( + implicit ec: ExecutionContext): Fox[Option[FetchedAnnotationLayer]] = + if (existingAnnotationLayers.isEmpty) Fox.successful(None) + else + for { + oldPrecedenceLayer <- selectLayerWithPrecedence(existingAnnotationLayers) + oldPrecedenceLayerFetched <- if (oldPrecedenceLayer.typ == AnnotationLayerType.Skeleton) + tracingStoreClient.getSkeletonTracing(oldPrecedenceLayer, previousVersion) + else + tracingStoreClient.getVolumeTracing(oldPrecedenceLayer, + previousVersion, + skipVolumeData = true, + volumeDataZipFormat = VolumeDataZipFormat.wkw, + dataset.voxelSize) + } yield Some(oldPrecedenceLayerFetched) + + private def extractPrecedenceProperties(oldPrecedenceLayer: FetchedAnnotationLayer): RedundantTracingProperties = + oldPrecedenceLayer.tracing match { + case Left(s) => + RedundantTracingProperties( + s.editPosition, + s.editRotation, + s.zoomLevel, + s.userBoundingBoxes ++ s.userBoundingBox.map( + com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto(0, None, None, None, _)), 
+ s.editPositionAdditionalCoordinates + ) + case Right(v) => + RedundantTracingProperties( + v.editPosition, + v.editRotation, + v.zoomLevel, + v.userBoundingBoxes ++ v.userBoundingBox.map( + com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto(0, None, None, None, _)), + v.editPositionAdditionalCoordinates + ) + } +} diff --git a/app/models/annotation/AnnotationMerger.scala b/app/models/annotation/AnnotationMerger.scala index 2c251f01462..ec538208b15 100644 --- a/app/models/annotation/AnnotationMerger.scala +++ b/app/models/annotation/AnnotationMerger.scala @@ -2,11 +2,7 @@ package models.annotation import com.scalableminds.util.accesscontext.DBAccessContext import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.annotation.{ - AnnotationLayer, - AnnotationLayerStatistics, - AnnotationLayerType -} +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.typesafe.scalalogging.LazyLogging import javax.inject.Inject @@ -51,7 +47,7 @@ class AnnotationMerger @Inject()(datasetDAO: DatasetDAO, tracingStoreService: Tr Fox.empty else { for { - mergedAnnotationLayers <- mergeTracingsOfAnnotations(annotations, datasetId, persistTracing) + mergedAnnotationLayers <- mergeAnnotationsInTracingstore(annotations, datasetId, newId, persistTracing) ?~> "Failed to merge annotations in tracingstore." 
} yield { Annotation( newId, @@ -65,56 +61,16 @@ class AnnotationMerger @Inject()(datasetDAO: DatasetDAO, tracingStoreService: Tr } } - private def mergeTracingsOfAnnotations(annotations: List[Annotation], datasetId: ObjectId, persistTracing: Boolean)( - implicit ctx: DBAccessContext): Fox[List[AnnotationLayer]] = + private def mergeAnnotationsInTracingstore( + annotations: List[Annotation], + datasetId: ObjectId, + newAnnotationId: ObjectId, + persist: Boolean)(implicit ctx: DBAccessContext): Fox[List[AnnotationLayer]] = for { dataset <- datasetDAO.findOne(datasetId) tracingStoreClient: WKRemoteTracingStoreClient <- tracingStoreService.clientFor(dataset) - skeletonLayers = annotations.flatMap(_.annotationLayers.find(_.typ == AnnotationLayerType.Skeleton)) - volumeLayers = annotations.flatMap(_.annotationLayers.find(_.typ == AnnotationLayerType.Volume)) - mergedSkeletonTracingId <- mergeSkeletonTracings(tracingStoreClient, - skeletonLayers.map(_.tracingId), - persistTracing) - mergedVolumeTracingId <- mergeVolumeTracings(tracingStoreClient, volumeLayers.map(_.tracingId), persistTracing) - mergedSkeletonName = allEqual(skeletonLayers.map(_.name)) - mergedVolumeName = allEqual(volumeLayers.map(_.name)) - mergedSkeletonLayer = mergedSkeletonTracingId.map( - id => - AnnotationLayer(id, - AnnotationLayerType.Skeleton, - mergedSkeletonName.getOrElse(AnnotationLayer.defaultSkeletonLayerName), - AnnotationLayerStatistics.unknown)) - mergedVolumeLayer = mergedVolumeTracingId.map( - id => - AnnotationLayer(id, - AnnotationLayerType.Volume, - mergedVolumeName.getOrElse(AnnotationLayer.defaultVolumeLayerName), - AnnotationLayerStatistics.unknown)) - } yield List(mergedSkeletonLayer, mergedVolumeLayer).flatten - - private def allEqual(str: List[String]): Option[String] = - // returns the str if all names are equal, None otherwise - str.headOption.map(name => str.forall(_ == name)).flatMap { _ => - str.headOption - } - - private def mergeSkeletonTracings(tracingStoreClient: 
WKRemoteTracingStoreClient, - skeletonTracingIds: List[String], - persistTracing: Boolean) = - if (skeletonTracingIds.isEmpty) - Fox.successful(None) - else - tracingStoreClient - .mergeSkeletonTracingsByIds(skeletonTracingIds, persistTracing) - .map(Some(_)) ?~> "Failed to merge skeleton tracings." + mergedAnnotationProto <- tracingStoreClient.mergeAnnotationsByIds(annotations.map(_.id), newAnnotationId, persist) + layers = mergedAnnotationProto.annotationLayers.map(AnnotationLayer.fromProto) + } yield layers.toList - private def mergeVolumeTracings(tracingStoreClient: WKRemoteTracingStoreClient, - volumeTracingIds: List[String], - persistTracing: Boolean) = - if (volumeTracingIds.isEmpty) - Fox.successful(None) - else - tracingStoreClient - .mergeVolumeTracingsByIds(volumeTracingIds, persistTracing) - .map(Some(_)) ?~> "Failed to merge volume tracings." } diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 05020dc622e..8cee2a6f915 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -7,15 +7,10 @@ import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.io.{NamedStream, ZipIO} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{BoxImplicits, Fox, FoxImplicits, TextUtils} +import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationProto} import com.scalableminds.webknossos.datastore.SkeletonTracing._ import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} -import com.scalableminds.webknossos.datastore.geometry.{ - AdditionalCoordinateProto, - ColorProto, - NamedBoundingBoxProto, - Vec3DoubleProto, - Vec3IntProto -} +import com.scalableminds.webknossos.datastore.geometry.ColorProto import com.scalableminds.webknossos.datastore.helpers.{NodeDefaults, ProtoGeometryImplicits, 
SkeletonTracingDefaults} import com.scalableminds.webknossos.datastore.models.VoxelSize import com.scalableminds.webknossos.datastore.models.annotation.{ @@ -32,16 +27,14 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ SegmentationLayerLike => SegmentationLayer } import com.scalableminds.webknossos.datastore.rpc.RPC -import com.scalableminds.webknossos.tracingstore.tracings._ +import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat import com.scalableminds.webknossos.tracingstore.tracings.volume.{ MagRestrictions, - VolumeDataZipFormat, VolumeTracingDefaults, - VolumeTracingDownsampling + VolumeTracingMags } import com.typesafe.scalalogging.LazyLogging -import controllers.AnnotationLayerParameters import models.annotation.AnnotationState._ import models.annotation.AnnotationType.AnnotationType import models.annotation.handler.SavedTracingInformationHandler @@ -75,21 +68,10 @@ case class DownloadAnnotation(skeletonTracingIdOpt: Option[String], organizationId: String, datasetName: String) -// Used to pass duplicate properties when creating a new tracing to avoid masking them. -// Uses the proto-generated geometry classes, hence the full qualifiers. 
-case class RedundantTracingProperties( - editPosition: Vec3IntProto, - editRotation: Vec3DoubleProto, - zoomLevel: Double, - userBoundingBoxes: Seq[NamedBoundingBoxProto], - editPositionAdditionalCoordinates: Seq[AdditionalCoordinateProto], -) - class AnnotationService @Inject()( annotationInformationProvider: AnnotationInformationProvider, savedTracingInformationHandler: SavedTracingInformationHandler, annotationDAO: AnnotationDAO, - annotationLayersDAO: AnnotationLayerDAO, userDAO: UserDAO, taskTypeDAO: TaskTypeDAO, taskService: TaskService, @@ -114,6 +96,7 @@ class AnnotationService @Inject()( extends BoxImplicits with FoxImplicits with ProtoGeometryImplicits + with AnnotationLayerPrecedence with LazyLogging { implicit val actorSystem: ActorSystem = ActorSystem() @@ -147,7 +130,7 @@ class AnnotationService @Inject()( magRestrictions: MagRestrictions, mappingName: Option[String] ): Fox[VolumeTracing] = { - val mags = VolumeTracingDownsampling.magsForVolumeTracing(dataSource, fallbackLayer) + val mags = VolumeTracingMags.magsForVolumeTracing(dataSource, fallbackLayer) val magsRestricted = magRestrictions.filterAllowed(mags) val additionalAxes = fallbackLayer.map(_.additionalAxes).getOrElse(dataSource.additionalAxesUnion) @@ -181,56 +164,20 @@ class AnnotationService @Inject()( ) } - def combineLargestSegmentIdsByPrecedence(fromNml: Option[Long], - fromFallbackLayer: Option[Option[Long]]): Option[Long] = - if (fromNml.nonEmpty) - // This was called for an NML upload. The NML had an explicit largestSegmentId. Use that. - fromNml - else if (fromFallbackLayer.nonEmpty) - // There is a fallback layer. Use its largestSegmentId, even if it is None. - // Some tracing functionality will be disabled until a segment id is set by the user. - fromFallbackLayer.flatten - else { - // There is no fallback layer. 
Start at default segment id for fresh volume layers - VolumeTracingDefaults.largestSegmentId - } - - def addAnnotationLayer(annotation: Annotation, - organizationId: String, - annotationLayerParameters: AnnotationLayerParameters)(implicit ctx: DBAccessContext, - mp: MessagesProvider): Fox[Unit] = - for { - dataset <- datasetDAO.findOne(annotation._dataset) ?~> "dataset.notFoundForAnnotation" - dataSource <- datasetService.dataSourceFor(dataset).flatMap(_.toUsable) ?~> "dataSource.notFound" - newAnnotationLayers <- createTracingsForExplorational( - dataset, - dataSource, - List(annotationLayerParameters), - organizationId, - annotation.annotationLayers) ?~> "annotation.createTracings.failed" - _ <- annotationLayersDAO.insertForAnnotation(annotation._id, newAnnotationLayers) - } yield () - - def deleteAnnotationLayer(annotation: Annotation, layerName: String): Fox[Unit] = - for { - _ <- annotationLayersDAO.deleteOne(annotation._id, layerName) - } yield () - - private def createTracingsForExplorational(dataset: Dataset, - dataSource: DataSource, - allAnnotationLayerParameters: List[AnnotationLayerParameters], - datasetOrganizationId: String, - existingAnnotationLayers: List[AnnotationLayer] = List())( + def createTracingForExplorational(dataset: Dataset, + params: AnnotationLayerParameters, + existingAnnotationLayers: List[AnnotationLayer], + previousVersion: Option[Long])( implicit ctx: DBAccessContext, - mp: MessagesProvider): Fox[List[AnnotationLayer]] = { + mp: MessagesProvider): Fox[Either[SkeletonTracing, VolumeTracing]] = { - def getAutoFallbackLayerName: Option[String] = + def getAutoFallbackLayerName(dataSource: DataSource): Option[String] = dataSource.dataLayers.find { case _: SegmentationLayer => true case _ => false }.map(_.name) - def getFallbackLayer(fallbackLayerName: String): Fox[SegmentationLayer] = + def getFallbackLayer(dataSource: DataSource, fallbackLayerName: String): Fox[SegmentationLayer] = for { fallbackLayer <- dataSource.dataLayers 
.filter(dl => dl.name == fallbackLayerName) @@ -248,109 +195,12 @@ class AnnotationService @Inject()( fallbackLayer.elementClass) } yield fallbackLayer - def createAndSaveAnnotationLayer(annotationLayerParameters: AnnotationLayerParameters, - oldPrecedenceLayerProperties: Option[RedundantTracingProperties], - dataStore: DataStore): Fox[AnnotationLayer] = - for { - client <- tracingStoreService.clientFor(dataset) - tracingIdAndName <- annotationLayerParameters.typ match { - case AnnotationLayerType.Skeleton => - val skeleton = SkeletonTracingDefaults.createInstance.copy( - datasetName = dataset.name, - editPosition = dataSource.center, - organizationId = Some(datasetOrganizationId), - additionalAxes = AdditionalAxis.toProto(dataSource.additionalAxesUnion) - ) - val skeletonAdapted = oldPrecedenceLayerProperties.map { p => - skeleton.copy( - editPosition = p.editPosition, - editRotation = p.editRotation, - zoomLevel = p.zoomLevel, - userBoundingBoxes = p.userBoundingBoxes, - editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates - ) - }.getOrElse(skeleton) - for { - tracingId <- client.saveSkeletonTracing(skeletonAdapted) - name = annotationLayerParameters.name.getOrElse( - AnnotationLayer.defaultNameForType(annotationLayerParameters.typ)) - } yield (tracingId, name) - case AnnotationLayerType.Volume => - val autoFallbackLayerName = - if (annotationLayerParameters.autoFallbackLayer) getAutoFallbackLayerName else None - val fallbackLayerName = annotationLayerParameters.fallbackLayerName.orElse(autoFallbackLayerName) - for { - fallbackLayer <- Fox.runOptional(fallbackLayerName)(getFallbackLayer) - volumeTracing <- createVolumeTracing( - dataSource, - datasetOrganizationId, - dataStore, - fallbackLayer, - magRestrictions = annotationLayerParameters.magRestrictions.getOrElse(MagRestrictions.empty), - mappingName = annotationLayerParameters.mappingName - ) - volumeTracingAdapted = oldPrecedenceLayerProperties.map { p => - volumeTracing.copy( - 
editPosition = p.editPosition, - editRotation = p.editRotation, - zoomLevel = p.zoomLevel, - userBoundingBoxes = p.userBoundingBoxes, - editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates - ) - }.getOrElse(volumeTracing) - volumeTracingId <- client.saveVolumeTracing(volumeTracingAdapted, dataSource = Some(dataSource)) - name = annotationLayerParameters.name - .orElse(autoFallbackLayerName) - .getOrElse(AnnotationLayer.defaultNameForType(annotationLayerParameters.typ)) - } yield (volumeTracingId, name) - case _ => - Fox.failure(s"Unknown AnnotationLayerType: ${annotationLayerParameters.typ}") - } - } yield - AnnotationLayer(tracingIdAndName._1, - annotationLayerParameters.typ, - tracingIdAndName._2, - AnnotationLayerStatistics.zeroedForTyp(annotationLayerParameters.typ)) - - def fetchOldPrecedenceLayer: Fox[Option[FetchedAnnotationLayer]] = - if (existingAnnotationLayers.isEmpty) Fox.successful(None) - else - for { - oldPrecedenceLayer <- selectLayerWithPrecedence(existingAnnotationLayers) - tracingStoreClient <- tracingStoreService.clientFor(dataset) - oldPrecedenceLayerFetched <- if (oldPrecedenceLayer.typ == AnnotationLayerType.Skeleton) - tracingStoreClient.getSkeletonTracing(oldPrecedenceLayer, None) - else - tracingStoreClient.getVolumeTracing(oldPrecedenceLayer, - None, - skipVolumeData = true, - volumeDataZipFormat = VolumeDataZipFormat.wkw, - dataset.voxelSize) - } yield Some(oldPrecedenceLayerFetched) - - def extractPrecedenceProperties(oldPrecedenceLayer: FetchedAnnotationLayer): RedundantTracingProperties = - oldPrecedenceLayer.tracing match { - case Left(s) => - RedundantTracingProperties( - s.editPosition, - s.editRotation, - s.zoomLevel, - s.userBoundingBoxes ++ s.userBoundingBox.map( - com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto(0, None, None, None, _)), - s.editPositionAdditionalCoordinates - ) - case Right(v) => - RedundantTracingProperties( - v.editPosition, - v.editRotation, - v.zoomLevel, - 
v.userBoundingBoxes ++ v.userBoundingBox.map( - com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto(0, None, None, None, _)), - v.editPositionAdditionalCoordinates - ) - } - for { + dataStore <- dataStoreDAO.findOneByName(dataset._dataStore.trim) ?~> "dataStore.notFoundForDataset" + inboxDataSource <- datasetService.dataSourceFor(dataset) + dataSource <- inboxDataSource.toUsable ?~> Messages("dataset.notImported", inboxDataSource.id.name) + tracingStoreClient <- tracingStoreService.clientFor(dataset) + /* Note that the tracings have redundant properties, with a precedence logic selecting a layer from which the values are used. Adding a layer may change this precedence, so the redundant @@ -360,27 +210,82 @@ class AnnotationService @Inject()( We do this for *every* new layer, since we only later get its ID which determines the actual precedence. All of this is skipped if existingAnnotationLayers is empty. */ - oldPrecedenceLayer <- fetchOldPrecedenceLayer - dataStore <- dataStoreDAO.findOneByName(dataset._dataStore.trim) ?~> "dataStore.notFoundForDataset" - precedenceProperties = oldPrecedenceLayer.map(extractPrecedenceProperties) - newAnnotationLayers <- Fox.serialCombined(allAnnotationLayerParameters)(p => - createAndSaveAnnotationLayer(p, precedenceProperties, dataStore)) - } yield newAnnotationLayers + oldPrecedenceLayerProperties <- getOldPrecedenceLayerProperties(existingAnnotationLayers, + previousVersion, + dataset, + tracingStoreClient) + tracing <- params.typ match { + case AnnotationLayerType.Skeleton => + val skeleton = SkeletonTracingDefaults.createInstance.copy( + datasetName = dataset.name, + editPosition = dataSource.center, + organizationId = Some(dataset._organization), + additionalAxes = AdditionalAxis.toProto(dataSource.additionalAxesUnion) + ) + val skeletonAdapted = adaptSkeletonTracing(skeleton, oldPrecedenceLayerProperties) + Fox.successful(Left(skeletonAdapted)) + case AnnotationLayerType.Volume => + val 
autoFallbackLayerName = + if (params.autoFallbackLayer) getAutoFallbackLayerName(dataSource) else None + val fallbackLayerName = params.fallbackLayerName.orElse(autoFallbackLayerName) + for { + fallbackLayer <- Fox.runOptional(fallbackLayerName)(n => getFallbackLayer(dataSource, n)) + volumeTracing <- createVolumeTracing( + dataSource, + dataset._organization, + dataStore, + fallbackLayer, + magRestrictions = params.magRestrictions.getOrElse(MagRestrictions.empty), + mappingName = params.mappingName + ) + volumeTracingAdapted = adaptVolumeTracing(volumeTracing, oldPrecedenceLayerProperties) + } yield Right(volumeTracingAdapted) + } + } yield tracing } - /* - If there is more than one tracing, select the one that has precedence for the parameters (they should be identical anyway) - This needs to match the code in NmlWriter’s selectLayerWithPrecedence, though the types are different - */ - private def selectLayerWithPrecedence(annotationLayers: List[AnnotationLayer]): Fox[AnnotationLayer] = { - val skeletonLayers = annotationLayers.filter(_.typ == AnnotationLayerType.Skeleton) - val volumeLayers = annotationLayers.filter(_.typ == AnnotationLayerType.Volume) - if (skeletonLayers.nonEmpty) { - Fox.successful(skeletonLayers.minBy(_.tracingId)) - } else if (volumeLayers.nonEmpty) { - Fox.successful(volumeLayers.minBy(_.tracingId)) - } else Fox.failure("Trying to select precedence layer from empty layer list.") - } + private def createLayersForExplorational(dataset: Dataset, + annotationId: ObjectId, + allAnnotationLayerParameters: List[AnnotationLayerParameters], + existingAnnotationLayers: List[AnnotationLayer])( + implicit ctx: DBAccessContext, + mp: MessagesProvider): Fox[List[AnnotationLayer]] = + for { + tracingStoreClient <- tracingStoreService.clientFor(dataset) + newAnnotationLayers <- Fox.serialCombined(allAnnotationLayerParameters) { annotationLayerParameters => + for { + tracing <- createTracingForExplorational(dataset, + annotationLayerParameters, + 
existingAnnotationLayers, + previousVersion = None) + layerName = annotationLayerParameters.name.getOrElse( + AnnotationLayer.defaultNameForType(annotationLayerParameters.typ)) + tracingId <- tracing match { + case Left(skeleton) => tracingStoreClient.saveSkeletonTracing(skeleton) + case Right(volume) => tracingStoreClient.saveVolumeTracing(volume) + } + } yield + AnnotationLayer(tracingId, + annotationLayerParameters.typ, + layerName, + AnnotationLayerStatistics.zeroedForType(annotationLayerParameters.typ)) + } + layersProto = newAnnotationLayers.map { l => + AnnotationLayerProto( + l.tracingId, + l.name, + AnnotationLayerType.toProto(l.typ) + ) + } + annotationProto = AnnotationProto( + name = Some(AnnotationDefaults.defaultName), + description = Some(AnnotationDefaults.defaultDescription), + version = 0L, + annotationLayers = layersProto, + earliestAccessibleVersion = 0L + ) + _ <- tracingStoreClient.saveAnnotationProto(annotationId, annotationProto) + } yield newAnnotationLayers def createExplorationalFor(user: User, datasetId: ObjectId, @@ -389,52 +294,17 @@ class AnnotationService @Inject()( m: MessagesProvider): Fox[Annotation] = for { dataset <- datasetDAO.findOne(datasetId) ?~> "dataset.noAccessById" - dataSource <- datasetService.dataSourceFor(dataset) - datasetOrganization <- organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> "organization.notFound" - usableDataSource <- dataSource.toUsable ?~> Messages("dataset.notImported", dataSource.id.name) - annotationLayers <- createTracingsForExplorational(dataset, - usableDataSource, - annotationLayerParameters, - datasetOrganization._id) ?~> "annotation.createTracings.failed" + newAnnotationId = ObjectId.generate + annotationLayers <- createLayersForExplorational( + dataset, + newAnnotationId, + annotationLayerParameters, + existingAnnotationLayers = List.empty) ?~> "annotation.createTracings.failed" teamId <- selectSuitableTeam(user, dataset) ?~> "annotation.create.forbidden" - annotation 
= Annotation(ObjectId.generate, datasetId, None, teamId, user._id, annotationLayers) + annotation = Annotation(newAnnotationId, datasetId, None, teamId, user._id, annotationLayers) _ <- annotationDAO.insertOne(annotation) } yield annotation - def makeAnnotationHybrid(annotation: Annotation, organizationId: String, fallbackLayerName: Option[String])( - implicit ctx: DBAccessContext, - mp: MessagesProvider): Fox[Unit] = - for { - newAnnotationLayerType <- annotation.tracingType match { - case TracingType.skeleton => Fox.successful(AnnotationLayerType.Volume) - case TracingType.volume => Fox.successful(AnnotationLayerType.Skeleton) - case _ => Fox.failure("annotation.makeHybrid.alreadyHybrid") - } - usedFallbackLayerName = if (newAnnotationLayerType == AnnotationLayerType.Volume) fallbackLayerName else None - newAnnotationLayerParameters = AnnotationLayerParameters( - newAnnotationLayerType, - usedFallbackLayerName, - autoFallbackLayer = false, - None, - Some(MagRestrictions.empty), - Some(AnnotationLayer.defaultNameForType(newAnnotationLayerType)), - None - ) - _ <- addAnnotationLayer(annotation, organizationId, newAnnotationLayerParameters) ?~> "makeHybrid.createTracings.failed" - } yield () - - def downsampleAnnotation(annotation: Annotation, volumeAnnotationLayer: AnnotationLayer)( - implicit ctx: DBAccessContext): Fox[Unit] = - for { - dataset <- datasetDAO.findOne(annotation._dataset) ?~> "dataset.notFoundForAnnotation" - _ <- bool2Fox(volumeAnnotationLayer.typ == AnnotationLayerType.Volume) ?~> "annotation.downsample.volumeOnly" - rpcClient <- tracingStoreService.clientFor(dataset) - newVolumeTracingId <- rpcClient.duplicateVolumeTracing(volumeAnnotationLayer.tracingId, downsample = true) - _ = logger.info( - s"Replacing volume tracing ${volumeAnnotationLayer.tracingId} by downsampled copy $newVolumeTracingId for annotation ${annotation._id}.") - _ <- annotationLayersDAO.replaceTracingId(annotation._id, volumeAnnotationLayer.tracingId, newVolumeTracingId) - } 
yield () - // WARNING: needs to be repeatable, might be called multiple times for an annotation def finish(annotation: Annotation, user: User, restrictions: AnnotationRestrictions)( implicit ctx: DBAccessContext): Fox[String] = { @@ -474,54 +344,39 @@ class AnnotationService @Inject()( }).flatten } - private def baseForTask(taskId: ObjectId)(implicit ctx: DBAccessContext): Fox[Annotation] = - (for { - list <- annotationDAO.findAllByTaskIdAndType(taskId, AnnotationType.TracingBase) - } yield list.headOption.toFox).flatten - def annotationsFor(taskId: ObjectId)(implicit ctx: DBAccessContext): Fox[List[Annotation]] = annotationDAO.findAllByTaskIdAndType(taskId, AnnotationType.Task) - private def tracingsFromBase(annotationBase: Annotation, dataset: Dataset)( - implicit ctx: DBAccessContext, - m: MessagesProvider): Fox[(Option[String], Option[String])] = - for { - _ <- bool2Fox(dataset.isUsable) ?~> Messages("dataset.notImported", dataset.name) - tracingStoreClient <- tracingStoreService.clientFor(dataset) - baseSkeletonIdOpt <- annotationBase.skeletonTracingId - baseVolumeIdOpt <- annotationBase.volumeTracingId - newSkeletonId: Option[String] <- Fox.runOptional(baseSkeletonIdOpt)(skeletonId => - tracingStoreClient.duplicateSkeletonTracing(skeletonId)) - newVolumeId: Option[String] <- Fox.runOptional(baseVolumeIdOpt)(volumeId => - tracingStoreClient.duplicateVolumeTracing(volumeId)) - } yield (newSkeletonId, newVolumeId) - def createAnnotationFor(user: User, taskId: ObjectId, initializingAnnotationId: ObjectId)( implicit m: MessagesProvider, - ctx: DBAccessContext): Fox[Annotation] = { - def useAsTemplateAndInsert(annotation: Annotation) = - for { - datasetName <- datasetDAO.getNameById(annotation._dataset)(GlobalAccessContext) ?~> "dataset.notFoundForAnnotation" - dataset <- datasetDAO.findOne(annotation._dataset) ?~> Messages("dataset.noAccess", datasetName) - (newSkeletonId, newVolumeId) <- tracingsFromBase(annotation, dataset) ?~> s"Failed to use annotation base as 
template for task $taskId with annotation base ${annotation._id}" - annotationLayers <- AnnotationLayer.layersFromIds(newSkeletonId, newVolumeId) - newAnnotation = annotation.copy( - _id = initializingAnnotationId, - _user = user._id, - annotationLayers = annotationLayers, - state = Active, - typ = AnnotationType.Task, - created = Instant.now, - modified = Instant.now - ) - _ <- annotationDAO.updateInitialized(newAnnotation) - } yield newAnnotation - + ctx: DBAccessContext): Fox[Annotation] = for { - annotationBase <- baseForTask(taskId) ?~> "Failed to retrieve annotation base." - result <- useAsTemplateAndInsert(annotationBase).toFox - } yield result - } + annotationBaseId <- annotationDAO.findBaseIdForTask(taskId) ?~> "Failed to retrieve annotation base id." + annotationBase <- annotationDAO.findOne(annotationBaseId) ?~> "Failed to retrieve annotation base." + datasetName <- datasetDAO.getNameById(annotationBase._dataset)(GlobalAccessContext) ?~> "dataset.notFoundForAnnotation" + dataset <- datasetDAO.findOne(annotationBase._dataset) ?~> Messages("dataset.noAccess", datasetName) + _ <- bool2Fox(dataset.isUsable) ?~> Messages("dataset.notImported", dataset.name) + tracingStoreClient <- tracingStoreService.clientFor(dataset) + _ = logger.info( + f"task assignment. 
creating annotation $initializingAnnotationId from base $annotationBaseId for task $taskId") + duplicatedAnnotationProto <- tracingStoreClient.duplicateAnnotation( + annotationBaseId, + initializingAnnotationId, + version = None, + isFromTask = false, // isFromTask is when duplicate is called on a task annotation, not when a task is assigned + datasetBoundingBox = None + ) + newAnnotation = annotationBase.copy( + _id = initializingAnnotationId, + _user = user._id, + annotationLayers = duplicatedAnnotationProto.annotationLayers.map(AnnotationLayer.fromProto).toList, + state = Active, + typ = AnnotationType.Task, + created = Instant.now, + modified = Instant.now + ) + _ <- annotationDAO.updateInitialized(newAnnotation) + } yield newAnnotation def createSkeletonTracingBase(datasetName: String, boundingBox: Option[BoundingBox], @@ -594,13 +449,15 @@ class AnnotationService @Inject()( case _ => annotationDAO.abortInitializingAnnotation(initializingAnnotationId) } - def createAnnotationBase( + // Save annotation base to postgres AND annotation proto to tracingstore. 
+ def createAndSaveAnnotationBase( taskFox: Fox[Task], userId: ObjectId, skeletonTracingIdBox: Box[Option[String]], volumeTracingIdBox: Box[Option[String]], datasetId: ObjectId, - description: Option[String] + description: Option[String], + tracingStoreClient: WKRemoteTracingStoreClient )(implicit ctx: DBAccessContext): Fox[Unit] = for { task <- taskFox @@ -617,23 +474,33 @@ class AnnotationService @Inject()( annotationLayers, description.getOrElse(""), typ = AnnotationType.TracingBase) + annotationBaseProto = AnnotationProto( + name = Some(AnnotationDefaults.defaultName), + description = Some(AnnotationDefaults.defaultDescription), + version = 0L, + annotationLayers = annotationLayers.map(_.toProto), + earliestAccessibleVersion = 0L + ) + _ <- tracingStoreClient.saveAnnotationProto(annotationBase._id, annotationBaseProto) + _ = logger.info(s"inserting base annotation ${annotationBase._id} for task ${task._id}") _ <- annotationDAO.insertOne(annotationBase) } yield () def createFrom(user: User, dataset: Dataset, - annotationLayers: List[AnnotationLayer], + annotationLayers: Seq[AnnotationLayer], annotationType: AnnotationType, name: Option[String], - description: String): Fox[Annotation] = + description: String, + newAnnotationId: ObjectId): Fox[Annotation] = for { teamId <- selectSuitableTeam(user, dataset) - annotation = Annotation(ObjectId.generate, + annotation = Annotation(newAnnotationId, dataset._id, None, teamId, user._id, - annotationLayers, + annotationLayers.toList, description, name = name.getOrElse(""), typ = annotationType) @@ -832,31 +699,13 @@ class AnnotationService @Inject()( updated <- annotationInformationProvider.provideAnnotation(typ, id, issuingUser) } yield updated - def resetToBase(annotation: Annotation)(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[Unit] = - annotation.typ match { - case AnnotationType.Explorational => - Fox.failure("annotation.revert.tasksOnly") - case AnnotationType.Task => - for { - task <- 
taskFor(annotation) - oldSkeletonTracingIdOpt <- annotation.skeletonTracingId // This also asserts that the annotation does not have multiple volume/skeleton layers - oldVolumeTracingIdOpt <- annotation.volumeTracingId - _ = logger.warn( - s"Resetting annotation ${annotation._id} to base, discarding skeleton tracing $oldSkeletonTracingIdOpt and/or volume tracing $oldVolumeTracingIdOpt") - annotationBase <- baseForTask(task._id) - dataset <- datasetDAO.findOne(annotationBase._dataset)(GlobalAccessContext) ?~> "dataset.notFoundForAnnotation" - (newSkeletonIdOpt, newVolumeIdOpt) <- tracingsFromBase(annotationBase, dataset) - _ <- Fox.bool2Fox(newSkeletonIdOpt.isDefined || newVolumeIdOpt.isDefined) ?~> "annotation.needsEitherSkeletonOrVolume" - _ <- Fox.runOptional(newSkeletonIdOpt)(newSkeletonId => - oldSkeletonTracingIdOpt.toFox.map { oldSkeletonId => - annotationLayersDAO.replaceTracingId(annotation._id, oldSkeletonId, newSkeletonId) - }) - _ <- Fox.runOptional(newVolumeIdOpt)(newVolumeId => - oldVolumeTracingIdOpt.toFox.map { oldVolumeId => - annotationLayersDAO.replaceTracingId(annotation._id, oldVolumeId, newVolumeId) - }) - } yield () - } + def resetToBase(annotation: Annotation)(implicit ctx: DBAccessContext): Fox[Unit] = + for { + _ <- bool2Fox(annotation.typ == AnnotationType.Task) ?~> "annotation.revert.tasksOnly" + dataset <- datasetDAO.findOne(annotation._dataset) + tracingStoreClient <- tracingStoreService.clientFor(dataset) + _ <- tracingStoreClient.resetToBase(annotation._id) ?~> "annotation.revert.failed" + } yield () private def settingsFor(annotation: Annotation)(implicit ctx: DBAccessContext) = if (annotation.typ == AnnotationType.Task || annotation.typ == AnnotationType.TracingBase) diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index 4502ae0c0fd..eb516b9ed5d 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ 
b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -6,6 +6,7 @@ import com.scalableminds.util.io.ZipIO import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.bool2Fox import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} +import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracings} import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracings} import com.scalableminds.webknossos.datastore.models.VoxelSize @@ -23,6 +24,7 @@ import com.typesafe.scalalogging.LazyLogging import controllers.RpcTokenHolder import models.dataset.Dataset import net.liftweb.common.Box +import utils.ObjectId import scala.concurrent.ExecutionContext @@ -33,7 +35,7 @@ class WKRemoteTracingStoreClient( tracingDataSourceTemporaryStore: TracingDataSourceTemporaryStore)(implicit ec: ExecutionContext) extends LazyLogging { - def baseInfo = s" Dataset: ${dataset.name} Tracingstore: ${tracingStore.url}" + private def baseInfo = s" Dataset: ${dataset.name} Tracingstore: ${tracingStore.url}" def getSkeletonTracing(annotationLayer: AnnotationLayer, version: Option[Long]): Fox[FetchedAnnotationLayer] = { logger.debug("Called to get SkeletonTracing." + baseInfo) @@ -80,66 +82,67 @@ class WKRemoteTracingStoreClient( .postProtoWithJsonResponse[SkeletonTracings, List[Box[Option[String]]]](tracings) } + def saveAnnotationProto(annotationId: ObjectId, annotationProto: AnnotationProto): Fox[Unit] = { + logger.debug( + f"Called to save AnnotationProto $annotationId with layers ${annotationProto.annotationLayers.map(_.tracingId).mkString(",")}." + baseInfo) + rpc(s"${tracingStore.url}/tracings/annotation/save") + .addQueryString("token" -> RpcTokenHolder.webknossosToken) + .addQueryString("annotationId" -> annotationId.toString) + .postProto[AnnotationProto](annotationProto) + } + + // Used in duplicate route. 
History and version are kept + def duplicateAnnotation(annotationId: ObjectId, + newAnnotationId: ObjectId, + version: Option[Long], + isFromTask: Boolean, + datasetBoundingBox: Option[BoundingBox]): Fox[AnnotationProto] = { + logger.debug(s"Called to duplicate annotation $annotationId." + baseInfo) + rpc(s"${tracingStore.url}/tracings/annotation/$annotationId/duplicate").withLongTimeout + .addQueryString("token" -> RpcTokenHolder.webknossosToken) + .addQueryString("newAnnotationId" -> newAnnotationId.toString) + .addQueryStringOptional("version", version.map(_.toString)) + .addQueryStringOptional("datasetBoundingBox", datasetBoundingBox.map(_.toLiteral)) + .addQueryString("isFromTask" -> isFromTask.toString) + .postWithProtoResponse[AnnotationProto]()(AnnotationProto) + } + + // Used in task creation. History is dropped, new version will be zero. def duplicateSkeletonTracing(skeletonTracingId: String, - versionString: Option[String] = None, - isFromTask: Boolean = false, editPosition: Option[Vec3Int] = None, editRotation: Option[Vec3Double] = None, - boundingBox: Option[BoundingBox] = None): Fox[String] = { - logger.debug("Called to duplicate SkeletonTracing." + baseInfo) + boundingBox: Option[BoundingBox] = None): Fox[String] = rpc(s"${tracingStore.url}/tracings/skeleton/$skeletonTracingId/duplicate").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .addQueryStringOptional("version", versionString) .addQueryStringOptional("editPosition", editPosition.map(_.toUriLiteral)) .addQueryStringOptional("editRotation", editRotation.map(_.toUriLiteral)) .addQueryStringOptional("boundingBox", boundingBox.map(_.toLiteral)) - .addQueryString("fromTask" -> isFromTask.toString) - .postWithJsonResponse[String] - } + .postWithJsonResponse[String]() + // Used in task creation. History is dropped, new version will be zero. 
def duplicateVolumeTracing(volumeTracingId: String, - isFromTask: Boolean = false, - datasetBoundingBox: Option[BoundingBox] = None, magRestrictions: MagRestrictions = MagRestrictions.empty, - downsample: Boolean = false, editPosition: Option[Vec3Int] = None, editRotation: Option[Vec3Double] = None, - boundingBox: Option[BoundingBox] = None): Fox[String] = { - logger.debug(s"Called to duplicate volume tracing $volumeTracingId. $baseInfo") + boundingBox: Option[BoundingBox] = None): Fox[String] = rpc(s"${tracingStore.url}/tracings/volume/$volumeTracingId/duplicate").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .addQueryString("fromTask" -> isFromTask.toString) - .addQueryStringOptional("minMag", magRestrictions.minStr) - .addQueryStringOptional("maxMag", magRestrictions.maxStr) .addQueryStringOptional("editPosition", editPosition.map(_.toUriLiteral)) .addQueryStringOptional("editRotation", editRotation.map(_.toUriLiteral)) .addQueryStringOptional("boundingBox", boundingBox.map(_.toLiteral)) - .addQueryString("downsample" -> downsample.toString) - .postJsonWithJsonResponse[Option[BoundingBox], String](datasetBoundingBox) - } - - def addSegmentIndex(volumeTracingId: String, dryRun: Boolean): Fox[Unit] = - rpc(s"${tracingStore.url}/tracings/volume/$volumeTracingId/addSegmentIndex").withLongTimeout - .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .addQueryString("dryRun" -> dryRun.toString) - .silent - .post() - .map(_ => ()) - - def mergeSkeletonTracingsByIds(tracingIds: List[String], persistTracing: Boolean): Fox[String] = { - logger.debug("Called to merge SkeletonTracings by ids." 
+ baseInfo) - rpc(s"${tracingStore.url}/tracings/skeleton/mergedFromIds").withLongTimeout - .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .addQueryString("persist" -> persistTracing.toString) - .postJsonWithJsonResponse[List[TracingSelector], String](tracingIds.map(TracingSelector(_))) - } + .addQueryStringOptional("minMag", magRestrictions.minStr) + .addQueryStringOptional("maxMag", magRestrictions.maxStr) + .postWithJsonResponse[String]() - def mergeVolumeTracingsByIds(tracingIds: List[String], persistTracing: Boolean): Fox[String] = { - logger.debug("Called to merge VolumeTracings by ids." + baseInfo) - rpc(s"${tracingStore.url}/tracings/volume/mergedFromIds").withLongTimeout + def mergeAnnotationsByIds(annotationIds: List[String], + newAnnotationId: ObjectId, + persist: Boolean): Fox[AnnotationProto] = { + logger.debug(s"Called to merge ${annotationIds.length} annotations by ids." + baseInfo) + rpc(s"${tracingStore.url}/tracings/annotation/mergedFromIds").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .addQueryString("persist" -> persistTracing.toString) - .postJsonWithJsonResponse[List[TracingSelector], String](tracingIds.map(TracingSelector(_))) + .addQueryString("persist" -> persist.toString) + .addQueryString("newAnnotationId" -> newAnnotationId.toString) + .postJsonWithProtoResponse[List[String], AnnotationProto](annotationIds)(AnnotationProto) } def mergeSkeletonTracingsByContents(tracings: SkeletonTracings, persistTracing: Boolean): Fox[String] = { @@ -236,4 +239,11 @@ class WKRemoteTracingStoreClient( } yield data } + def resetToBase(annotationId: ObjectId): Fox[Unit] = + for { + _ <- rpc(s"${tracingStore.url}/tracings/annotation/$annotationId/resetToBase").withLongTimeout + .addQueryString("token" -> RpcTokenHolder.webknossosToken) + .post() + } yield () + } diff --git a/app/models/annotation/nml/NmlWriter.scala b/app/models/annotation/nml/NmlWriter.scala index ef8ddd14437..54d6b04992d 100644 --- 
a/app/models/annotation/nml/NmlWriter.scala +++ b/app/models/annotation/nml/NmlWriter.scala @@ -12,7 +12,7 @@ import com.scalableminds.webknossos.datastore.models.VoxelSize import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayerType, FetchedAnnotationLayer} import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat import com.sun.xml.txw2.output.IndentingXMLStreamWriter -import models.annotation.Annotation +import models.annotation.{Annotation, AnnotationLayerPrecedence} import models.task.Task import models.user.User @@ -37,7 +37,7 @@ case class NmlParameters( editPositionAdditionalCoordinates: Seq[AdditionalCoordinateProto] ) -class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { +class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits with AnnotationLayerPrecedence { private lazy val outputService = XMLOutputFactory.newInstance() def toNmlStream(name: String, @@ -129,7 +129,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { datasetName: String, voxelSize: Option[VoxelSize]): Fox[NmlParameters] = for { - parameterSourceAnnotationLayer <- selectLayerWithPrecedence(skeletonLayers, volumeLayers) + parameterSourceAnnotationLayer <- selectLayerWithPrecedenceFetched(skeletonLayers, volumeLayers) nmlParameters = parameterSourceAnnotationLayer.tracing match { case Left(s) => NmlParameters( @@ -168,15 +168,6 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { } } yield nmlParameters - // If there is more than one tracing, select the one that has precedence for the parameters (they should be identical anyway) - private def selectLayerWithPrecedence(skeletonLayers: List[FetchedAnnotationLayer], - volumeLayers: List[FetchedAnnotationLayer]): Fox[FetchedAnnotationLayer] = - if (skeletonLayers.nonEmpty) { - Fox.successful(skeletonLayers.minBy(_.tracingId)) - } else if (volumeLayers.nonEmpty) { - 
Fox.successful(volumeLayers.minBy(_.tracingId)) - } else Fox.failure("annotation.download.noLayers") - private def writeParameters(parameters: NmlParameters)(implicit writer: XMLStreamWriter): Unit = Xml.withinElementSync("parameters") { Xml.withinElementSync("experiment") { diff --git a/app/models/task/TaskCreationService.scala b/app/models/task/TaskCreationService.scala index 865316eef8b..3fbcb651bf7 100644 --- a/app/models/task/TaskCreationService.scala +++ b/app/models/task/TaskCreationService.scala @@ -150,7 +150,12 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, for { volumeTracingOpt <- baseAnnotation.volumeTracingId newVolumeTracingId <- volumeTracingOpt - .map(id => tracingStoreClient.duplicateVolumeTracing(id, magRestrictions = magRestrictions)) + .map( + id => + tracingStoreClient.duplicateVolumeTracing(id, + editPosition = Some(params.editPosition), + editRotation = Some(params.editRotation), + magRestrictions = magRestrictions)) .getOrElse( annotationService .createVolumeTracingBase( @@ -423,13 +428,14 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, .toList createAnnotationBaseResults: List[Fox[Unit]] = zipped.map( tuple => - annotationService.createAnnotationBase( + annotationService.createAndSaveAnnotationBase( taskFox = tuple._3, requestingUser._id, skeletonTracingIdBox = tuple._2._1, volumeTracingIdBox = tuple._2._2, dataset._id, - description = tuple._1.map(_._1.description).openOr(None) + description = tuple._1.map(_._1.description).openOr(None), + tracingStoreClient )) warnings <- warnIfTeamHasNoAccess(fullTasks.map(_._1), dataset, requestingUser) zippedTasksAndAnnotations = taskObjects zip createAnnotationBaseResults diff --git a/conf/messages b/conf/messages index 44eff4e660d..bed5b80e37d 100644 --- a/conf/messages +++ b/conf/messages @@ -188,6 +188,7 @@ annotation.volume.invalidLargestSegmentId=Cannot create tasks with fallback segm annotation.volume.magRestrictionsTooTight=Task type mag 
restrictions are too tight, resulting annotation has no magnifications. annotation.volume.magssDoNotMatch=Could not merge volume annotations, as their magnifications differ. Please ensure each annotation has the same set of mags. annotation.volume.largestSegmentIdExceedsRange=The largest segment id {0} specified for the annotation layer exceeds the range of its data type {1} +annotation.volume.noEditableMapping=This volume tracing does not have an editable mapping (not a “proofreading” annotation layer) annotation.notFound=Annotation could not be found annotation.notFound.considerLoggingIn=Annotation could not be found. If the annotation is not public, you need to log in to see it. annotation.invalid=Invalid annotation @@ -251,6 +252,7 @@ annotation.deleteLayer.explorationalsOnly=Could not delete a layer because it is annotation.deleteLayer.onlyLayer=Could not delete layer because it is the only layer in this annotation. annotation.layer.notFound=Layer could not be found. annotation.getNewestVersion.failed=Could not get the newest version information for this annotation layer +annotation.idForTracing.failed=Could not find the annotation id for this tracing id. 
mesh.notFound=Mesh could not be found mesh.write.failed=Failed to convert mesh info to json diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index 8c51aadafba..1b067b3821e 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -121,11 +121,14 @@ PUT /datastores/:name # Tracingstores GET /tracingstore controllers.TracingStoreController.listOne() POST /tracingstores/:name/handleTracingUpdateReport controllers.WKRemoteTracingStoreController.handleTracingUpdateReport(name: String, key: String) +POST /tracingstores/:name/updateAnnotation controllers.WKRemoteTracingStoreController.updateAnnotation(name: String, key: String, annotationId: String) POST /tracingstores/:name/validateUserAccess controllers.UserTokenController.validateAccessViaTracingstore(name: String, key: String, token: Option[String]) PUT /tracingstores/:name controllers.TracingStoreController.update(name: String) GET /tracingstores/:name/dataSource controllers.WKRemoteTracingStoreController.dataSourceForTracing(name: String, key: String, tracingId: String) GET /tracingstores/:name/dataSourceId controllers.WKRemoteTracingStoreController.dataSourceIdForTracing(name: String, key: String, tracingId: String) +GET /tracingstores/:name/annotationId controllers.WKRemoteTracingStoreController.annotationIdForTracing(name: String, key: String, tracingId: String) GET /tracingstores/:name/dataStoreUri/:datasetName controllers.WKRemoteTracingStoreController.dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], datasetName: String) +POST /tracingstores/:name/createTracing controllers.WKRemoteTracingStoreController.createTracing(name: String, key: String, annotationId: String, previousVersion: Long) # User access tokens for datastore authentication POST /userToken/generate controllers.UserTokenController.generateTokenForDataStore() @@ -143,22 +146,13 @@ PUT /annotations/:typ/:id/reset PATCH /annotations/:typ/:id/transfer 
controllers.AnnotationController.transfer(typ: String, id: String) PATCH /annotations/:typ/:id/editLockedState controllers.AnnotationController.editLockedState(typ: String, id: String, isLockedByOwner: Boolean) -GET /annotations/:id/info controllers.AnnotationController.infoWithoutType(id: String, timestamp: Long) -PATCH /annotations/:id/makeHybrid controllers.AnnotationController.makeHybridWithoutType(id: String, fallbackLayerName: Option[String]) -PATCH /annotations/:id/downsample controllers.AnnotationController.downsampleWithoutType(id: String, tracingId: String) -PATCH /annotations/:id/addAnnotationLayer controllers.AnnotationController.addAnnotationLayerWithoutType(id: String) -PATCH /annotations/:id/deleteAnnotationLayer controllers.AnnotationController.deleteAnnotationLayerWithoutType(id: String, layerName: String) +GET /annotations/:id/info controllers.AnnotationController.infoWithoutType(id: String, timestamp: Option[Long]) DELETE /annotations/:id controllers.AnnotationController.cancelWithoutType(id: String) POST /annotations/:id/merge/:mergedTyp/:mergedId controllers.AnnotationController.mergeWithoutType(id: String, mergedTyp: String, mergedId: String) GET /annotations/:id/download controllers.AnnotationIOController.downloadWithoutType(id: String, skeletonVersion: Option[Long], volumeVersion: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]) POST /annotations/:id/acquireMutex controllers.AnnotationController.tryAcquiringAnnotationMutex(id: String) -PATCH /annotations/addSegmentIndicesToAll controllers.AnnotationController.addSegmentIndicesToAll(parallelBatchCount: Int, dryRun: Boolean, skipTracings: Option[String]) -GET /annotations/:typ/:id/info controllers.AnnotationController.info(typ: String, id: String, timestamp: Long) -PATCH /annotations/:typ/:id/makeHybrid controllers.AnnotationController.makeHybrid(typ: String, id: String, fallbackLayerName: Option[String]) -PATCH /annotations/:typ/:id/downsample 
controllers.AnnotationController.downsample(typ: String, id: String, tracingId: String) -PATCH /annotations/:typ/:id/addAnnotationLayer controllers.AnnotationController.addAnnotationLayer(typ: String, id: String) -PATCH /annotations/:typ/:id/deleteAnnotationLayer controllers.AnnotationController.deleteAnnotationLayer(typ: String, id: String, layerName: String) +GET /annotations/:typ/:id/info controllers.AnnotationController.info(typ: String, id: String, timestamp: Option[Long]) DELETE /annotations/:typ/:id controllers.AnnotationController.cancel(typ: String, id: String) POST /annotations/:typ/:id/merge/:mergedTyp/:mergedId controllers.AnnotationController.merge(typ: String, id: String, mergedTyp: String, mergedId: String) GET /annotations/:typ/:id/download controllers.AnnotationIOController.download(typ: String, id: String, skeletonVersion: Option[Long], volumeVersion: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]) diff --git a/conf/webknossos.versioned.routes b/conf/webknossos.versioned.routes index cfbc59297f1..0de33d1551a 100644 --- a/conf/webknossos.versioned.routes +++ b/conf/webknossos.versioned.routes @@ -43,7 +43,7 @@ GET /v4/datasets/:organizationName/:datasetName co GET /v4/datasets/:organizationName/:datasetName/isValidNewName controllers.LegacyApiController.assertValidNewNameV5(organizationName: String, datasetName: String) # v4: support changes to v5 -GET /v4/annotations/:typ/:id/info controllers.LegacyApiController.annotationInfoV4(typ: String, id: String, timestamp: Long) +GET /v4/annotations/:typ/:id/info controllers.LegacyApiController.annotationInfoV4(typ: String, id: String, timestamp: Option[Long]) PATCH /v4/annotations/:typ/:id/finish controllers.LegacyApiController.annotationFinishV4(typ: String, id: String, timestamp: Long) POST /v4/annotations/:typ/:id/merge/:mergedTyp/:mergedId controllers.LegacyApiController.annotationMergeV4(typ: String, id: String, mergedTyp: String, mergedId: String) PATCH 
/v4/annotations/:typ/:id/edit controllers.LegacyApiController.annotationEditV4(typ: String, id: String) @@ -69,7 +69,7 @@ GET /v3/datasets/:organizationName/:datasetName/isValidNewName co # v3: support changes to v5 PATCH /v3/annotations/:typ/:id/finish controllers.LegacyApiController.annotationFinishV4(typ: String, id: String, timestamp: Long) -GET /v3/annotations/:typ/:id/info controllers.LegacyApiController.annotationInfoV4(typ: String, id: String, timestamp: Long) +GET /v3/annotations/:typ/:id/info controllers.LegacyApiController.annotationInfoV4(typ: String, id: String, timestamp: Option[Long]) POST /v3/annotations/:typ/:id/merge/:mergedTyp/:mergedId controllers.LegacyApiController.annotationMergeV4(typ: String, id: String, mergedTyp: String, mergedId: String) PATCH /v3/annotations/:typ/:id/edit controllers.LegacyApiController.annotationEditV4(typ: String, id: String) POST /v3/annotations/:typ/:id/duplicate controllers.LegacyApiController.annotationDuplicateV4(typ: String, id: String) diff --git a/fossildb/run.sh b/fossildb/run.sh index 55853d3b302..53f56832c2f 100755 --- a/fossildb/run.sh +++ b/fossildb/run.sh @@ -14,7 +14,6 @@ if [ ! -f "$JAR" ] || [ ! "$CURRENT_VERSION" == "$VERSION" ]; then wget -q --show-progress -O "$JAR" "$URL" fi -# Note that the editableMappings column is no longer used by wk. Still here for backwards compatibility. 
-COLLECTIONS="skeletons,skeletonUpdates,volumes,volumeData,volumeUpdates,volumeSegmentIndex,editableMappings,editableMappingUpdates,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate" +COLLECTIONS="skeletons,volumes,volumeData,volumeSegmentIndex,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate,annotations,annotationUpdates" exec java -jar "$JAR" -c "$COLLECTIONS" -d "$FOSSILDB_HOME/data" -b "$FOSSILDB_HOME/backup" diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 83adbbc3dae..17d0e2d19ad 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -1,70 +1,71 @@ import ResumableJS from "resumablejs"; import _ from "lodash"; import dayjs from "dayjs"; -import type { - APIActiveUser, - APIAnnotation, - APIAnnotationInfo, - APIAnnotationType, - APIAnnotationVisibility, - APIAnnotationWithTask, - APIBuildInfo, - APIConnectomeFile, - APIDataSource, - APIDataStore, - APIDataset, - APIDatasetId, - APIFeatureToggles, - APIHistogramData, - APIMapping, - APIMaybeUnimportedDataset, - APIMeshFile, - APIAvailableTasksReport, - APIOrganization, - APIOrganizationCompact, - APIProject, - APIProjectCreator, - APIProjectProgressReport, - APIProjectUpdater, - APIProjectWithStatus, - APIPublication, - APIMagRestrictions, - APIScript, - APIScriptCreator, - APIScriptUpdater, - APITask, - APITaskType, - APITeam, - APITimeInterval, - APITimeTrackingPerAnnotation, - APITimeTrackingSpan, - APITracingStore, - APIUpdateActionBatch, - APIUser, - APIUserLoggedTime, - APIUserTheme, - AnnotationLayerDescriptor, - AnnotationViewConfiguration, - EditableLayerProperties, - ExperienceDomainList, - ServerTracing, - TracingType, - ServerEditableMapping, - APICompoundType, - ZarrPrivateLink, - VoxelyticsWorkflowReport, - VoxelyticsChunkStatistics, - ShortLink, - VoxelyticsWorkflowListing, - 
APIPricingPlanStatus, - VoxelyticsLogLine, - APIUserCompact, - APIDatasetCompact, - MaintenanceInfo, - AdditionalCoordinate, - LayerLink, - VoxelSize, - APITimeTrackingPerUser, +import { + type APIActiveUser, + type APIAnnotation, + type APIAnnotationInfo, + type APIAnnotationType, + type APIAnnotationVisibility, + type APIAnnotationWithTask, + type APIBuildInfo, + type APIConnectomeFile, + type APIDataSource, + type APIDataStore, + type APIDataset, + type APIDatasetId, + type APIFeatureToggles, + type APIHistogramData, + type APIMapping, + type APIMaybeUnimportedDataset, + type APIMeshFile, + type APIAvailableTasksReport, + type APIOrganization, + type APIOrganizationCompact, + type APIProject, + type APIProjectCreator, + type APIProjectProgressReport, + type APIProjectUpdater, + type APIProjectWithStatus, + type APIPublication, + type APIMagRestrictions, + type APIScript, + type APIScriptCreator, + type APIScriptUpdater, + type APITask, + type APITaskType, + type APITeam, + type APITimeInterval, + type APITimeTrackingPerAnnotation, + type APITimeTrackingSpan, + type APITracingStore, + type APIUpdateActionBatch, + type APIUser, + type APIUserLoggedTime, + type APIUserTheme, + type AnnotationLayerDescriptor, + type AnnotationViewConfiguration, + type ExperienceDomainList, + type ServerTracing, + type TracingType, + type ServerEditableMapping, + type APICompoundType, + type ZarrPrivateLink, + type VoxelyticsWorkflowReport, + type VoxelyticsChunkStatistics, + type ShortLink, + type VoxelyticsWorkflowListing, + type APIPricingPlanStatus, + type VoxelyticsLogLine, + type APIUserCompact, + type APIDatasetCompact, + type MaintenanceInfo, + type AdditionalCoordinate, + type LayerLink, + type VoxelSize, + type APITimeTrackingPerUser, + AnnotationLayerType, + type APITracingStoreAnnotation, } from "types/api_flow_types"; import { APIAnnotationTypeEnum } from "types/api_flow_types"; import type { LOG_LEVELS, Vector2, Vector3 } from "oxalis/constants"; @@ -83,11 +84,11 @@ 
import type { import type { NewTask, TaskCreationResponseContainer } from "admin/task/task_create_bulk_view"; import type { QueryObject } from "admin/task/task_search_form"; import { V3 } from "libs/mjs"; -import type { Versions } from "oxalis/view/version_view"; import { enforceValidatedDatasetViewConfiguration } from "types/schemas/dataset_view_configuration_defaults"; import { parseProtoListOfLong, parseProtoTracing, + parseProtoTracingStoreAnnotation, serializeProtoListOfLong, } from "oxalis/model/helpers/proto_helpers"; import type { RequestOptions } from "libs/request"; @@ -97,7 +98,6 @@ import Toast from "libs/toast"; import * as Utils from "libs/utils"; import messages from "messages"; import window, { location } from "libs/window"; -import type { SaveQueueType } from "oxalis/model/actions/save_actions"; import type { DatasourceConfiguration } from "types/schemas/datasource.types"; import { doWithToken } from "./api/token"; import type BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; @@ -598,8 +598,6 @@ export function reOpenAnnotation( } export type EditableAnnotation = { - name: string; - description: string; visibility: APIAnnotationVisibility; tags: Array; viewConfiguration?: AnnotationViewConfiguration; @@ -642,25 +640,8 @@ export function setOthersMayEditForAnnotation( ); } -export function updateAnnotationLayer( - annotationId: string, - annotationType: APIAnnotationType, - tracingId: string, - layerProperties: EditableLayerProperties, -): Promise<{ - name: string | null | undefined; -}> { - return Request.sendJSONReceiveJSON( - `/api/annotations/${annotationType}/${annotationId}/editLayer/${tracingId}`, - { - method: "PATCH", - data: layerProperties, - }, - ); -} - type AnnotationLayerCreateDescriptor = { - typ: "Skeleton" | "Volume"; + typ: AnnotationLayerType; name: string | null | undefined; autoFallbackLayer?: boolean; fallbackLayerName?: string | null | undefined; @@ -668,20 +649,6 @@ type AnnotationLayerCreateDescriptor = { 
magRestrictions?: APIMagRestrictions | null | undefined; }; -export function addAnnotationLayer( - annotationId: string, - annotationType: APIAnnotationType, - newAnnotationLayer: AnnotationLayerCreateDescriptor, -): Promise { - return Request.sendJSONReceiveJSON( - `/api/annotations/${annotationType}/${annotationId}/addAnnotationLayer`, - { - method: "PATCH", - data: newAnnotationLayer, - }, - ); -} - export function deleteAnnotationLayer( annotationId: string, annotationType: APIAnnotationType, @@ -751,7 +718,7 @@ export function duplicateAnnotation( }); } -export async function getAnnotationInformation( +export async function getMaybeOutdatedAnnotationInformation( annotationId: string, options: RequestOptions = {}, ): Promise { @@ -764,6 +731,19 @@ export async function getAnnotationInformation( return annotation; } +export async function getNewestAnnotationInformation( + annotationId: string, + tracingstoreUrl: string, +): Promise { + const infoUrl = `${tracingstoreUrl}/tracings/annotation/${annotationId}`; + const annotationWithMessages = await Request.receiveJSON(infoUrl); // TODO adjust return type and implement proto type in frontend + + // Extract the potential messages property before returning the task to avoid + // failing e2e tests in annotations.e2e.ts + const { messages: _messages, ...annotation } = annotationWithMessages; + return annotation; +} + export async function getAnnotationCompoundInformation( annotationId: string, annotationType: APICompoundType, @@ -804,14 +784,14 @@ export function createExplorational( if (typ === "skeleton") { layers = [ { - typ: "Skeleton", + typ: AnnotationLayerType.Skeleton, name: "Skeleton", }, ]; } else if (typ === "volume") { layers = [ { - typ: "Volume", + typ: AnnotationLayerType.Volume, name: fallbackLayerName, fallbackLayerName, autoFallbackLayer, @@ -822,11 +802,11 @@ export function createExplorational( } else { layers = [ { - typ: "Skeleton", + typ: AnnotationLayerType.Skeleton, name: "Skeleton", }, { - 
typ: "Volume", + typ: AnnotationLayerType.Volume, name: fallbackLayerName, fallbackLayerName, autoFallbackLayer, @@ -841,12 +821,14 @@ export function createExplorational( export async function getTracingsForAnnotation( annotation: APIAnnotation, - versions: Versions = {}, + version: number | null | undefined, ): Promise> { - const skeletonLayers = annotation.annotationLayers.filter((layer) => layer.typ === "Skeleton"); + const skeletonLayers = annotation.annotationLayers.filter( + (layer) => layer.typ === AnnotationLayerType.Skeleton, + ); const fullAnnotationLayers = await Promise.all( annotation.annotationLayers.map((layer) => - getTracingForAnnotationType(annotation, layer, versions), + getTracingForAnnotationType(annotation, layer, version), ), ); @@ -871,27 +853,12 @@ export async function acquireAnnotationMutex( return { canEdit, blockedByUser }; } -function extractVersion( - versions: Versions, - tracingId: string, - typ: "Volume" | "Skeleton", -): number | null | undefined { - if (typ === "Skeleton") { - return versions.skeleton; - } else if (versions.volumes != null) { - return versions.volumes[tracingId]; - } - - return null; -} - export async function getTracingForAnnotationType( annotation: APIAnnotation, annotationLayerDescriptor: AnnotationLayerDescriptor, - versions: Versions = {}, + version?: number | null | undefined, // TODOM: Use this parameter ): Promise { const { tracingId, typ } = annotationLayerDescriptor; - const version = extractVersion(versions, tracingId, typ); const tracingType = typ.toLowerCase() as "skeleton" | "volume"; const possibleVersionString = version != null ? 
`&version=${version}` : ""; const tracingArrayBuffer = await doWithToken((token) => @@ -927,8 +894,7 @@ export async function getTracingForAnnotationType( export function getUpdateActionLog( tracingStoreUrl: string, - tracingId: string, - versionedObjectType: SaveQueueType, + annotationId: string, oldestVersion?: number, newestVersion?: number, ): Promise> { @@ -942,23 +908,39 @@ export function getUpdateActionLog( params.append("newestVersion", newestVersion.toString()); } return Request.receiveJSON( - `${tracingStoreUrl}/tracings/${versionedObjectType}/${tracingId}/updateActionLog?${params}`, + `${tracingStoreUrl}/tracings/annotation/${annotationId}/updateActionLog?${params}`, ); }); } export function getNewestVersionForTracing( tracingStoreUrl: string, - tracingId: string, - tracingType: SaveQueueType, + annotationId: string, ): Promise { return doWithToken((token) => Request.receiveJSON( - `${tracingStoreUrl}/tracings/${tracingType}/${tracingId}/newestVersion?token=${token}`, + `${tracingStoreUrl}/tracings/annotation/${annotationId}/newestVersion?token=${token}`, ).then((obj) => obj.version), ); } +export async function getNewestVersionOfTracing( + tracingStoreUrl: string, + annotationId: string, +): Promise { + const annotationArrayBuffer = await doWithToken((token) => + Request.receiveArraybuffer( + `${tracingStoreUrl}/tracings/annotation/${annotationId}?token=${token}`, + { + headers: { + Accept: "application/x-protobuf", + }, + }, + ), + ); + return parseProtoTracingStoreAnnotation(annotationArrayBuffer); +} + export function hasSegmentIndexInDataStore( dataStoreUrl: string, dataSetName: string, @@ -1020,17 +1002,6 @@ export async function importVolumeTracing( ); } -export function convertToHybridTracing( - annotationId: string, - fallbackLayerName: string | null | undefined, -): Promise { - return Request.receiveJSON(`/api/annotations/Explorational/${annotationId}/makeHybrid`, { - method: "PATCH", - // @ts-expect-error ts-migrate(2345) FIXME: Argument of 
type '{ method: "PATCH"; fallbackLayer... Remove this comment to see the full error message - fallbackLayerName, - }); -} - export async function downloadWithFilename(downloadUrl: string) { const link = document.createElement("a"); link.href = downloadUrl; @@ -1044,16 +1015,17 @@ export async function downloadAnnotation( annotationId: string, annotationType: APIAnnotationType, showVolumeFallbackDownloadWarning: boolean = false, - versions: Versions = {}, + _version: number | null | undefined = null, downloadFileFormat: "zarr3" | "wkw" | "nml" = "wkw", includeVolumeData: boolean = true, ) { const searchParams = new URLSearchParams(); - Object.entries(versions).forEach(([key, val]) => { + // TODO: Use the version parameter + /*Object.entries(versions).forEach(([key, val]) => { if (val != null) { searchParams.append(`${key}Version`, val.toString()); } - }); + });*/ if (includeVolumeData && showVolumeFallbackDownloadWarning) { Toast.info(messages["annotation.no_fallback_data_included"], { @@ -1620,7 +1592,7 @@ export function makeMappingEditable( ): Promise { return doWithToken((token) => Request.receiveJSON( - `${tracingStoreUrl}/tracings/volume/${tracingId}/makeMappingEditable?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${tracingId}/makeMappingEditable?token=${token}`, { method: "POST", }, @@ -2157,7 +2129,7 @@ export function getEditableAgglomerateSkeleton( ): Promise { return doWithToken((token) => Request.receiveArraybuffer( - `${tracingStoreUrl}/tracings/volume/${tracingId}/agglomerateSkeleton/${agglomerateId}?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomerateSkeleton/${agglomerateId}?token=${token}`, // The webworker code cannot do proper error handling and always expects an array buffer from the server. // However, the server might send an error json instead of an array buffer. Therefore, don't use the webworker code. 
{ @@ -2319,7 +2291,7 @@ export async function getEdgesForAgglomerateMinCut( ): Promise> { return doWithToken((token) => Request.sendJSONReceiveJSON( - `${tracingStoreUrl}/tracings/volume/${tracingId}/agglomerateGraphMinCut?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomerateGraphMinCut?token=${token}`, { data: { ...segmentsInfo, @@ -2350,7 +2322,7 @@ export async function getNeighborsForAgglomerateNode( ): Promise { return doWithToken((token) => Request.sendJSONReceiveJSON( - `${tracingStoreUrl}/tracings/volume/${tracingId}/agglomerateGraphNeighbors?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomerateGraphNeighbors?token=${token}`, { data: { ...segmentInfo, diff --git a/frontend/javascripts/admin/task/task_create_form_view.tsx b/frontend/javascripts/admin/task/task_create_form_view.tsx index 075086defad..8b90128c6dc 100644 --- a/frontend/javascripts/admin/task/task_create_form_view.tsx +++ b/frontend/javascripts/admin/task/task_create_form_view.tsx @@ -35,7 +35,7 @@ import { createTaskFromNML, createTasks, getActiveDatasetsOfMyOrganization, - getAnnotationInformation, + getMaybeOutdatedAnnotationInformation, getProjects, getScripts, getTask, @@ -481,12 +481,12 @@ function TaskCreateFormView({ taskId, history }: Props) { const annotationResponse = (await tryToAwaitPromise( - getAnnotationInformation(value, { + getMaybeOutdatedAnnotationInformation(value, { showErrorToast: false, }), )) || (await tryToAwaitPromise( - getAnnotationInformation(value, { + getMaybeOutdatedAnnotationInformation(value, { showErrorToast: false, }), )); diff --git a/frontend/javascripts/dashboard/explorative_annotations_view.tsx b/frontend/javascripts/dashboard/explorative_annotations_view.tsx index 6710eba5b37..6378e6d6511 100644 --- a/frontend/javascripts/dashboard/explorative_annotations_view.tsx +++ b/frontend/javascripts/dashboard/explorative_annotations_view.tsx @@ -68,6 +68,8 @@ import { ActiveTabContext, RenderingTabContext } 
from "./dashboard_contexts"; import type { SearchProps } from "antd/lib/input"; import { getCombinedStatsFromServerAnnotation } from "oxalis/model/accessors/annotation_accessor"; import { AnnotationStats } from "oxalis/view/right-border-tabs/dataset_info_tab_view"; +import { pushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; +import { updateMetadataOfAnnotation } from "oxalis/model/sagas/update_actions"; const { Search } = Input; const pageLength: number = 1000; @@ -384,14 +386,10 @@ class ExplorativeAnnotationsView extends React.PureComponent { }; renameTracing(tracing: APIAnnotationInfo, name: string) { - editAnnotation(tracing.id, tracing.typ, { name }) - .then(() => { - Toast.success(messages["annotation.was_edited"]); - this.updateTracingInLocalState(tracing, (t) => update(t, { name: { $set: name } })); - }) - .catch((error) => { - handleGenericError(error as Error, "Could not update the annotation name."); - }); + Store.dispatch( + pushSaveQueueTransaction([updateMetadataOfAnnotation(name)], "unused-tracing-id"), + ); + this.updateTracingInLocalState(tracing, (t) => update(t, { name: { $set: name } })); } archiveAll = () => { diff --git a/frontend/javascripts/oxalis/api/api_latest.ts b/frontend/javascripts/oxalis/api/api_latest.ts index 13d8cb52713..1aafbd8a0f0 100644 --- a/frontend/javascripts/oxalis/api/api_latest.ts +++ b/frontend/javascripts/oxalis/api/api_latest.ts @@ -7,7 +7,6 @@ import { getConstructorForElementClass } from "oxalis/model/bucket_data_handling import { type APICompoundType, APICompoundTypeEnum, type ElementClass } from "types/api_flow_types"; import { InputKeyboardNoLoop } from "libs/input"; import { M4x4, type Matrix4x4, V3, type Vector16 } from "libs/mjs"; -import type { Versions } from "oxalis/view/version_view"; import { addTreesAndGroupsAction, setActiveNodeAction, @@ -1115,7 +1114,7 @@ class TracingApi { newMaybeCompoundType: APICompoundType | null, newAnnotationId: string, newControlMode: ControlMode, - 
versions?: Versions, + version?: number | undefined | null, keepUrlState: boolean = false, ) { if (newControlMode === ControlModeEnum.VIEW) @@ -1134,7 +1133,7 @@ class TracingApi { type: newControlMode, }, false, - versions, + version, ); Store.dispatch(discardSaveQueuesAction()); Store.dispatch(wkReadyAction()); diff --git a/frontend/javascripts/oxalis/controller.tsx b/frontend/javascripts/oxalis/controller.tsx index 0912a7babe9..537977d8e9d 100644 --- a/frontend/javascripts/oxalis/controller.tsx +++ b/frontend/javascripts/oxalis/controller.tsx @@ -90,14 +90,12 @@ class Controller extends React.PureComponent { tryFetchingModel() { this.props.setControllerStatus("loading"); // Preview a working annotation version if the showVersionRestore URL parameter is supplied - const versions = Utils.hasUrlParam("showVersionRestore") - ? { - skeleton: Utils.hasUrlParam("skeletonVersion") - ? Number.parseInt(Utils.getUrlParamValue("skeletonVersion")) - : 1, - } + const version = Utils.hasUrlParam("showVersionRestore") + ? Utils.hasUrlParam("version") + ? 
Number.parseInt(Utils.getUrlParamValue("version")) + : 1 : undefined; - Model.fetch(this.props.initialMaybeCompoundType, this.props.initialCommandType, true, versions) + Model.fetch(this.props.initialMaybeCompoundType, this.props.initialCommandType, true, version) .then(() => this.modelFetchDone()) .catch((error) => { this.props.setControllerStatus("failedLoading"); diff --git a/frontend/javascripts/oxalis/default_state.ts b/frontend/javascripts/oxalis/default_state.ts index 11e4e68e2ed..08894a6b01f 100644 --- a/frontend/javascripts/oxalis/default_state.ts +++ b/frontend/javascripts/oxalis/default_state.ts @@ -177,23 +177,12 @@ const defaultState: OxalisState = { othersMayEdit: false, blockedByUser: null, annotationLayers: [], + version: 0, }, save: { - queue: { - skeleton: [], - volumes: {}, - mappings: {}, - }, - isBusyInfo: { - skeleton: false, - volumes: {}, - mappings: {}, - }, - lastSaveTimestamp: { - skeleton: 0, - volumes: {}, - mappings: {}, - }, + queue: [], + isBusy: false, + lastSaveTimestamp: 0, progressInfo: { processedActionCount: 0, totalActionCount: 0, diff --git a/frontend/javascripts/oxalis/model.ts b/frontend/javascripts/oxalis/model.ts index f2d9d5db78b..07f84588d31 100644 --- a/frontend/javascripts/oxalis/model.ts +++ b/frontend/javascripts/oxalis/model.ts @@ -1,6 +1,5 @@ import _ from "lodash"; import type { Vector3 } from "oxalis/constants"; -import type { Versions } from "oxalis/view/version_view"; import { getActiveSegmentationTracingLayer } from "oxalis/model/accessors/volumetracing_accessor"; import { getActiveMagIndexForLayer } from "oxalis/model/accessors/flycam_accessor"; import { @@ -9,7 +8,6 @@ import { isLayerVisible, } from "oxalis/model/accessors/dataset_accessor"; import { getTotalSaveQueueLength } from "oxalis/model/reducers/save_reducer"; -import { isBusy } from "oxalis/model/accessors/save_accessor"; import { isDatasetAccessibleBySwitching } from "admin/admin_rest_api"; import { saveNowAction } from 
"oxalis/model/actions/save_actions"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; @@ -33,14 +31,14 @@ export class OxalisModel { initialMaybeCompoundType: APICompoundType | null, initialCommandType: TraceOrViewCommand, initialFetch: boolean, - versions?: Versions, + version?: number | undefined | null, ) { try { const initializationInformation = await initialize( initialMaybeCompoundType, initialCommandType, initialFetch, - versions, + version, ); if (initializationInformation) { @@ -283,8 +281,7 @@ export class OxalisModel { stateSaved() { const state = Store.getState(); - const storeStateSaved = - !isBusy(state.save.isBusyInfo) && getTotalSaveQueueLength(state.save.queue) === 0; + const storeStateSaved = !state.save.isBusy && getTotalSaveQueueLength(state.save.queue) === 0; const pushQueuesSaved = _.reduce( this.dataLayers, @@ -341,7 +338,7 @@ export class OxalisModel { // The dispatch of the saveNowAction IN the while loop is deliberate. // Otherwise if an update action is pushed to the save queue during the Utils.sleep, // the while loop would continue running until the next save would be triggered. 
- if (!isBusy(Store.getState().save.isBusyInfo)) { + if (!Store.getState().save.isBusy) { Store.dispatch(saveNowAction()); } diff --git a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts index 1949bbadd23..092a5657d80 100644 --- a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts @@ -1,6 +1,5 @@ import _ from "lodash"; import type { OxalisState, Tracing } from "oxalis/store"; -import { getVolumeTracingById } from "./volumetracing_accessor"; import type { APIAnnotationInfo } from "types/api_flow_types"; import type { EmptyObject } from "types/globals"; @@ -47,60 +46,35 @@ type TracingStatsHelper = { // biome-ignore lint/complexity/noBannedTypes: {} should be avoided actually export type CombinedTracingStats = (SkeletonTracingStats | {}) & (VolumeTracingStats | {}); -export function getStats( - tracing: Tracing, - saveQueueType: "skeleton" | "volume" | "mapping", - tracingId: string, -): TracingStats | null { - switch (saveQueueType) { - case "skeleton": { - if (!tracing.skeleton) { - return null; - } - const trees = tracing.skeleton.trees; - return { - treeCount: _.size(trees), - nodeCount: _.reduce(trees, (sum, tree) => sum + tree.nodes.size(), 0), - edgeCount: _.reduce(trees, (sum, tree) => sum + tree.edges.size(), 0), - branchPointCount: _.reduce(trees, (sum, tree) => sum + _.size(tree.branchPoints), 0), - }; - } - case "volume": { - const volumeTracing = getVolumeTracingById(tracing, tracingId); - return { - segmentCount: volumeTracing.segments.size(), - }; - } - default: - return null; +export function getStats(tracing: Tracing): CombinedTracingStats { + const { skeleton, volumes } = tracing; + let totalSegmentCount = 0; + for (const volumeTracing of volumes) { + totalSegmentCount += volumeTracing.segments.size(); } -} - -export function getCombinedStats(tracing: Tracing): 
CombinedTracingStats { - const aggregatedStats: TracingStatsHelper = {}; - - if (tracing.skeleton) { - const skeletonStats = getStats(tracing, "skeleton", tracing.skeleton.tracingId); - if (skeletonStats && "treeCount" in skeletonStats) { - const { treeCount, nodeCount, edgeCount, branchPointCount } = skeletonStats; - aggregatedStats.treeCount = treeCount; - aggregatedStats.nodeCount = nodeCount; - aggregatedStats.edgeCount = edgeCount; - aggregatedStats.branchPointCount = branchPointCount; - } + let stats: TracingStats = { + segmentCount: totalSegmentCount, + }; + if (skeleton) { + stats = { + ...stats, + treeCount: _.size(skeleton.trees), + nodeCount: _.reduce(skeleton.trees, (sum, tree) => sum + tree.nodes.size(), 0), + edgeCount: _.reduce(skeleton.trees, (sum, tree) => sum + tree.edges.size(), 0), + branchPointCount: _.reduce(skeleton.trees, (sum, tree) => sum + _.size(tree.branchPoints), 0), + }; } + return stats; +} +export function getCreationTimestamp(tracing: Tracing) { + let timestamp = tracing.skeleton?.createdTimestamp; for (const volumeTracing of tracing.volumes) { - const volumeStats = getStats(tracing, "volume", volumeTracing.tracingId); - if (volumeStats && "segmentCount" in volumeStats) { - if (aggregatedStats.segmentCount == null) { - aggregatedStats.segmentCount = 0; - } - aggregatedStats.segmentCount += volumeStats.segmentCount; + if (!timestamp || volumeTracing.createdTimestamp < timestamp) { + timestamp = volumeTracing.createdTimestamp; } } - - return aggregatedStats; + return timestamp || 0; } export function getCombinedStatsFromServerAnnotation( diff --git a/frontend/javascripts/oxalis/model/accessors/save_accessor.ts b/frontend/javascripts/oxalis/model/accessors/save_accessor.ts deleted file mode 100644 index e35ed69805d..00000000000 --- a/frontend/javascripts/oxalis/model/accessors/save_accessor.ts +++ /dev/null @@ -1,27 +0,0 @@ -import type { IsBusyInfo, OxalisState, SaveQueueEntry } from "oxalis/store"; -import type { SaveQueueType } 
from "oxalis/model/actions/save_actions"; -import * as Utils from "libs/utils"; - -export function isBusy(isBusyInfo: IsBusyInfo): boolean { - return ( - isBusyInfo.skeleton || - Utils.values(isBusyInfo.volumes).some((el) => el) || - Utils.values(isBusyInfo.mappings).some((el) => el) - ); -} -export function selectQueue( - state: OxalisState, - saveQueueType: SaveQueueType, - tracingId: string, -): Array { - switch (saveQueueType) { - case "skeleton": - return state.save.queue.skeleton; - case "volume": - return state.save.queue.volumes[tracingId]; - case "mapping": - return state.save.queue.mappings[tracingId]; - default: - throw new Error(`Unknown save queue type: ${saveQueueType}`); - } -} diff --git a/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts b/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts index b30708bb6c9..f668fc3f7c5 100644 --- a/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts @@ -1,10 +1,11 @@ import Maybe from "data.maybe"; import _ from "lodash"; -import type { - ServerTracing, - ServerSkeletonTracing, - APIAnnotation, - AnnotationLayerDescriptor, +import { + type ServerTracing, + type ServerSkeletonTracing, + type APIAnnotation, + type AnnotationLayerDescriptor, + AnnotationLayerType, } from "types/api_flow_types"; import type { Tracing, @@ -41,7 +42,7 @@ export function getSkeletonDescriptor( annotation: APIAnnotation, ): AnnotationLayerDescriptor | null | undefined { const skeletonLayers = annotation.annotationLayers.filter( - (descriptor) => descriptor.typ === "Skeleton", + (descriptor) => descriptor.typ === AnnotationLayerType.Skeleton, ); if (skeletonLayers.length > 0) { diff --git a/frontend/javascripts/oxalis/model/actions/annotation_actions.ts b/frontend/javascripts/oxalis/model/actions/annotation_actions.ts index 1aa7ff5e470..a362151b945 100644 --- 
a/frontend/javascripts/oxalis/model/actions/annotation_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/annotation_actions.ts @@ -20,10 +20,10 @@ import Deferred from "libs/async/deferred"; import type { AdditionalCoordinate } from "types/api_flow_types"; type InitializeAnnotationAction = ReturnType; -type SetAnnotationNameAction = ReturnType; +export type SetAnnotationNameAction = ReturnType; type SetAnnotationVisibilityAction = ReturnType; export type EditAnnotationLayerAction = ReturnType; -type SetAnnotationDescriptionAction = ReturnType; +export type SetAnnotationDescriptionAction = ReturnType; type SetAnnotationAllowUpdateAction = ReturnType; type SetBlockedByUserAction = ReturnType; type SetUserBoundingBoxesAction = ReturnType; diff --git a/frontend/javascripts/oxalis/model/actions/save_actions.ts b/frontend/javascripts/oxalis/model/actions/save_actions.ts index dca4997b9f6..64f2c04eadc 100644 --- a/frontend/javascripts/oxalis/model/actions/save_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/save_actions.ts @@ -30,14 +30,12 @@ export type SaveAction = export const pushSaveQueueTransaction = ( items: Array, - saveQueueType: SaveQueueType, tracingId: string, transactionId: string = getUid(), ) => ({ type: "PUSH_SAVE_QUEUE_TRANSACTION", items, - saveQueueType, tracingId, transactionId, }) as const; @@ -47,16 +45,10 @@ export const saveNowAction = () => type: "SAVE_NOW", }) as const; -export const shiftSaveQueueAction = ( - count: number, - saveQueueType: SaveQueueType, - tracingId: string, -) => +export const shiftSaveQueueAction = (count: number) => ({ type: "SHIFT_SAVE_QUEUE", count, - saveQueueType, - tracingId, }) as const; export const discardSaveQueuesAction = () => @@ -64,36 +56,22 @@ export const discardSaveQueuesAction = () => type: "DISCARD_SAVE_QUEUES", }) as const; -export const setSaveBusyAction = ( - isBusy: boolean, - saveQueueType: SaveQueueType, - tracingId: string, -) => +export const setSaveBusyAction = (isBusy: boolean) 
=> ({ type: "SET_SAVE_BUSY", isBusy, - saveQueueType, - tracingId, }) as const; -export const setLastSaveTimestampAction = (saveQueueType: SaveQueueType, tracingId: string) => +export const setLastSaveTimestampAction = () => ({ type: "SET_LAST_SAVE_TIMESTAMP", timestamp: Date.now(), - saveQueueType, - tracingId, }) as const; -export const setVersionNumberAction = ( - version: number, - saveQueueType: SaveQueueType, - tracingId: string, -) => +export const setVersionNumberAction = (version: number) => ({ type: "SET_VERSION_NUMBER", version, - saveQueueType, - tracingId, }) as const; export const undoAction = (callback?: () => void) => diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts index 894261633a5..c5e1294bf6e 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts @@ -17,6 +17,7 @@ const PUSH_DEBOUNCE_TIME = 1000; class PushQueue { cube: DataCube; + tracingId: string; // The pendingBuckets contains all buckets that should be: // - snapshotted, @@ -41,8 +42,9 @@ class PushQueue { // transaction. private waitTimeStartTimeStamp: number | null = null; - constructor(cube: DataCube) { + constructor(cube: DataCube, tracingId: string) { this.cube = cube; + this.tracingId = tracingId; this.pendingBuckets = new Set(); } @@ -131,7 +133,7 @@ class PushQueue { push = createDebouncedAbortableParameterlessCallable(this.pushImpl, PUSH_DEBOUNCE_TIME, this); - async pushTransaction(batch: Array): Promise { + private async pushTransaction(batch: Array): Promise { /* * Create a transaction from the batch and push it into the save queue. 
*/ @@ -152,7 +154,7 @@ const items = await this.fifoResolver.orderedWaitFor( createCompressedUpdateBucketActions(batch), ); - Store.dispatch(pushSaveQueueTransaction(items, "volume", this.cube.layerName)); + Store.dispatch(pushSaveQueueTransaction(items, this.tracingId)); this.compressingBucketCount -= batch.length; } catch (error) { diff --git a/frontend/javascripts/oxalis/model/data_layer.ts b/frontend/javascripts/oxalis/model/data_layer.ts index a473c9edc7d..8834b7b311b 100644 --- a/frontend/javascripts/oxalis/model/data_layer.ts +++ b/frontend/javascripts/oxalis/model/data_layer.ts @@ -21,7 +21,12 @@ class DataLayer { fallbackLayerInfo: DataLayerType | null | undefined; isSegmentation: boolean; - constructor(layerInfo: DataLayerType, textureWidth: number, dataTextureCount: number) { + constructor( + layerInfo: DataLayerType, + textureWidth: number, + dataTextureCount: number, + tracingId: string, + ) { this.name = layerInfo.name; this.fallbackLayer = "fallbackLayer" in layerInfo && layerInfo.fallbackLayer != null @@ -46,7 +51,7 @@ class DataLayer { this.name, ); this.pullQueue = new PullQueue(this.cube, layerInfo.name, dataset.dataStore); - this.pushQueue = new PushQueue(this.cube); + this.pushQueue = new PushQueue(this.cube, tracingId); this.cube.initializeWithQueues(this.pullQueue, this.pushQueue); if (this.isSegmentation) { diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts index 6ab8bab4525..710ce9f273a 100644 --- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts +++ b/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts @@ -5,7 +5,10 @@ function removeAllButLastUpdateTracingAction(updateActionsBatches: Array batch.actions.length === 1 && batch.actions[0].name === 
"updateSkeletonTracing" || + batch.actions[0].name === "updateVolumeTracing"), ); return _.without(updateActionsBatches, ...updateTracingOnlyBatches.slice(0, -1)); } diff --git a/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts b/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts index cd3430779d9..3af4f4e3c13 100644 --- a/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts +++ b/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts @@ -1,9 +1,11 @@ import { Root } from "protobufjs/light"; -import type { ServerTracing } from "types/api_flow_types"; +import type { APITracingStoreAnnotation, ServerTracing } from "types/api_flow_types"; // @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'SkeletonTracing.proto' or its ... Remove this comment to see the full error message import SkeletonTracingProto from "SkeletonTracing.proto"; // @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'VolumeTracing.proto' or its co... Remove this comment to see the full error message import VolumeTracingProto from "VolumeTracing.proto"; +// @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'AnnotationProto.proto' or its co... Remove this comment to see the full error message +import AnnotationProto from "Annotation.proto"; // @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'ListOfLong.proto' or its co... 
Remove this comment to see the full error message import ListOfLongProto from "ListOfLong.proto"; import { isBigInt } from "libs/utils"; @@ -64,4 +66,16 @@ export function parseProtoListOfLong( longs: Number, }).items; } + +export function parseProtoTracingStoreAnnotation(annotationArrayBuffer: ArrayBuffer): any { + const protoRoot = Root.fromJSON(AnnotationProto); + const messageType = protoRoot.lookupType(`${PROTO_PACKAGE}.AnnotationProto`); + const message = messageType.decode(new Uint8Array(annotationArrayBuffer)); + return messageType.toObject(message, { + arrays: true, + objects: true, + enums: String, + longs: Number, + }) as APITracingStoreAnnotation; +} export default {}; diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 7460f0e8de9..2bfd14c7f49 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -1,23 +1,14 @@ import _ from "lodash"; import update from "immutability-helper"; import type { Action } from "oxalis/model/actions/actions"; -import type { OxalisState, SaveState, SaveQueueEntry } from "oxalis/store"; -import type { - SetVersionNumberAction, - SetLastSaveTimestampAction, - SaveQueueType, -} from "oxalis/model/actions/save_actions"; +import type { OxalisState, SaveState } from "oxalis/store"; +import type { SetVersionNumberAction } from "oxalis/model/actions/save_actions"; import { getActionLog } from "oxalis/model/helpers/action_logger_middleware"; -import { getStats } from "oxalis/model/accessors/annotation_accessor"; +import { type CombinedTracingStats, getStats } from "oxalis/model/accessors/annotation_accessor"; import { MAXIMUM_ACTION_COUNT_PER_BATCH } from "oxalis/model/sagas/save_saga_constants"; -import { selectQueue } from "oxalis/model/accessors/save_accessor"; -import { updateKey2 } from "oxalis/model/helpers/deep_update"; -import { - updateEditableMapping, - 
updateVolumeTracing, -} from "oxalis/model/reducers/volumetracing_reducer_helpers"; +import { updateKey, updateKey2 } from "oxalis/model/helpers/deep_update"; import Date from "libs/date"; -import * as Utils from "libs/utils"; +import type { UpdateAction, UpdateActionWithTracingId } from "../sagas/update_actions"; // These update actions are not idempotent. Having them // twice in the save queue causes a corruption of the current annotation. @@ -31,130 +22,39 @@ const NOT_IDEMPOTENT_ACTIONS = [ "deleteNode", ]; -type TracingDict = { - skeleton: V; - volumes: Record; - mappings: Record; -}; - -function updateTracingDict( - action: { saveQueueType: SaveQueueType; tracingId: string }, - oldDict: TracingDict, - newValue: V, -): TracingDict { - if (action.saveQueueType === "skeleton") { - return { ...oldDict, skeleton: newValue }; - } else if (action.saveQueueType === "volume") { - return { - ...oldDict, - volumes: { ...oldDict.volumes, [action.tracingId]: newValue }, - }; - } else if (action.saveQueueType === "mapping") { - return { - ...oldDict, - mappings: { ...oldDict.mappings, [action.tracingId]: newValue }, - }; - } - - return oldDict; -} - export function getTotalSaveQueueLength(queueObj: SaveState["queue"]) { - return ( - queueObj.skeleton.length + - _.sum( - Utils.values(queueObj.volumes).map((volumeQueue: SaveQueueEntry[]) => volumeQueue.length), - ) + - _.sum( - Utils.values(queueObj.mappings).map((mappingQueue: SaveQueueEntry[]) => mappingQueue.length), - ) - ); + return queueObj.length; } function updateVersion(state: OxalisState, action: SetVersionNumberAction) { - if (action.saveQueueType === "skeleton" && state.tracing.skeleton != null) { - return updateKey2(state, "tracing", "skeleton", { - version: action.version, - }); - } else if (action.saveQueueType === "volume") { - return updateVolumeTracing(state, action.tracingId, { - version: action.version, - }); - } else if (action.saveQueueType === "mapping") { - return updateEditableMapping(state, 
action.tracingId, { - version: action.version, - }); - } - - return state; -} - -function updateLastSaveTimestamp(state: OxalisState, action: SetLastSaveTimestampAction) { - if (action.saveQueueType === "skeleton") { - return updateKey2(state, "save", "lastSaveTimestamp", { - skeleton: action.timestamp, - }); - } else if (action.saveQueueType === "volume") { - const newVolumesDict = { - ...state.save.lastSaveTimestamp.volumes, - [action.tracingId]: action.timestamp, - }; - return updateKey2(state, "save", "lastSaveTimestamp", { - volumes: newVolumesDict, - }); - } else if (action.saveQueueType === "mapping") { - const newMappingsDict = { - ...state.save.lastSaveTimestamp.mappings, - [action.tracingId]: action.timestamp, - }; - return updateKey2(state, "save", "lastSaveTimestamp", { - mappings: newMappingsDict, - }); - } - - return state; + return updateKey(state, "tracing", { + version: action.version, + }); } function SaveReducer(state: OxalisState, action: Action): OxalisState { switch (action.type) { - case "INITIALIZE_VOLUMETRACING": { - // Set up empty save queue array for volume tracing - const newVolumesQueue = { ...state.save.queue.volumes, [action.tracing.id]: [] }; - return updateKey2(state, "save", "queue", { - volumes: newVolumesQueue, - }); - } - - case "INITIALIZE_EDITABLE_MAPPING": { - // Set up empty save queue array for editable mapping - const newMappingsQueue = { ...state.save.queue.mappings, [action.mapping.tracingId]: [] }; - return updateKey2(state, "save", "queue", { - mappings: newMappingsQueue, - }); - } - case "PUSH_SAVE_QUEUE_TRANSACTION": { - const { items, transactionId } = action; - if (items.length === 0) { - return state; - } - // Only report tracing statistics, if a "real" update to the tracing happened - const stats = _.some(action.items, (ua) => ua.name !== "updateTracing") - ? getStats(state.tracing, action.saveQueueType, action.tracingId) + // Use `dispatchedAction` to better distinguish this variable from + // update actions. 
+ const dispatchedAction = action; + const { items, transactionId } = dispatchedAction; + const stats: CombinedTracingStats | null = _.some( + dispatchedAction.items, + (ua) => ua.name !== "updateSkeletonTracing" && ua.name !== "updateVolumeTracing", + ) + ? getStats(state.tracing) : null; const { activeUser } = state; if (activeUser == null) { throw new Error("Tried to save something even though user is not logged in."); } - const updateActionChunks = _.chunk( - items, - MAXIMUM_ACTION_COUNT_PER_BATCH[action.saveQueueType], - ); + const updateActionChunks = _.chunk(items, MAXIMUM_ACTION_COUNT_PER_BATCH); const transactionGroupCount = updateActionChunks.length; const actionLogInfo = JSON.stringify(getActionLog().slice(-10)); - const oldQueue = selectQueue(state, action.saveQueueType, action.tracingId); + const oldQueue = state.save.queue; const newQueue = oldQueue.concat( updateActionChunks.map((actions, transactionGroupIndex) => ({ // Placeholder, the version number will be updated before sending to the server @@ -164,7 +64,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { transactionGroupIndex, timestamp: Date.now(), authorId: activeUser.id, - actions, + actions: addTracingIdToActions(actions, dispatchedAction.tracingId), stats, // Redux Action Log context for debugging purposes. info: actionLogInfo, @@ -176,7 +76,6 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { // caught by the following check. If the bug appears again, we can investigate with more // details thanks to airbrake. 
if ( - action.saveQueueType === "skeleton" && oldQueue.length > 0 && newQueue.length > 0 && newQueue.at(-1)?.actions.some((action) => NOT_IDEMPOTENT_ACTIONS.includes(action.name)) && @@ -192,11 +91,10 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { ); } - const newQueueObj = updateTracingDict(action, state.save.queue, newQueue); return update(state, { save: { queue: { - $set: newQueueObj, + $set: newQueue, }, progressInfo: { totalActionCount: { @@ -211,7 +109,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { const { count } = action; if (count > 0) { - const queue = selectQueue(state, action.saveQueueType, action.tracingId); + const queue = state.save.queue; const processedQueueActionCount = _.sumBy( queue.slice(0, count), @@ -219,13 +117,12 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { ); const remainingQueue = queue.slice(count); - const newQueueObj = updateTracingDict(action, state.save.queue, remainingQueue); - const remainingQueueLength = getTotalSaveQueueLength(newQueueObj); + const remainingQueueLength = getTotalSaveQueueLength(remainingQueue); const resetCounter = remainingQueueLength === 0; return update(state, { save: { queue: { - $set: newQueueObj, + $set: remainingQueue, }, progressInfo: { // Reset progress counters if the queue is empty. 
Otherwise, @@ -248,11 +145,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { return update(state, { save: { queue: { - $set: { - skeleton: [], - volumes: _.mapValues(state.save.queue.volumes, () => []), - mappings: _.mapValues(state.save.queue.mappings, () => []), - }, + $set: [], }, progressInfo: { processedActionCount: { @@ -267,18 +160,17 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { } case "SET_SAVE_BUSY": { - const newIsBusyInfo = updateTracingDict(action, state.save.isBusyInfo, action.isBusy); return update(state, { save: { - isBusyInfo: { - $set: newIsBusyInfo, + isBusy: { + $set: action.isBusy, }, }, }); } case "SET_LAST_SAVE_TIMESTAMP": { - return updateLastSaveTimestamp(state, action); + return updateKey2(state, "save", "lastSaveTimestamp", action.timestamp); } case "SET_VERSION_NUMBER": { @@ -301,4 +193,31 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { } } +const layerIndependentActions = new Set([ + "updateTdCamera", + "revertToVersion", + "addLayerToAnnotation", + "deleteLayerFromAnnotation", + "updateLayerMetadata", + "updateMetadataOfAnnotation", +]); + +export function addTracingIdToActions( + actions: UpdateAction[], + tracingId: string, +): Array { + return actions.map((action) => { + if (layerIndependentActions.has(action.name)) { + return action as UpdateAction; + } + return { + ...action, + value: { + ...action.value, + actionTracingId: tracingId, + }, + } as UpdateActionWithTracingId; + }); +} + export default SaveReducer; diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx index c93961d49cd..5bb550e9eb0 100644 --- a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx @@ -4,16 +4,14 @@ import type { Action } from "oxalis/model/actions/actions"; import { type EditAnnotationLayerAction, 
setAnnotationAllowUpdateAction, + type SetAnnotationDescriptionAction, + type SetAnnotationNameAction, setBlockedByUserAction, type SetOthersMayEditForAnnotationAction, } from "oxalis/model/actions/annotation_actions"; import type { EditableAnnotation } from "admin/admin_rest_api"; import type { ActionPattern } from "redux-saga/effects"; -import { - editAnnotation, - updateAnnotationLayer, - acquireAnnotationMutex, -} from "admin/admin_rest_api"; +import { editAnnotation, acquireAnnotationMutex } from "admin/admin_rest_api"; import { SETTINGS_MAX_RETRY_COUNT, SETTINGS_RETRY_DELAY, @@ -47,12 +45,37 @@ import { determineLayout } from "oxalis/view/layouting/default_layout_configs"; import { getLastActiveLayout, getLayoutConfig } from "oxalis/view/layouting/layout_persistence"; import { is3dViewportMaximized } from "oxalis/view/layouting/flex_layout_helper"; import { needsLocalHdf5Mapping } from "../accessors/volumetracing_accessor"; +import { pushSaveQueueTransaction } from "../actions/save_actions"; +import { updateAnnotationLayerName, updateMetadataOfAnnotation } from "./update_actions"; /* Note that this must stay in sync with the back-end constant MaxMagForAgglomerateMapping compare https://github.com/scalableminds/webknossos/issues/5223. 
*/ const MAX_MAG_FOR_AGGLOMERATE_MAPPING = 16; +export function* pushAnnotationNameUpdateAction(action: SetAnnotationNameAction) { + const mayEdit = yield* select((state) => mayEditAnnotationProperties(state)); + if (!mayEdit) { + return; + } + yield* put( + pushSaveQueueTransaction([updateMetadataOfAnnotation(action.name)], "unused-tracing-id"), + ); +} + +export function* pushAnnotationDescriptionUpdateAction(action: SetAnnotationDescriptionAction) { + const mayEdit = yield* select((state) => mayEditAnnotationProperties(state)); + if (!mayEdit) { + return; + } + yield* put( + pushSaveQueueTransaction( + [updateMetadataOfAnnotation(undefined, action.description)], + "unused-tracing-id", + ), + ); +} + export function* pushAnnotationUpdateAsync(action: Action) { const tracing = yield* select((state) => state.tracing); const mayEdit = yield* select((state) => mayEditAnnotationProperties(state)); @@ -70,9 +93,7 @@ export function* pushAnnotationUpdateAsync(action: Action) { }; // The extra type annotation is needed here for flow const editObject: Partial = { - name: tracing.name, visibility: tracing.visibility, - description: tracing.description, viewConfiguration, }; try { @@ -103,16 +124,11 @@ export function* pushAnnotationUpdateAsync(action: Action) { function* pushAnnotationLayerUpdateAsync(action: EditAnnotationLayerAction): Saga { const { tracingId, layerProperties } = action; - const annotationId = yield* select((storeState) => storeState.tracing.annotationId); - const annotationType = yield* select((storeState) => storeState.tracing.annotationType); - yield* retry( - SETTINGS_MAX_RETRY_COUNT, - SETTINGS_RETRY_DELAY, - updateAnnotationLayer, - annotationId, - annotationType, - tracingId, - layerProperties, + yield* put( + pushSaveQueueTransaction( + [updateAnnotationLayerName(tracingId, layerProperties.name)], + tracingId, + ), ); } @@ -214,9 +230,9 @@ export function* watchAnnotationAsync(): Saga { // name, only the latest action is relevant. 
If `_takeEvery` was used, // all updates to the annotation name would be retried regularly, which // would also cause race conditions. - yield* takeLatest("SET_ANNOTATION_NAME", pushAnnotationUpdateAsync); + yield* takeLatest("SET_ANNOTATION_NAME", pushAnnotationNameUpdateAction); yield* takeLatest("SET_ANNOTATION_VISIBILITY", pushAnnotationUpdateAsync); - yield* takeLatest("SET_ANNOTATION_DESCRIPTION", pushAnnotationUpdateAsync); + yield* takeLatest("SET_ANNOTATION_DESCRIPTION", pushAnnotationDescriptionUpdateAction); yield* takeLatest( ((action: Action) => action.type === "UPDATE_LAYER_SETTING" && diff --git a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts index 0ec9d30ab66..1a0044c4370 100644 --- a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts @@ -96,7 +96,9 @@ const takeLatestMappingChange = ( ); const mapping = getMappingInfo(activeMappingByLayer, layerName); - console.log("Changed from", lastBucketRetrievalSource, "to", bucketRetrievalSource); + if (process.env.NODE_ENV !== "production") { + console.log("Changed from", lastBucketRetrievalSource, "to", bucketRetrievalSource); + } if (lastWatcherTask) { console.log("Cancel old bucket watcher"); diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index aa2727ae8af..c10b35693f0 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -268,6 +268,7 @@ function* createEditableMapping(): Saga { // Save before making the mapping editable to make sure the correct mapping is activated in the backend yield* call([Model, Model.ensureSavedState]); // Get volume tracing again to make sure the version is up to date + const tracing = yield* select((state) => state.tracing); const upToDateVolumeTracing = yield* select((state) =>
getActiveSegmentationTracing(state)); if (upToDateVolumeTracing == null) { throw new Error("No active segmentation tracing layer. Cannot create editble mapping."); @@ -277,11 +278,11 @@ function* createEditableMapping(): Saga { const layerName = volumeTracingId; const serverEditableMapping = yield* call(makeMappingEditable, tracingStoreUrl, volumeTracingId); // The server increments the volume tracing's version by 1 when switching the mapping to an editable one - yield* put(setVersionNumberAction(upToDateVolumeTracing.version + 1, "volume", volumeTracingId)); - yield* put(setMappingNameAction(layerName, serverEditableMapping.mappingName, "HDF5")); + yield* put(setVersionNumberAction(tracing.version + 1)); + yield* put(setMappingNameAction(layerName, volumeTracingId, "HDF5")); yield* put(setHasEditableMappingAction()); yield* put(initializeEditableMappingAction(serverEditableMapping)); - return serverEditableMapping.mappingName; + return volumeTracingId; } function* ensureHdf5MappingIsEnabled(layerName: string): Saga { @@ -453,7 +454,7 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { return; } - yield* put(pushSaveQueueTransaction(items, "mapping", volumeTracingId)); + yield* put(pushSaveQueueTransaction(items, volumeTracingId)); yield* call([Model, Model.ensureSavedState]); if (action.type === "MIN_CUT_AGGLOMERATE_WITH_NODE_IDS" || action.type === "DELETE_EDGE") { @@ -781,7 +782,7 @@ function* handleProofreadMergeOrMinCut(action: Action) { return; } - yield* put(pushSaveQueueTransaction(items, "mapping", volumeTracingId)); + yield* put(pushSaveQueueTransaction(items, volumeTracingId)); yield* call([Model, Model.ensureSavedState]); if (action.type === "MIN_CUT_AGGLOMERATE") { @@ -942,7 +943,7 @@ function* handleProofreadCutFromNeighbors(action: Action) { return; } - yield* put(pushSaveQueueTransaction(items, "mapping", volumeTracingId)); + yield* put(pushSaveQueueTransaction(items, volumeTracingId)); yield* call([Model, Model.ensureSavedState]); 
// Now that the changes are saved, we can split the mapping locally (because it requires @@ -1272,12 +1273,12 @@ function* splitAgglomerateInMapping( .filter(([_segmentId, agglomerateId]) => agglomerateId === comparableSourceAgglomerateId) .map(([segmentId, _agglomerateId]) => segmentId); - const tracingStoreHost = yield* select((state) => state.tracing.tracingStore.url); + const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); // Ask the server to map the (split) segment ids. This creates a partial mapping // that only contains these ids. const mappingAfterSplit = yield* call( getAgglomeratesForSegmentsFromTracingstore, - tracingStoreHost, + tracingStoreUrl, volumeTracingId, splitSegmentIds, ); diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index d2acc8ca949..928816b7e89 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -10,11 +10,8 @@ import _ from "lodash"; import messages from "messages"; import { ControlModeEnum } from "oxalis/constants"; import { getMagInfo } from "oxalis/model/accessors/dataset_accessor"; -import { selectQueue } from "oxalis/model/accessors/save_accessor"; import { selectTracing } from "oxalis/model/accessors/tracing_accessor"; -import { getVolumeTracingById } from "oxalis/model/accessors/volumetracing_accessor"; import { FlycamActions } from "oxalis/model/actions/flycam_actions"; -import type { SaveQueueType } from "oxalis/model/actions/save_actions"; import { pushSaveQueueTransaction, setLastSaveTimestampAction, @@ -26,7 +23,6 @@ import type { InitializeSkeletonTracingAction } from "oxalis/model/actions/skele import { SkeletonTracingSaveRelevantActions } from "oxalis/model/actions/skeletontracing_actions"; import { ViewModeSaveRelevantActions } from "oxalis/model/actions/view_mode_actions"; import { - type InitializeEditableMappingAction, type 
InitializeVolumeTracingAction, VolumeTracingSaveRelevantActions, } from "oxalis/model/actions/volumetracing_actions"; @@ -43,7 +39,6 @@ import { } from "oxalis/model/sagas/save_saga_constants"; import { diffSkeletonTracing } from "oxalis/model/sagas/skeletontracing_saga"; import type { UpdateAction } from "oxalis/model/sagas/update_actions"; -import { updateTdCamera } from "oxalis/model/sagas/update_actions"; import { diffVolumeTracing } from "oxalis/model/sagas/volumetracing_saga"; import { ensureWkReady } from "oxalis/model/sagas/wk_ready_saga"; import { Model } from "oxalis/singletons"; @@ -58,18 +53,18 @@ import { call, delay, fork, put, race, take, takeEvery } from "typed-redux-saga" const ONE_YEAR_MS = 365 * 24 * 3600 * 1000; -export function* pushSaveQueueAsync(saveQueueType: SaveQueueType, tracingId: string): Saga { +export function* pushSaveQueueAsync(): Saga { yield* call(ensureWkReady); - yield* put(setLastSaveTimestampAction(saveQueueType, tracingId)); + yield* put(setLastSaveTimestampAction()); let loopCounter = 0; while (true) { loopCounter++; let saveQueue; // Check whether the save queue is actually empty, the PUSH_SAVE_QUEUE_TRANSACTION action - // could have been triggered during the call to sendRequestToServer - saveQueue = yield* select((state) => selectQueue(state, saveQueueType, tracingId)); + // could have been triggered during the call to sendSaveRequestToServer + saveQueue = yield* select((state) => state.save.queue); if (saveQueue.length === 0) { if (loopCounter % 100 === 0) { @@ -86,7 +81,7 @@ export function* pushSaveQueueAsync(saveQueueType: SaveQueueType, tracingId: str timeout: delay(PUSH_THROTTLE_TIME), forcePush: take("SAVE_NOW"), }); - yield* put(setSaveBusyAction(true, saveQueueType, tracingId)); + yield* put(setSaveBusyAction(true)); // Send (parts) of the save queue to the server. 
// There are two main cases: @@ -105,22 +100,22 @@ export function* pushSaveQueueAsync(saveQueueType: SaveQueueType, tracingId: str // ignored (they will be picked up in the next iteration of this loop). // Otherwise, the risk of a high number of save-requests (see case 1) // would be present here, too (note the risk would be greater, because the - // user didn't use the save button which is usually accompanied a small pause). + // user didn't use the save button which is usually accompanied by a small pause). const itemCountToSave = forcePush ? Number.POSITIVE_INFINITY - : yield* select((state) => selectQueue(state, saveQueueType, tracingId).length); + : yield* select((state) => state.save.queue.length); let savedItemCount = 0; while (savedItemCount < itemCountToSave) { - saveQueue = yield* select((state) => selectQueue(state, saveQueueType, tracingId)); + saveQueue = yield* select((state) => state.save.queue); if (saveQueue.length > 0) { - savedItemCount += yield* call(sendRequestToServer, saveQueueType, tracingId); + savedItemCount += yield* call(sendSaveRequestToServer); } else { break; } } - yield* put(setSaveBusyAction(false, saveQueueType, tracingId)); + yield* put(setSaveBusyAction(false)); } } export function sendRequestWithToken( @@ -132,17 +127,14 @@ export function sendRequestWithToken( // This function returns the first n batches of the provided array, so that the count of // all actions in these n batches does not exceed MAXIMUM_ACTION_COUNT_PER_SAVE -function sliceAppropriateBatchCount( - batches: Array, - saveQueueType: SaveQueueType, -): Array { +function sliceAppropriateBatchCount(batches: Array): Array { const slicedBatches = []; let actionCount = 0; for (const batch of batches) { const newActionCount = actionCount + batch.actions.length; - if (newActionCount <= MAXIMUM_ACTION_COUNT_PER_SAVE[saveQueueType]) { + if (newActionCount <= MAXIMUM_ACTION_COUNT_PER_SAVE) { actionCount = newActionCount; slicedBatches.push(batch); } else { @@ -162,22 
+154,22 @@ function getRetryWaitTime(retryCount: number) { // at any time, because the browser page is reloaded after the message is shown, anyway. let didShowFailedSimultaneousTracingError = false; -export function* sendRequestToServer( - saveQueueType: SaveQueueType, - tracingId: string, -): Saga { +export function* sendSaveRequestToServer(): Saga { /* * Saves a reasonably-sized part of the save queue (that corresponds to the * tracingId) to the server (plus retry-mechanism). * The saga returns the number of save queue items that were saved. */ - const fullSaveQueue = yield* select((state) => selectQueue(state, saveQueueType, tracingId)); - const saveQueue = sliceAppropriateBatchCount(fullSaveQueue, saveQueueType); + const fullSaveQueue = yield* select((state) => state.save.queue); + const saveQueue = sliceAppropriateBatchCount(fullSaveQueue); let compactedSaveQueue = compactSaveQueue(saveQueue); - const { version, type } = yield* select((state) => - selectTracing(state, saveQueueType, tracingId), + const tracing = yield* select((state) => state.tracing); + const tracings = yield* select((state) => + _.compact([state.tracing.skeleton, ...state.tracing.volumes]), ); + const version = _.max(tracings.map((t) => t.version).concat([tracing.version])) || 0; + const annotationId = yield* select((state) => state.tracing.annotationId); const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); let versionIncrement; [compactedSaveQueue, versionIncrement] = addVersionNumbers(compactedSaveQueue, version); @@ -191,7 +183,8 @@ export function* sendRequestToServer( const startTime = Date.now(); yield* call( sendRequestWithToken, - `${tracingStoreUrl}/tracings/${type}/${tracingId}/update?token=`, + + `${tracingStoreUrl}/tracings/annotation/${annotationId}/update?token=`, { method: "POST", data: compactedSaveQueue, @@ -212,19 +205,17 @@ export function* sendRequestToServer( ); } - yield* put(setVersionNumberAction(version + versionIncrement, saveQueueType, 
tracingId)); - yield* put(setLastSaveTimestampAction(saveQueueType, tracingId)); - yield* put(shiftSaveQueueAction(saveQueue.length, saveQueueType, tracingId)); - - if (saveQueueType === "volume") { - try { - yield* call(markBucketsAsNotDirty, compactedSaveQueue, tracingId); - } catch (error) { - // If markBucketsAsNotDirty fails some reason, wk cannot recover from this error. - console.warn("Error when marking buckets as clean. No retry possible. Error:", error); - exceptionDuringMarkBucketsAsNotDirty = true; - throw error; - } + yield* put(setVersionNumberAction(version + versionIncrement)); + yield* put(setLastSaveTimestampAction()); + yield* put(shiftSaveQueueAction(saveQueue.length)); + + try { + yield* call(markBucketsAsNotDirty, compactedSaveQueue); + } catch (error) { + // If markBucketsAsNotDirty fails for some reason, wk cannot recover from this error. + console.warn("Error when marking buckets as clean. No retry possible. Error:", error); + exceptionDuringMarkBucketsAsNotDirty = true; + throw error; + } yield* call(toggleErrorHighlighting, false); @@ -289,33 +280,34 @@ export function* sendRequestToServer( } } -function* markBucketsAsNotDirty(saveQueue: Array, tracingId: string) { - const segmentationLayer = Model.getSegmentationTracingLayer(tracingId); - const segmentationResolutionInfo = yield* call(getMagInfo, segmentationLayer.resolutions); - - if (segmentationLayer != null) { - for (const saveEntry of saveQueue) { - for (const updateAction of saveEntry.actions) { - if (updateAction.name === "updateBucket") { - const { position, mag, additionalCoordinates } = updateAction.value; - const resolutionIndex = segmentationResolutionInfo.getIndexByMag(mag); - const zoomedBucketAddress = globalPositionToBucketPosition( - position, - segmentationResolutionInfo.getDenseMags(), - resolutionIndex, - additionalCoordinates, - ); - const bucket = segmentationLayer.cube.getOrCreateBucket(zoomedBucketAddress); - - if (bucket.type === "null") { - continue; - } - - 
bucket.dirtyCount--; - - if (bucket.dirtyCount === 0) { - bucket.markAsPushed(); - } +function* markBucketsAsNotDirty(saveQueue: Array) { + for (const saveEntry of saveQueue) { + for (const updateAction of saveEntry.actions) { + if (updateAction.name === "updateBucket") { + // The ID must belong to a segmentation layer because we are handling + // an updateBucket action. + const { actionTracingId: tracingId } = updateAction.value; + const segmentationLayer = Model.getSegmentationTracingLayer(tracingId); + const segmentationResolutionInfo = yield* call(getMagInfo, segmentationLayer.resolutions); + + const { position, mag, additionalCoordinates } = updateAction.value; + const resolutionIndex = segmentationResolutionInfo.getIndexByMag(mag); + const zoomedBucketAddress = globalPositionToBucketPosition( + position, + segmentationResolutionInfo.getDenseMags(), + resolutionIndex, + additionalCoordinates, + ); + const bucket = segmentationLayer.cube.getOrCreateBucket(zoomedBucketAddress); + + if (bucket.type === "null") { + continue; + } + + bucket.dirtyCount--; + + if (bucket.dirtyCount === 0) { + bucket.markAsPushed(); } } } @@ -355,8 +347,8 @@ export function performDiffTracing( tracing: SkeletonTracing | VolumeTracing, prevFlycam: Flycam, flycam: Flycam, - prevTdCamera: CameraData, - tdCamera: CameraData, + _prevTdCamera: CameraData, + _tdCamera: CameraData, ): Array { let actions: Array = []; @@ -372,39 +364,33 @@ export function performDiffTracing( ); } + /* + TODOp: restore this update action (decide how to handle it, does it belong to skeleton or volume or something else?) 
if (prevTdCamera !== tdCamera) { actions = actions.concat(updateTdCamera()); } + */ return actions; } export function* saveTracingAsync(): Saga { + yield* fork(pushSaveQueueAsync); yield* takeEvery("INITIALIZE_SKELETONTRACING", setupSavingForTracingType); yield* takeEvery("INITIALIZE_VOLUMETRACING", setupSavingForTracingType); - yield* takeEvery("INITIALIZE_EDITABLE_MAPPING", setupSavingForEditableMapping); } -export function* setupSavingForEditableMapping( - initializeAction: InitializeEditableMappingAction, -): Saga { - // No diffing needs to be done for editable mappings as the saga pushes update actions - // to the respective save queues, itself - const volumeTracingId = initializeAction.mapping.tracingId; - yield* fork(pushSaveQueueAsync, "mapping", volumeTracingId); -} export function* setupSavingForTracingType( initializeAction: InitializeSkeletonTracingAction | InitializeVolumeTracingAction, ): Saga { /* Listen to changes to the annotation and derive UpdateActions from the old and new state. - The actual push to the server is done by the forked pushSaveQueueAsync saga. + The actual push to the server is done by the forked pushSaveQueueAsync saga. */ const saveQueueType = initializeAction.type === "INITIALIZE_SKELETONTRACING" ? 
"skeleton" : "volume"; const tracingId = initializeAction.tracing.id; - yield* fork(pushSaveQueueAsync, saveQueueType, tracingId); let prevTracing = (yield* select((state) => selectTracing(state, saveQueueType, tracingId))) as | VolumeTracing | SkeletonTracing; @@ -456,7 +442,7 @@ export function* setupSavingForTracingType( ); if (items.length > 0) { - yield* put(pushSaveQueueTransaction(items, saveQueueType, tracingId)); + yield* put(pushSaveQueueTransaction(items, tracingId)); } prevTracing = tracing; @@ -498,32 +484,21 @@ function* watchForSaveConflicts() { const maybeSkeletonTracing = yield* select((state) => state.tracing.skeleton); const volumeTracings = yield* select((state) => state.tracing.volumes); const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); + const annotationId = yield* select((state) => state.tracing.annotationId); const tracings: Array = _.compact([ ...volumeTracings, maybeSkeletonTracing, ]); - for (const tracing of tracings) { - const versionOnServer = yield* call( - getNewestVersionForTracing, - tracingStoreUrl, - tracing.tracingId, - tracing.type, - ); + const versionOnServer = yield* call(getNewestVersionForTracing, tracingStoreUrl, annotationId); + for (const tracing of tracings) { // Read the tracing version again from the store, since the // old reference to tracing might be outdated now due to the // immutability. const versionOnClient = yield* select((state) => { - if (tracing.type === "volume") { - return getVolumeTracingById(state.tracing, tracing.tracingId).version; - } - const { skeleton } = state.tracing; - if (skeleton == null) { - throw new Error("Skeleton must exist at this point."); - } - return skeleton.version; + return state.tracing.version; }); const toastKey = `save_conflicts_warning_${tracing.tracingId}`; @@ -531,9 +506,7 @@ function* watchForSaveConflicts() { // The latest version on the server is greater than the most-recently // stored version. 
- const saveQueue = yield* select((state) => - selectQueue(state, tracing.type, tracing.tracingId), - ); + const saveQueue = yield* select((state) => state.save.queue); let msg = ""; if (!allowSave) { diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts b/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts index 0fdc776eb2c..c9aa2351d4c 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts @@ -11,14 +11,12 @@ export const UNDO_HISTORY_SIZE = 20; export const SETTINGS_RETRY_DELAY = 15 * 1000; export const SETTINGS_MAX_RETRY_COUNT = 20; // 20 * 15s == 5m -export const MAXIMUM_ACTION_COUNT_PER_BATCH = { - skeleton: 5000, - volume: 1000, // Since volume saving is slower, use a lower value here. - mapping: Number.POSITIVE_INFINITY, // The back-end does not accept transactions for mappings. -} as const; +export const MAXIMUM_ACTION_COUNT_PER_BATCH = 1000; -export const MAXIMUM_ACTION_COUNT_PER_SAVE = { - skeleton: 15000, - volume: 3000, - mapping: Number.POSITIVE_INFINITY, // The back-end does not accept transactions for mappings. -} as const; +// todop: should this be smarter? +// export const MAXIMUM_ACTION_COUNT_PER_SAVE = { +// skeleton: 15000, +// volume: 3000, +// mapping: Number.POSITIVE_INFINITY, // The back-end does not accept transactions for mappings. 
+// } as const; +export const MAXIMUM_ACTION_COUNT_PER_SAVE = 3000; diff --git a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts index d2c3aa976e7..4c56c6f837c 100644 --- a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts @@ -27,7 +27,7 @@ import { updateTreeEdgesVisibility, updateNode, updateSkeletonTracing, - updateUserBoundingBoxes, + updateUserBoundingBoxesInSkeletonTracing, updateTree, updateTreeGroups, } from "oxalis/model/sagas/update_actions"; @@ -250,9 +250,7 @@ function* getAgglomerateSkeletonTracing( const annotation = yield* select((state) => state.tracing); const layerInfo = getLayerByName(dataset, layerName); - const editableMapping = annotation.mappings.find( - (mapping) => mapping.mappingName === mappingName, - ); + const editableMapping = annotation.mappings.find((mapping) => mapping.tracingId === mappingName); try { let nmlProtoBuffer; @@ -638,7 +636,7 @@ export function* diffSkeletonTracing( } if (!_.isEqual(prevSkeletonTracing.userBoundingBoxes, skeletonTracing.userBoundingBoxes)) { - yield updateUserBoundingBoxes(skeletonTracing.userBoundingBoxes); + yield updateUserBoundingBoxesInSkeletonTracing(skeletonTracing.userBoundingBoxes); } } export default [ diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts b/frontend/javascripts/oxalis/model/sagas/update_actions.ts index 32ed16ef2da..cbfdc61f9e5 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts @@ -10,7 +10,11 @@ import type { NumberLike, } from "oxalis/store"; import { convertUserBoundingBoxesFromFrontendToServer } from "oxalis/model/reducers/reducer_helpers"; -import type { AdditionalCoordinate, MetadataEntryProto } from "types/api_flow_types"; +import type { + AdditionalCoordinate, + APIMagRestrictions, + MetadataEntryProto, +} 
from "types/api_flow_types"; export type NodeWithTreeId = { treeId: number; @@ -34,7 +38,12 @@ export type CreateSegmentUpdateAction = ReturnType; export type DeleteSegmentUpdateAction = ReturnType; export type DeleteSegmentDataUpdateAction = ReturnType; -type UpdateUserBoundingBoxesUpdateAction = ReturnType; +type UpdateUserBoundingBoxesInSkeletonTracingUpdateAction = ReturnType< + typeof updateUserBoundingBoxesInSkeletonTracing +>; +type UpdateUserBoundingBoxesInVolumeTracingUpdateAction = ReturnType< + typeof updateUserBoundingBoxesInVolumeTracing +>; export type UpdateBucketUpdateAction = ReturnType; type UpdateSegmentGroupsUpdateAction = ReturnType; @@ -46,6 +55,10 @@ export type RevertToVersionUpdateAction = ReturnType; export type RemoveFallbackLayerUpdateAction = ReturnType; export type UpdateTdCameraUpdateAction = ReturnType; export type UpdateMappingNameUpdateAction = ReturnType; +type AddLayerToAnnotationUpdateAction = ReturnType; +type DeleteAnnotationLayerUpdateAction = ReturnType; +type UpdateAnnotationLayerNameUpdateAction = ReturnType; +type UpdateMetadataOfAnnotationUpdateAction = ReturnType; export type SplitAgglomerateUpdateAction = ReturnType; export type MergeAgglomerateUpdateAction = ReturnType; @@ -61,7 +74,8 @@ export type UpdateAction = | DeleteEdgeUpdateAction | UpdateSkeletonTracingUpdateAction | UpdateVolumeTracingUpdateAction - | UpdateUserBoundingBoxesUpdateAction + | UpdateUserBoundingBoxesInSkeletonTracingUpdateAction + | UpdateUserBoundingBoxesInVolumeTracingUpdateAction | CreateSegmentUpdateAction | UpdateSegmentUpdateAction | DeleteSegmentUpdateAction @@ -76,8 +90,19 @@ export type UpdateAction = | RemoveFallbackLayerUpdateAction | UpdateTdCameraUpdateAction | UpdateMappingNameUpdateAction + | AddLayerToAnnotationUpdateAction + | DeleteAnnotationLayerUpdateAction + | UpdateAnnotationLayerNameUpdateAction + | UpdateMetadataOfAnnotationUpdateAction | SplitAgglomerateUpdateAction | MergeAgglomerateUpdateAction; + +export type 
UpdateActionWithTracingId = UpdateAction & { + value: UpdateAction["value"] & { + actionTracingId: string; + }; +}; + // This update action is only created in the frontend for display purposes type CreateTracingUpdateAction = { name: "createTracing"; @@ -107,6 +132,8 @@ type AddServerValuesFn = (arg0: T) => T & { type AsServerAction = ReturnType>; +// When the server delivers update actions (e.g., when requesting the version history +// of an annotation), ServerUpdateActions are sent which include some additional information. export type ServerUpdateAction = AsServerAction< | UpdateAction // These two actions are never sent by the frontend and, therefore, don't exist in the UpdateAction type @@ -262,7 +289,7 @@ export function updateSkeletonTracing( zoomLevel: number, ) { return { - name: "updateTracing", + name: "updateSkeletonTracing", value: { activeNode: tracing.activeNodeId, editPosition, @@ -294,7 +321,7 @@ export function updateVolumeTracing( zoomLevel: number, ) { return { - name: "updateTracing", + name: "updateVolumeTracing", value: { activeSegmentId: tracing.activeCellId, editPosition: position, @@ -305,9 +332,19 @@ export function updateVolumeTracing( }, } as const; } -export function updateUserBoundingBoxes(userBoundingBoxes: Array) { +export function updateUserBoundingBoxesInSkeletonTracing( + userBoundingBoxes: Array, +) { return { - name: "updateUserBoundingBoxes", + name: "updateUserBoundingBoxesInSkeletonTracing", + value: { + boundingBoxes: convertUserBoundingBoxesFromFrontendToServer(userBoundingBoxes), + }, + } as const; +} +export function updateUserBoundingBoxesInVolumeTracing(userBoundingBoxes: Array) { + return { + name: "updateUserBoundingBoxesInVolumeTracing", value: { boundingBoxes: convertUserBoundingBoxesFromFrontendToServer(userBoundingBoxes), }, @@ -504,6 +541,47 @@ export function mergeAgglomerate( } as const; } +type AnnotationLayerCreationParameters = { + typ: "Skeleton" | "Volume"; + name: string | null | undefined; + 
autoFallbackLayer?: boolean; + fallbackLayerName?: string | null | undefined; + mappingName?: string | null | undefined; + magRestrictions?: APIMagRestrictions | null | undefined; +}; + +export function addLayerToAnnotation(parameters: AnnotationLayerCreationParameters) { + return { + name: "addLayerToAnnotation", + value: { layerParameters: parameters }, + } as const; +} + +export function deleteAnnotationLayer( + tracingId: string, + layerName: string, + typ: "Skeleton" | "Volume", +) { + return { + name: "deleteLayerFromAnnotation", + value: { tracingId, layerName, typ }, + } as const; +} + +export function updateAnnotationLayerName(tracingId: string, newLayerName: string) { + return { + name: "updateLayerMetadata", + value: { tracingId, layerName: newLayerName }, + } as const; +} + +export function updateMetadataOfAnnotation(name?: string, description?: string) { + return { + name: "updateMetadataOfAnnotation", + value: { name, description }, + } as const; +} + function enforceValidMetadata(metadata: MetadataEntryProto[]): MetadataEntryProto[] { // We do not want to save metadata with duplicate keys. Validation errors // will warn the user in case this exists. 
However, we allow duplicate keys in the diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx index 13fc5f01d81..8d55bf16b94 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx @@ -95,7 +95,7 @@ import { deleteSegmentVolumeAction, removeFallbackLayer, updateSegmentVolumeAction, - updateUserBoundingBoxes, + updateUserBoundingBoxesInVolumeTracing, updateVolumeTracing, updateMappingName, } from "oxalis/model/sagas/update_actions"; @@ -699,7 +699,7 @@ export function* diffVolumeTracing( } if (!_.isEqual(prevVolumeTracing.userBoundingBoxes, volumeTracing.userBoundingBoxes)) { - yield updateUserBoundingBoxes(volumeTracing.userBoundingBoxes); + yield updateUserBoundingBoxesInVolumeTracing(volumeTracing.userBoundingBoxes); } if (prevVolumeTracing !== volumeTracing) { @@ -947,11 +947,7 @@ function* handleDeleteSegmentData(): Saga { yield* put(setBusyBlockingInfoAction(true, "Segment is being deleted.")); yield* put( - pushSaveQueueTransaction( - [deleteSegmentDataVolumeAction(action.segmentId)], - "volume", - action.layerName, - ), + pushSaveQueueTransaction([deleteSegmentDataVolumeAction(action.segmentId)], action.layerName), ); yield* call([Model, Model.ensureSavedState]); diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index 6bbafd8d8a6..7a022cc4933 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -11,7 +11,6 @@ import type { APICompoundType, APISegmentationLayer, } from "types/api_flow_types"; -import type { Versions } from "oxalis/view/version_view"; import { computeDataTexturesSetup, getSupportedTextureSpecs, @@ -36,7 +35,7 @@ import { getServerVolumeTracings } from "oxalis/model/accessors/volumetracing_ac import { getSomeServerTracing } from 
"oxalis/model/accessors/tracing_accessor"; import { getTracingsForAnnotation, - getAnnotationInformation, + getMaybeOutdatedAnnotationInformation, getEmptySandboxAnnotationInformation, getDataset, getSharingTokenFromUrlParameters, @@ -44,6 +43,7 @@ import { getDatasetViewConfiguration, getEditableMappingInfo, getAnnotationCompoundInformation, + getNewestVersionOfTracing, } from "admin/admin_rest_api"; import { dispatchMaybeFetchMeshFilesAsync, @@ -106,6 +106,7 @@ import { isFeatureAllowedByPricingPlan, } from "admin/organization/pricing_plan_utils"; import { convertServerAdditionalAxesToFrontEnd } from "./model/reducers/reducer_helpers"; +import { setVersionNumberAction } from "./model/actions/save_actions"; export const HANDLED_ERROR = "error_was_handled"; type DataLayerCollection = Record; @@ -114,7 +115,7 @@ export async function initialize( initialMaybeCompoundType: APICompoundType | null, initialCommandType: TraceOrViewCommand, initialFetch: boolean, - versions?: Versions, + version?: number | undefined | null, ): Promise< | { dataLayers: DataLayerCollection; @@ -129,10 +130,34 @@ export async function initialize( if (initialCommandType.type === ControlModeEnum.TRACE) { const { annotationId } = initialCommandType; - annotation = - initialMaybeCompoundType != null - ? 
await getAnnotationCompoundInformation(annotationId, initialMaybeCompoundType) - : await getAnnotationInformation(annotationId); + if (initialMaybeCompoundType != null) { + annotation = await getAnnotationCompoundInformation(annotationId, initialMaybeCompoundType); + } else { + let maybeOutdatedAnnotation = await getMaybeOutdatedAnnotationInformation(annotationId); + const annotationFromTracingStore = await getNewestVersionOfTracing( + maybeOutdatedAnnotation.tracingStore.url, + maybeOutdatedAnnotation.id, + ); + const layersWithStats = annotationFromTracingStore.annotationLayers.map((layer) => { + const matchingLayer = maybeOutdatedAnnotation.annotationLayers.find( + (l) => l.tracingId === layer.tracingId, + ); + + return { + tracingId: layer.tracingId, + name: layer.name, + typ: layer.type, + stats: matchingLayer?.stats || {}, + }; + }); + const completeAnnotation = { + ...maybeOutdatedAnnotation, + name: annotationFromTracingStore.name, + description: annotationFromTracingStore.description, + annotationLayers: layersWithStats, + }; + annotation = completeAnnotation; + } datasetId = { name: annotation.dataSetName, owningOrganization: annotation.organization, @@ -169,7 +194,7 @@ export async function initialize( const [dataset, initialUserSettings, serverTracings] = await fetchParallel( annotation, datasetId, - versions, + version, ); const serverVolumeTracings = getServerVolumeTracings(serverTracings); const serverVolumeTracingIds = serverVolumeTracings.map((volumeTracing) => volumeTracing.id); @@ -237,12 +262,12 @@ export async function initialize( async function fetchParallel( annotation: APIAnnotation | null | undefined, datasetId: APIDatasetId, - versions?: Versions, + version: number | undefined | null, ): Promise<[APIDataset, UserConfiguration, Array]> { return Promise.all([ getDataset(datasetId, getSharingTokenFromUrlParameters()), getUserConfiguration(), // Fetch the actual tracing from the datastore, if there is an skeletonAnnotation - annotation ? 
getTracingsForAnnotation(annotation, versions) : [], + annotation ? getTracingsForAnnotation(annotation, version) : [], ]); } @@ -294,6 +319,7 @@ function initializeTracing( // This method is not called for the View mode const { dataset } = Store.getState(); let annotation = _annotation; + let version = 0; const { allowedModes, preferredMode } = determineAllowedModes(annotation.settings); _.extend(annotation.settings, { @@ -325,6 +351,7 @@ function initializeTracing( getSegmentationLayers(dataset).length > 0, messages["tracing.volume_missing_segmentation"], ); + version = Math.max(version, volumeTracing.version); Store.dispatch(initializeVolumeTracingAction(volumeTracing)); }); @@ -336,8 +363,10 @@ function initializeTracing( // To generate a huge amount of dummy trees, use: // import generateDummyTrees from "./model/helpers/generate_dummy_trees"; // tracing.trees = generateDummyTrees(1, 200000); + version = Math.max(version, skeletonTracing.version); Store.dispatch(initializeSkeletonTracingAction(skeletonTracing)); } + Store.dispatch(setVersionNumberAction(version)); } // Initialize 'flight', 'oblique' or 'orthogonal' mode @@ -464,6 +493,7 @@ function initializeDataLayerInstances(gpuFactor: number | null | undefined): { layer, textureInformation.textureSize, textureInformation.textureCount, + layer.name, // In case of a volume tracing layer the layer name will equal its tracingId. 
); } diff --git a/frontend/javascripts/oxalis/store.ts b/frontend/javascripts/oxalis/store.ts index f36f1c95592..22585186ed6 100644 --- a/frontend/javascripts/oxalis/store.ts +++ b/frontend/javascripts/oxalis/store.ts @@ -28,7 +28,7 @@ import type { AdditionalAxis, MetadataEntryProto, } from "types/api_flow_types"; -import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; +import type { CombinedTracingStats } from "oxalis/model/accessors/annotation_accessor"; import type { Action } from "oxalis/model/actions/actions"; import type { BoundingBoxType, @@ -50,7 +50,7 @@ import type { } from "oxalis/constants"; import type { BLEND_MODES, ControlModeEnum } from "oxalis/constants"; import type { Matrix4x4 } from "libs/mjs"; -import type { UpdateAction } from "oxalis/model/sagas/update_actions"; +import type { UpdateAction, UpdateActionWithTracingId } from "oxalis/model/sagas/update_actions"; import AnnotationReducer from "oxalis/model/reducers/annotation_reducer"; import DatasetReducer from "oxalis/model/reducers/dataset_reducer"; import type DiffableMap from "libs/diffable_map"; @@ -191,6 +191,7 @@ export type AnnotationVisibility = APIAnnotationVisibility; export type RestrictionsAndSettings = Restrictions & Settings; export type Annotation = { readonly annotationId: string; + readonly version: number; readonly restrictions: RestrictionsAndSettings; readonly visibility: AnnotationVisibility; readonly annotationLayers: Array; @@ -445,34 +446,21 @@ export type SaveQueueEntry = { version: number; timestamp: number; authorId: string; - actions: Array; + actions: Array; transactionId: string; transactionGroupCount: number; transactionGroupIndex: number; - stats: TracingStats | null | undefined; + stats: CombinedTracingStats | null | undefined; info: string; }; export type ProgressInfo = { readonly processedActionCount: number; readonly totalActionCount: number; }; -export type IsBusyInfo = { - readonly skeleton: boolean; - readonly volumes: Record; - 
readonly mappings: Record; -}; export type SaveState = { - readonly isBusyInfo: IsBusyInfo; - readonly queue: { - readonly skeleton: Array; - readonly volumes: Record>; - readonly mappings: Record>; - }; - readonly lastSaveTimestamp: { - readonly skeleton: number; - readonly volumes: Record; - readonly mappings: Record; - }; + readonly isBusy: boolean; + readonly queue: Array; + readonly lastSaveTimestamp: number; readonly progressInfo: ProgressInfo; }; export type Flycam = { diff --git a/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx index 9f3f5f52d9d..cd09d7c3f84 100644 --- a/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx @@ -8,7 +8,7 @@ import { addTreesAndGroupsAction } from "oxalis/model/actions/skeletontracing_ac import { getSkeletonDescriptor } from "oxalis/model/accessors/skeletontracing_accessor"; import { createMutableTreeMapFromTreeArray } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; import { - getAnnotationInformation, + getMaybeOutdatedAnnotationInformation, getAnnotationCompoundInformation, getTracingForAnnotationType, } from "admin/admin_rest_api"; @@ -145,7 +145,7 @@ class _MergeModalView extends PureComponent { const { selectedExplorativeAnnotation } = this.state; if (selectedExplorativeAnnotation != null) { - const annotation = await getAnnotationInformation(selectedExplorativeAnnotation); + const annotation = await getMaybeOutdatedAnnotationInformation(selectedExplorativeAnnotation); this.mergeAnnotationIntoActiveTracing(annotation); } }; diff --git a/frontend/javascripts/oxalis/view/action-bar/save_button.tsx b/frontend/javascripts/oxalis/view/action-bar/save_button.tsx index e577c802e94..d08db492659 100644 --- a/frontend/javascripts/oxalis/view/action-bar/save_button.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/save_button.tsx @@ -2,8 +2,7 @@ 
import { connect } from "react-redux"; import React from "react"; import _ from "lodash"; import Store, { type SaveState } from "oxalis/store"; -import type { OxalisState, IsBusyInfo } from "oxalis/store"; -import { isBusy } from "oxalis/model/accessors/save_accessor"; +import type { OxalisState } from "oxalis/store"; import ButtonComponent from "oxalis/view/components/button_component"; import { Model } from "oxalis/singletons"; import window from "libs/window"; @@ -14,7 +13,6 @@ import { LoadingOutlined, } from "@ant-design/icons"; import ErrorHandling from "libs/error_handling"; -import * as Utils from "libs/utils"; import FastTooltip from "components/fast_tooltip"; import { Tooltip } from "antd"; import { reuseInstanceOnEquality } from "oxalis/model/accessors/accessor_helpers"; @@ -25,7 +23,7 @@ type OwnProps = { }; type StateProps = { progressFraction: number | null | undefined; - isBusyInfo: IsBusyInfo; + isBusy: boolean; }; type Props = OwnProps & StateProps; type State = { @@ -101,7 +99,7 @@ class SaveButton extends React.PureComponent { getSaveButtonIcon() { if (this.state.isStateSaved) { return ; - } else if (isBusy(this.props.isBusyInfo)) { + } else if (this.props.isBusy) { return ; } else { return ; @@ -109,7 +107,7 @@ class SaveButton extends React.PureComponent { } shouldShowProgress(): boolean { - return isBusy(this.props.isBusyInfo) && this.props.progressFraction != null; + return this.props.isBusy && this.props.progressFraction != null; } render() { @@ -176,27 +174,23 @@ class SaveButton extends React.PureComponent { function getOldestUnsavedTimestamp(saveQueue: SaveState["queue"]): number | null | undefined { let oldestUnsavedTimestamp; - if (saveQueue.skeleton.length > 0) { - oldestUnsavedTimestamp = saveQueue.skeleton[0].timestamp; - } - - for (const volumeQueue of Utils.values(saveQueue.volumes)) { - if (volumeQueue.length > 0) { - const oldestVolumeTimestamp = volumeQueue[0].timestamp; - oldestUnsavedTimestamp = Math.min( - 
oldestUnsavedTimestamp != null ? oldestUnsavedTimestamp : Number.POSITIVE_INFINITY, - oldestVolumeTimestamp, - ); - } + if (saveQueue.length > 0) { + // todop: theoretically, could this be not the oldest one? + // e.g., items are added to the queue like that: + // SkelT=1, SkelT=2, SkelT=3, VolT=1 + // now the first action is saved and the following remains: + // SkelT=2, SkelT=3, VolT=1 + // even if it could happen, probably not critical for the current context? + oldestUnsavedTimestamp = saveQueue[0].timestamp; } return oldestUnsavedTimestamp; } function mapStateToProps(state: OxalisState): StateProps { - const { progressInfo, isBusyInfo } = state.save; + const { progressInfo, isBusy } = state.save; return { - isBusyInfo, + isBusy, // For a low action count, the progress info would show only for a very short amount of time. // Therefore, the progressFraction is set to null, if the count is low. progressFraction: diff --git a/frontend/javascripts/oxalis/view/components/editable_text_label.tsx b/frontend/javascripts/oxalis/view/components/editable_text_label.tsx index 6b514d5bc2d..443698164f7 100644 --- a/frontend/javascripts/oxalis/view/components/editable_text_label.tsx +++ b/frontend/javascripts/oxalis/view/components/editable_text_label.tsx @@ -14,7 +14,7 @@ type Rule = { }; export type EditableTextLabelProp = { value: string; - onChange: (...args: Array) => any; + onChange: (newValue: string) => any; rules?: Rule[]; rows?: number; markdown?: boolean; diff --git a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx index 1153c9eaf0e..51968b6bae6 100644 --- a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx +++ b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx @@ -21,7 +21,7 @@ import { getSegmentationLayers, } from "oxalis/model/accessors/dataset_accessor"; import { - getAnnotationInformation, + getMaybeOutdatedAnnotationInformation, getDataset, getTracingForAnnotationType, 
runTraining, @@ -35,7 +35,12 @@ import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; import { formatVoxels } from "libs/format_utils"; import * as Utils from "libs/utils"; import { V3 } from "libs/mjs"; -import type { APIAnnotation, APIDataset, ServerVolumeTracing } from "types/api_flow_types"; +import { + AnnotationLayerType, + type APIAnnotation, + type APIDataset, + type ServerVolumeTracing, +} from "types/api_flow_types"; import type { Vector3 } from "oxalis/constants"; import { serverVolumeToClientVolumeTracing } from "oxalis/model/reducers/volumetracing_reducer"; import { convertUserBoundingBoxesFromServerToFrontend } from "oxalis/model/reducers/reducer_helpers"; @@ -472,7 +477,7 @@ function AnnotationsCsvInput({ const newAnnotationsWithDatasets = await Promise.all( newItems.map(async (item) => { - const annotation = await getAnnotationInformation(item.annotationId); + const annotation = await getMaybeOutdatedAnnotationInformation(item.annotationId); const dataset = await getDataset({ owningOrganization: annotation.organization, name: annotation.dataSetName, @@ -493,7 +498,7 @@ function AnnotationsCsvInput({ let userBoundingBoxes = volumeTracings[0]?.userBoundingBoxes; if (!userBoundingBoxes) { const skeletonLayer = annotation.annotationLayers.find( - (layer) => layer.typ === "Skeleton", + (layer) => layer.typ === AnnotationLayerType.Skeleton, ); if (skeletonLayer) { const skeletonTracing = await getTracingForAnnotationType(annotation, skeletonLayer); diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx index 2a3a4b3fe93..bd9ef5bd1a8 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx @@ -21,6 +21,7 @@ import _ from "lodash"; import classnames from "classnames"; import update from "immutability-helper"; import { + 
AnnotationLayerType, APIAnnotationTypeEnum, type APIDataLayer, type APIDataset, @@ -49,8 +50,6 @@ import { findDataPositionForLayer, clearCache, findDataPositionForVolumeTracing, - convertToHybridTracing, - deleteAnnotationLayer, updateDatasetDefaultConfiguration, startComputeSegmentIndexFileJob, } from "admin/admin_rest_api"; @@ -131,6 +130,8 @@ import { getDefaultLayerViewConfiguration, } from "types/schemas/dataset_view_configuration.schema"; import defaultState from "oxalis/default_state"; +import { pushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; +import { addLayerToAnnotation, deleteAnnotationLayer } from "oxalis/model/sagas/update_actions"; type DatasetSettingsProps = { userConfiguration: UserConfiguration; @@ -150,6 +151,8 @@ type DatasetSettingsProps = { onZoomToMag: (layerName: string, arg0: Vector3) => number; onChangeUser: (key: keyof UserConfiguration, value: any) => void; reloadHistogram: (layerName: string) => void; + addSkeletonLayerToAnnotation: () => void; + deleteAnnotationLayer: (tracingId: string, type: AnnotationLayerType, layerName: string) => void; tracing: Tracing; task: Task | null | undefined; onEditAnnotationLayer: (tracingId: string, layerProperties: EditableLayerProperties) => void; @@ -453,26 +456,39 @@ class DatasetSettings extends React.PureComponent { ); - getDeleteAnnotationLayerButton = (readableName: string, layer?: APIDataLayer) => ( + getDeleteAnnotationLayerButton = ( + readableName: string, + type: AnnotationLayerType, + tracingId: string, + ) => (
this.deleteAnnotationLayerIfConfirmed(readableName, layer)} + onClick={() => this.deleteAnnotationLayerIfConfirmed(readableName, type, tracingId)} className="fas fa-trash icon-margin-right" />
); - getDeleteAnnotationLayerDropdownOption = (readableName: string, layer?: APIDataLayer) => ( -
this.deleteAnnotationLayerIfConfirmed(readableName, layer)}> + getDeleteAnnotationLayerDropdownOption = ( + readableName: string, + type: AnnotationLayerType, + tracingId: string, + layer?: APIDataLayer, + ) => ( +
this.deleteAnnotationLayerIfConfirmed(readableName, type, tracingId, layer)} + > Delete this annotation layer
); deleteAnnotationLayerIfConfirmed = async ( - readableAnnoationLayerName: string, + readableAnnotationLayerName: string, + type: AnnotationLayerType, + tracingId: string, layer?: APIDataLayer, ) => { const fallbackLayerNote = @@ -481,7 +497,7 @@ class DatasetSettings extends React.PureComponent { : ""; const shouldDelete = await confirmAsync({ title: `Deleting an annotation layer makes its content and history inaccessible. ${fallbackLayerNote}This cannot be undone. Are you sure you want to delete this layer?`, - okText: `Yes, delete annotation layer “${readableAnnoationLayerName}”`, + okText: `Yes, delete annotation layer “${readableAnnotationLayerName}”`, cancelText: "Cancel", maskClosable: true, closable: true, @@ -495,12 +511,8 @@ class DatasetSettings extends React.PureComponent { }, }); if (!shouldDelete) return; + this.props.deleteAnnotationLayer(tracingId, type, readableAnnotationLayerName); await Model.ensureSavedState(); - await deleteAnnotationLayer( - this.props.tracing.annotationId, - this.props.tracing.annotationType, - readableAnnoationLayerName, - ); location.reload(); }; @@ -623,6 +635,8 @@ class DatasetSettings extends React.PureComponent { const { intensityRange } = layerSettings; const layer = getLayerByName(dataset, layerName); const isSegmentation = layer.category === "segmentation"; + const layerType = + layer.category === "segmentation" ? AnnotationLayerType.Volume : AnnotationLayerType.Skeleton; const canBeMadeEditable = isSegmentation && layer.tracingId == null && this.props.controlMode === "TRACE"; const isVolumeTracing = isSegmentation ? layer.tracingId != null : false; @@ -687,7 +701,12 @@ class DatasetSettings extends React.PureComponent { ? { label: (
- {this.getDeleteAnnotationLayerDropdownOption(readableName, layer)} + {this.getDeleteAnnotationLayerDropdownOption( + readableName, + layerType, + layer.tracingId, + layer, + )}
), key: "deleteAnnotationLayer", @@ -1173,7 +1192,7 @@ class DatasetSettings extends React.PureComponent { const readableName = "Skeleton"; const skeletonTracing = enforceSkeletonTracing(tracing); const isOnlyAnnotationLayer = tracing.annotationLayers.length === 1; - const { showSkeletons } = skeletonTracing; + const { showSkeletons, tracingId } = skeletonTracing; const activeNodeRadius = getActiveNode(skeletonTracing)?.radius ?? 0; return ( @@ -1224,7 +1243,13 @@ class DatasetSettings extends React.PureComponent { }} > - {!isOnlyAnnotationLayer ? this.getDeleteAnnotationLayerButton(readableName) : null} + {!isOnlyAnnotationLayer + ? this.getDeleteAnnotationLayerButton( + readableName, + AnnotationLayerType.Skeleton, + tracingId, + ) + : null}
{showSkeletons ? ( @@ -1325,8 +1350,8 @@ class DatasetSettings extends React.PureComponent { }; addSkeletonAnnotationLayer = async () => { + this.props.addSkeletonLayerToAnnotation(); await Model.ensureSavedState(); - await convertToHybridTracing(this.props.tracing.annotationId, null); location.reload(); }; @@ -1639,6 +1664,25 @@ const mapDispatchToProps = (dispatch: Dispatch) => ({ reloadHistogram(layerName: string) { dispatch(reloadHistogramAction(layerName)); }, + + addSkeletonLayerToAnnotation() { + dispatch( + pushSaveQueueTransaction( + [ + addLayerToAnnotation({ + typ: "Skeleton", + name: "skeleton", + fallbackLayerName: undefined, + }), + ], + "unused-tracing-id", + ), + ); + }, + + deleteAnnotationLayer(tracingId: string, type: AnnotationLayerType, layerName: string) { + dispatch(deleteAnnotationLayer(tracingId, layerName, type)); + }, }); const connector = connect(mapStateToProps, mapDispatchToProps); diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx index 565b8ace677..33885d71953 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx @@ -10,7 +10,6 @@ import { RestrictMagnificationSlider, } from "dashboard/advanced_dataset/create_explorative_modal"; import Store, { type Tracing } from "oxalis/store"; -import { addAnnotationLayer } from "admin/admin_rest_api"; import { getSomeMagInfoForDataset, getLayerByName, @@ -24,9 +23,12 @@ import { } from "oxalis/model/accessors/volumetracing_accessor"; import messages from "messages"; import InputComponent from "oxalis/view/components/input_component"; -import { api } from "oxalis/singletons"; +import { api, Model } from "oxalis/singletons"; import Toast from "libs/toast"; import { MappingStatusEnum } from "oxalis/constants"; +import { 
pushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; +import { useDispatch } from "react-redux"; +import { addLayerToAnnotation } from "oxalis/model/sagas/update_actions"; export type ValidationResult = { isValid: boolean; message: string }; export function checkForLayerNameDuplication( @@ -101,6 +103,7 @@ export default function AddVolumeLayerModal({ const [selectedSegmentationLayerName, setSelectedSegmentationLayerName] = useState< string | undefined >(preselectedLayerName); + const dispatch = useDispatch(); const allReadableLayerNames = useMemo( () => getAllReadableLayerNames(dataset, tracing), [dataset, tracing], @@ -162,15 +165,23 @@ export default function AddVolumeLayerModal({ ); if (selectedSegmentationLayerName == null) { - await addAnnotationLayer(tracing.annotationId, tracing.annotationType, { - typ: "Volume", - name: newLayerName, - fallbackLayerName: undefined, - magRestrictions: { - min: minResolutionAllowed, - max: maxResolutionAllowed, - }, - }); + dispatch( + pushSaveQueueTransaction( + [ + addLayerToAnnotation({ + typ: "Volume", + name: newLayerName, + fallbackLayerName: undefined, + magRestrictions: { + min: minResolutionAllowed, + max: maxResolutionAllowed, + }, + }), + ], + "unused-tracing-id", + ), + ); + await Model.ensureSavedState(); } else { if (selectedSegmentationLayer == null) { throw new Error("Segmentation layer is null"); @@ -189,16 +200,24 @@ export default function AddVolumeLayerModal({ maybeMappingName = mappingInfo.mappingName; } - await addAnnotationLayer(tracing.annotationId, tracing.annotationType, { - typ: "Volume", - name: newLayerName, - fallbackLayerName, - magRestrictions: { - min: minResolutionAllowed, - max: maxResolutionAllowed, - }, - mappingName: maybeMappingName, - }); + dispatch( + pushSaveQueueTransaction( + [ + addLayerToAnnotation({ + typ: "Volume", + name: newLayerName, + fallbackLayerName, + magRestrictions: { + min: minResolutionAllowed, + max: maxResolutionAllowed, + }, + mappingName: 
maybeMappingName, + }), + ], + "unused-tracing-id", + ), + ); + await Model.ensureSavedState(); } await api.tracing.hardReload(); diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx index 9a1a0ae8447..bf5b4f783cd 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx @@ -13,10 +13,7 @@ import { getMagnificationUnion, } from "oxalis/model/accessors/dataset_accessor"; import { getActiveMagInfo } from "oxalis/model/accessors/flycam_accessor"; -import { - getCombinedStats, - type CombinedTracingStats, -} from "oxalis/model/accessors/annotation_accessor"; +import { getStats, type CombinedTracingStats } from "oxalis/model/accessors/annotation_accessor"; import { setAnnotationNameAction, setAnnotationDescriptionAction, @@ -273,7 +270,7 @@ export class DatasetInfoTabView extends React.PureComponent { getAnnotationStatistics() { if (this.props.isDatasetViewMode) return null; - return ; + return ; } getKeyboardShortcuts() { diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/skeleton_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/skeleton_tab_view.tsx index 51cea0ccecf..87df5ab1e4a 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/skeleton_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/skeleton_tab_view.tsx @@ -244,13 +244,7 @@ export async function importTracingFiles(files: Array, createGroupForEachF if (oldVolumeTracing) { Store.dispatch(importVolumeTracingAction()); - Store.dispatch( - setVersionNumberAction( - oldVolumeTracing.version + 1, - "volume", - oldVolumeTracing.tracingId, - ), - ); + Store.dispatch(setVersionNumberAction(tracing.version + 1)); Store.dispatch(setLargestSegmentIdAction(newLargestSegmentId)); await clearCache(dataset, oldVolumeTracing.tracingId); 
await api.data.reloadBuckets(oldVolumeTracing.tracingId); diff --git a/frontend/javascripts/oxalis/view/version_entry.tsx b/frontend/javascripts/oxalis/view/version_entry.tsx index c792f7171d6..e363c9060b1 100644 --- a/frontend/javascripts/oxalis/view/version_entry.tsx +++ b/frontend/javascripts/oxalis/view/version_entry.tsx @@ -38,12 +38,14 @@ import type { MergeTreeUpdateAction, UpdateMappingNameUpdateAction, DeleteSegmentDataUpdateAction, + UpdateActionWithTracingId, } from "oxalis/model/sagas/update_actions"; import FormattedDate from "components/formatted_date"; import { MISSING_GROUP_ID } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; import { useSelector } from "react-redux"; -import type { OxalisState } from "oxalis/store"; +import type { HybridTracing, OxalisState } from "oxalis/store"; import { formatUserName, getContributorById } from "oxalis/model/accessors/user_accessor"; +import { getReadableNameByVolumeTracingId } from "oxalis/model/accessors/volumetracing_accessor"; type Description = { description: string; icon: React.ReactNode; @@ -56,7 +58,10 @@ const updateTracingDescription = { // determines the order in which update actions are checked // to describe an update action batch. See also the comment // of the `getDescriptionForBatch` function. 
-const descriptionFns: Record Description> = { +const descriptionFns: Record< + ServerUpdateAction["name"], + (firstAction: any, actionCount: number, tracing: HybridTracing) => Description +> = { importVolumeTracing: (): Description => ({ description: "Imported a volume tracing.", icon: , @@ -65,7 +70,11 @@ const descriptionFns: Record Descr description: "Created the annotation.", icon: , }), - updateUserBoundingBoxes: (): Description => ({ + updateUserBoundingBoxesInSkeletonTracing: (): Description => ({ + description: "Updated a bounding box.", + icon: , + }), + updateUserBoundingBoxesInVolumeTracing: (): Description => ({ description: "Updated a bounding box.", icon: , }), @@ -118,14 +127,28 @@ const descriptionFns: Record Descr description: `Updated the tree with id ${action.value.id}.`, icon: , }), - updateBucket: (): Description => ({ - description: "Updated the segmentation.", - icon: , - }), - updateSegmentGroups: (): Description => ({ - description: "Updated the segment groups.", - icon: , - }), + updateBucket: ( + firstAction: UpdateActionWithTracingId, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Updated the segmentation of layer ${layerName}.`, + icon: , + }; + }, + updateSegmentGroups: ( + firstAction: UpdateActionWithTracingId, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Updated the segment groups of layer ${layerName}.`, + icon: , + }; + }, updateNode: (action: UpdateNodeUpdateAction): Description => ({ description: `Updated the node with id ${action.value.id}.`, icon: , @@ -156,26 +179,61 @@ const descriptionFns: Record Descr description: "Updated the 3D view.", icon: , }), - createSegment: (action: CreateSegmentUpdateAction): Description => ({ - 
description: `Added the segment with id ${action.value.id} to the segments list.`, - icon: , - }), - updateSegment: (action: UpdateSegmentUpdateAction): Description => ({ - description: `Updated the segment with id ${action.value.id} in the segments list.`, - icon: , - }), - deleteSegment: (action: DeleteSegmentUpdateAction): Description => ({ - description: `Deleted the segment with id ${action.value.id} from the segments list.`, - icon: , - }), - deleteSegmentData: (action: DeleteSegmentDataUpdateAction): Description => ({ - description: `Deleted the data of segment ${action.value.id}. All voxels with that id were overwritten with 0.`, - icon: , - }), - addSegmentIndex: (): Description => ({ - description: "Added segment index to enable segment statistics.", - icon: , - }), + createSegment: ( + firstAction: UpdateActionWithTracingId & CreateSegmentUpdateAction, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Added the segment with id ${firstAction.value.id} to the segments list of layer ${layerName}.`, + icon: , + }; + }, + updateSegment: ( + firstAction: UpdateActionWithTracingId & UpdateSegmentUpdateAction, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Updated the segment with id ${firstAction.value.id} in the segments list of layer ${layerName}.`, + icon: , + }; + }, + deleteSegment: ( + firstAction: UpdateActionWithTracingId & DeleteSegmentUpdateAction, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Deleted the segment with id ${firstAction.value.id} from the segments list of layer ${layerName}.`, + icon: , + }; + 
}, + deleteSegmentData: ( + firstAction: UpdateActionWithTracingId & DeleteSegmentDataUpdateAction, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Deleted the data of segment ${firstAction.value.id} of layer ${layerName}. All voxels with that id were overwritten with 0.`, + icon: , + }; + }, + addSegmentIndex: ( + firstAction: UpdateActionWithTracingId, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Added segment index to layer ${layerName} to enable segment statistics.`, + icon: , + }; + }, // This should never be shown since currently this update action can only be triggered // by merging or splitting trees which is recognized separately, before this description // is accessed. @@ -188,20 +246,29 @@ const descriptionFns: Record Descr description: `Merged the trees with id ${action.value.sourceId} and ${action.value.targetId}.`, icon: , }), - updateTracing: (): Description => updateTracingDescription, -}; + updateSkeletonTracing: (): Description => updateTracingDescription, + updateVolumeTracing: (): Description => updateTracingDescription, +} as const; + +function maybeGetReadableVolumeTracingName(tracing: HybridTracing, tracingId: string): string { + const volumeTracing = tracing.volumes.find((volume) => volume.tracingId === tracingId); + return volumeTracing != null + ? 
getReadableNameByVolumeTracingId(tracing, volumeTracing.tracingId) + : ""; +} function getDescriptionForSpecificBatch( actions: Array, type: string, + tracing: HybridTracing, ): Description { const firstAction = actions[0]; if (firstAction.name !== type) { throw new Error("Type constraint violated"); } - - return descriptionFns[type](firstAction, actions.length); + const fn = descriptionFns[type]; + return fn(firstAction, actions.length, tracing); } // An update action batch can consist of more than one update action as a single user action @@ -215,7 +282,10 @@ function getDescriptionForSpecificBatch( // "more expressive" update actions first and for more general ones later. // The order is determined by the order in which the update actions are added to the // `descriptionFns` object. -function getDescriptionForBatch(actions: Array): Description { +function getDescriptionForBatch( + actions: Array, + tracing: HybridTracing, +): Description { const groupedUpdateActions = _.groupBy(actions, "name"); const moveTreeComponentUAs = groupedUpdateActions.moveTreeComponent; @@ -265,7 +335,7 @@ function getDescriptionForBatch(actions: Array): Description const updateActions = groupedUpdateActions[key]; if (updateActions != null) { - return getDescriptionForSpecificBatch(updateActions, key); + return getDescriptionForSpecificBatch(updateActions, key, tracing); } } @@ -297,6 +367,7 @@ export default function VersionEntry({ const contributors = useSelector((state: OxalisState) => state.tracing.contributors); const activeUser = useSelector((state: OxalisState) => state.activeUser); const owner = useSelector((state: OxalisState) => state.tracing.owner); + const tracing = useSelector((state: OxalisState) => state.tracing); const liClassName = classNames("version-entry", { "active-version-entry": isActive, @@ -312,7 +383,7 @@ export default function VersionEntry({ {allowUpdate ? 
"Restore" : "Download"} ); - const { description, icon } = getDescriptionForBatch(actions); + const { description, icon } = getDescriptionForBatch(actions, tracing); // In case the actionAuthorId is not set, the action was created before the multi-contributor // support. Default to the owner in that case. diff --git a/frontend/javascripts/oxalis/view/version_list.tsx b/frontend/javascripts/oxalis/view/version_list.tsx index be21599922e..20928c8345a 100644 --- a/frontend/javascripts/oxalis/view/version_list.tsx +++ b/frontend/javascripts/oxalis/view/version_list.tsx @@ -3,7 +3,6 @@ import { useState, useEffect } from "react"; import _ from "lodash"; import dayjs from "dayjs"; import type { APIUpdateActionBatch } from "types/api_flow_types"; -import type { Versions } from "oxalis/view/version_view"; import { chunkIntoTimeWindows } from "libs/utils"; import { getUpdateActionLog, @@ -13,7 +12,6 @@ import { import { handleGenericError } from "libs/error_handling"; import { pushSaveQueueTransaction, - type SaveQueueType, setVersionNumberAction, } from "oxalis/model/actions/save_actions"; import { @@ -24,21 +22,19 @@ import { import { setAnnotationAllowUpdateAction } from "oxalis/model/actions/annotation_actions"; import { setVersionRestoreVisibilityAction } from "oxalis/model/actions/ui_actions"; import { Model } from "oxalis/singletons"; -import type { EditableMapping, OxalisState, SkeletonTracing, VolumeTracing } from "oxalis/store"; +import type { HybridTracing, OxalisState } from "oxalis/store"; import Store from "oxalis/store"; import VersionEntryGroup from "oxalis/view/version_entry_group"; import { api } from "oxalis/singletons"; -import Toast from "libs/toast"; import { useInfiniteQuery, useQueryClient } from "@tanstack/react-query"; import { useEffectOnlyOnce } from "libs/react_hooks"; import { useFetch } from "libs/react_helpers"; import { useSelector } from "react-redux"; +import { getCreationTimestamp } from "oxalis/model/accessors/annotation_accessor"; const 
ENTRIES_PER_PAGE = 5000; type Props = { - versionedObjectType: SaveQueueType; - tracing: SkeletonTracing | VolumeTracing | EditableMapping; allowUpdate: boolean; }; @@ -49,25 +45,18 @@ type GroupedAndChunkedVersions = Record - Model.getSegmentationTracingLayer(volumeTracingId), - ); - segmentationLayersToReload.push(...versionedSegmentationLayers); - } + // TODO: properly determine which layers to reload. + // No versions were passed which means that the newest annotation should be + // shown. Therefore, reload all segmentation layers. + segmentationLayersToReload.push(...Model.getSegmentationTracingLayers()); for (const segmentationLayer of segmentationLayersToReload) { segmentationLayer.cube.collectAllBuckets(); @@ -80,20 +69,13 @@ async function handleRestoreVersion( versions: APIUpdateActionBatch[], version: number, ) { - const getNewestVersion = () => _.max(versions.map((batch) => batch.version)) || 0; if (props.allowUpdate) { - Store.dispatch( - setVersionNumberAction( - getNewestVersion(), - props.versionedObjectType, - props.tracing.tracingId, - ), - ); + const newestVersion = _.max(versions.map((batch) => batch.version)) || 0; + Store.dispatch(setVersionNumberAction(newestVersion)); Store.dispatch( pushSaveQueueTransaction( [revertToVersion(version)], - props.versionedObjectType, - props.tracing.tracingId, + "experimental; leaving out tracingId as this should not be required", ), ); await Model.ensureSavedState(); @@ -102,28 +84,7 @@ async function handleRestoreVersion( } else { const { annotationType, annotationId, volumes } = Store.getState().tracing; const includesVolumeFallbackData = volumes.some((volume) => volume.fallbackLayer != null); - downloadAnnotation(annotationId, annotationType, includesVolumeFallbackData, { - [props.versionedObjectType]: version, - }); - } -} - -function handlePreviewVersion(props: Props, version: number) { - if (props.versionedObjectType === "skeleton") { - return previewVersion({ - skeleton: version, - }); - } else if 
(props.versionedObjectType === "volume") { - return previewVersion({ - volumes: { - [props.tracing.tracingId]: version, - }, - }); - } else { - Toast.warning( - `Version preview and restoring for ${props.versionedObjectType}s is not supported yet.`, - ); - return Promise.resolve(); + downloadAnnotation(annotationId, annotationType, includesVolumeFallbackData, version); } } @@ -146,10 +107,9 @@ const getGroupedAndChunkedVersions = _.memoize( ); async function getUpdateActionLogPage( - props: Props, + tracing: HybridTracing, tracingStoreUrl: string, - tracingId: string, - versionedObjectType: SaveQueueType, + annotationId: string, newestVersion: number, // 0 is the "newest" page (i.e., the page in which the newest version is) relativePageNumber: number, @@ -177,8 +137,7 @@ async function getUpdateActionLogPage( const updateActionLog = await getUpdateActionLog( tracingStoreUrl, - tracingId, - versionedObjectType, + annotationId, oldestVersionInPage, newestVersionInPage, ); @@ -188,7 +147,7 @@ async function getUpdateActionLogPage( if (oldestVersionInPage === 1) { updateActionLog.push({ version: 0, - value: [serverCreateTracing(props.tracing.createdTimestamp)], + value: [serverCreateTracing(getCreationTimestamp(tracing))], }); } @@ -201,14 +160,15 @@ async function getUpdateActionLogPage( } function VersionList(props: Props) { - const { tracing } = props; const tracingStoreUrl = useSelector((state: OxalisState) => state.tracing.tracingStore.url); + const annotationId = useSelector((state: OxalisState) => state.tracing.annotationId); const newestVersion = useFetch( - () => getNewestVersionForTracing(tracingStoreUrl, tracing.tracingId, props.versionedObjectType), + () => getNewestVersionForTracing(tracingStoreUrl, annotationId), null, - [tracing], + [annotationId], ); + console.log("newestVersion", newestVersion); if (newestVersion == null) { return ( @@ -222,31 +182,26 @@ function VersionList(props: Props) { } function InnerVersionList(props: Props & { newestVersion: 
number }) { + const tracing = useSelector((state: OxalisState) => state.tracing); const queryClient = useQueryClient(); // Remember the version with which the version view was opened ( // the active version could change by the actions of the user). // Based on this version, the page numbers are calculated. const { newestVersion } = props; - const [initialVersion] = useState(props.tracing.version); + const [initialVersion] = useState(tracing.version); function fetchPaginatedVersions({ pageParam }: { pageParam?: number }) { + // TODO: maybe refactor this so that this method is not calculated very rendering cycle if (pageParam == null) { pageParam = Math.floor((newestVersion - initialVersion) / ENTRIES_PER_PAGE); } - const { tracingId } = props.tracing; const { url: tracingStoreUrl } = Store.getState().tracing.tracingStore; + const annotationId = Store.getState().tracing.annotationId; - return getUpdateActionLogPage( - props, - tracingStoreUrl, - tracingId, - props.versionedObjectType, - newestVersion, - pageParam, - ); + return getUpdateActionLogPage(tracing, tracingStoreUrl, annotationId, newestVersion, pageParam); } - const queryKey = ["versions", props.tracing.tracingId]; + const queryKey = ["versions", tracing.annotationId]; useEffectOnlyOnce(() => { // Remove all previous existent queries so that the content of this view @@ -338,11 +293,11 @@ function InnerVersionList(props: Props & { newestVersion: number }) { batches={batchesOrDateString} allowUpdate={props.allowUpdate} newestVersion={flattenedVersions[0].version} - activeVersion={props.tracing.version} + activeVersion={tracing.version} onRestoreVersion={(version) => handleRestoreVersion(props, flattenedVersions, version) } - onPreviewVersion={(version) => handlePreviewVersion(props, version)} + onPreviewVersion={(version) => previewVersion(version)} key={batchesOrDateString[0].version} /> ) diff --git a/frontend/javascripts/oxalis/view/version_view.tsx b/frontend/javascripts/oxalis/view/version_view.tsx index 
8068ad59be6..e755a6c6f38 100644 --- a/frontend/javascripts/oxalis/view/version_view.tsx +++ b/frontend/javascripts/oxalis/view/version_view.tsx @@ -1,14 +1,14 @@ -import { Button, Alert, Tabs, type TabsProps } from "antd"; +import { Button, Alert } from "antd"; import { CloseOutlined } from "@ant-design/icons"; -import { connect } from "react-redux"; +import { connect, useDispatch } from "react-redux"; import * as React from "react"; -import { getReadableNameByVolumeTracingId } from "oxalis/model/accessors/volumetracing_accessor"; import { setAnnotationAllowUpdateAction } from "oxalis/model/actions/annotation_actions"; import { setVersionRestoreVisibilityAction } from "oxalis/model/actions/ui_actions"; import type { OxalisState, Tracing } from "oxalis/store"; -import { type TracingType, TracingTypeEnum } from "types/api_flow_types"; import Store from "oxalis/store"; import VersionList, { previewVersion } from "oxalis/view/version_list"; +import { useState } from "react"; +import { useWillUnmount } from "beautiful-react-hooks"; export type Versions = { skeleton?: number | null | undefined; @@ -21,151 +21,84 @@ type OwnProps = { allowUpdate: boolean; }; type Props = StateProps & OwnProps; -type State = { - activeTracingType: TracingType; - initialAllowUpdate: boolean; -}; -class VersionView extends React.Component { - state: State = { - activeTracingType: - this.props.tracing.skeleton != null ? 
TracingTypeEnum.skeleton : TracingTypeEnum.volume, - // Remember whether the tracing could originally be updated - initialAllowUpdate: this.props.allowUpdate, - }; +const VersionView: React.FC = (props: Props) => { + const [initialAllowUpdate] = useState(props.allowUpdate); + const dispatch = useDispatch(); - componentWillUnmount() { - Store.dispatch(setAnnotationAllowUpdateAction(this.state.initialAllowUpdate)); - } + useWillUnmount(() => { + dispatch(setAnnotationAllowUpdateAction(initialAllowUpdate)); + }); - handleClose = async () => { + const handleClose = async () => { // This will load the newest version of both skeleton and volume tracings await previewVersion(); Store.dispatch(setVersionRestoreVisibilityAction(false)); - Store.dispatch(setAnnotationAllowUpdateAction(this.state.initialAllowUpdate)); - }; - - onChangeTab = (activeKey: string) => { - this.setState({ - activeTracingType: activeKey as TracingType, - }); + Store.dispatch(setAnnotationAllowUpdateAction(initialAllowUpdate)); }; - render() { - const tabs: TabsProps["items"] = []; - - if (this.props.tracing.skeleton != null) - tabs.push({ - label: "Skeleton", - key: "skeleton", - children: ( - - ), - }); - - tabs.push( - ...this.props.tracing.volumes.map((volumeTracing) => ({ - label: getReadableNameByVolumeTracingId(this.props.tracing, volumeTracing.tracingId), - key: volumeTracing.tracingId, - children: ( - - ), - })), - ); - - tabs.push( - ...this.props.tracing.mappings.map((mapping) => ({ - label: `${getReadableNameByVolumeTracingId( - this.props.tracing, - mapping.tracingId, - )} (Editable Mapping)`, - key: `${mapping.tracingId}-${mapping.mappingName}`, - children: ( - - ), - })), - ); - - return ( + return ( +
-
-

- Version History -

-
+ Version History + +
- ); - } -} +
+ +
+
+ ); +}; function mapStateToProps(state: OxalisState): StateProps { return { diff --git a/frontend/javascripts/router.tsx b/frontend/javascripts/router.tsx index dcbc7815c49..178958aba06 100644 --- a/frontend/javascripts/router.tsx +++ b/frontend/javascripts/router.tsx @@ -1,6 +1,6 @@ import { createExplorational, - getAnnotationInformation, + getMaybeOutdatedAnnotationInformation, getOrganizationForDataset, getShortLink, } from "admin/admin_rest_api"; @@ -198,7 +198,9 @@ class ReactRouter extends React.Component { serverAuthenticationCallback = async ({ match }: ContextRouter) => { try { - const annotationInformation = await getAnnotationInformation(match.params.id || ""); + const annotationInformation = await getMaybeOutdatedAnnotationInformation( + match.params.id || "", + ); return annotationInformation.visibility === "Public"; } catch (_ex) { // Annotation could not be found diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index 449d5bc8a29..ffb9b0243c7 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -20,6 +20,8 @@ import * as api from "admin/admin_rest_api"; import generateDummyTrees from "oxalis/model/helpers/generate_dummy_trees"; import test from "ava"; import { createSaveQueueFromUpdateActions } from "../helpers/saveHelpers"; +import type { SaveQueueEntry } from "oxalis/store"; + const datasetId = { name: "confocal-multi_knossos", owningOrganization: "Organization_X", @@ -33,7 +35,7 @@ test.before("Reset database", async () => { }); test("getAnnotationInformation()", async (t) => { const annotationId = "570ba0092a7c0e980056fe9b"; - const annotation = await api.getAnnotationInformation(annotationId); + const annotation = await api.getMaybeOutdatedAnnotationInformation(annotationId); t.is(annotation.id, annotationId); 
writeTypeCheckingFile(annotation, "annotation", "APIAnnotation"); t.snapshot(annotation); @@ -41,7 +43,7 @@ test("getAnnotationInformation()", async (t) => { test("getAnnotationInformation() for public annotation while logged out", async (t) => { setCurrToken("invalidToken"); const annotationId = "88135c192faeb34c0081c05d"; - const annotation = await api.getAnnotationInformation(annotationId); + const annotation = await api.getMaybeOutdatedAnnotationInformation(annotationId); t.is(annotation.id, annotationId); t.snapshot(annotation); setCurrToken(tokenUserA); @@ -76,7 +78,7 @@ test.serial("finishAnnotation() and reOpenAnnotation() for explorational", async }); test.serial("editAnnotation()", async (t) => { const annotationId = "68135c192faeb34c0081c05d"; - const originalAnnotation = await api.getAnnotationInformation(annotationId); + const originalAnnotation = await api.getMaybeOutdatedAnnotationInformation(annotationId); const { name, visibility, description } = originalAnnotation; const newName = "new name"; const newVisibility = "Public"; @@ -86,7 +88,7 @@ test.serial("editAnnotation()", async (t) => { visibility: newVisibility, description: newDescription, }); - const editedAnnotation = await api.getAnnotationInformation(annotationId); + const editedAnnotation = await api.getMaybeOutdatedAnnotationInformation(annotationId); t.is(editedAnnotation.name, newName); t.is(editedAnnotation.visibility, newVisibility); t.is(editedAnnotation.description, newDescription); @@ -104,7 +106,7 @@ test.serial("finishAllAnnotations()", async (t) => { const annotationIds = ["78135c192faeb34c0081c05d", "78135c192faeb34c0081c05e"]; await api.finishAllAnnotations(annotationIds); const finishedAnnotations = await Promise.all( - annotationIds.map((id) => api.getAnnotationInformation(id)), + annotationIds.map((id) => api.getMaybeOutdatedAnnotationInformation(id)), ); t.is(finishedAnnotations.length, 2); finishedAnnotations.forEach((annotation) => { @@ -118,7 +120,9 @@ 
test.serial("createExplorational() and finishAnnotation()", async (t) => { const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); t.snapshot(replaceVolatileValues(createdExplorational)); await api.finishAnnotation(createdExplorational.id, APIAnnotationTypeEnum.Explorational); - const finishedAnnotation = await api.getAnnotationInformation(createdExplorational.id); + const finishedAnnotation = await api.getMaybeOutdatedAnnotationInformation( + createdExplorational.id, + ); t.is(finishedAnnotation.state, "Finished"); }); test.serial("getTracingsForAnnotation()", async (t) => { @@ -146,8 +150,7 @@ test.serial("getTracingsForAnnotation() for hybrid", async (t) => { }); }); -// @ts-expect-error ts-migrate(7006) FIXME: Parameter 'queue' implicitly has an 'any' type. -async function sendUpdateActionsForSkeleton(explorational: APIAnnotation, queue) { +async function sendUpdateActionsForSkeleton(explorational: APIAnnotation, queue: SaveQueueEntry[]) { const skeletonTracing = getSkeletonDescriptor(explorational); if (skeletonTracing == null) throw new Error("No skeleton annotation present."); return sendRequestWithToken( @@ -173,6 +176,7 @@ test.serial("Send update actions and compare resulting tracing", async (t) => { [UpdateActions.updateSkeletonTracing(initialSkeleton, [2, 3, 4], null, [1, 2, 3], 2)], ], 123456789, + createdExplorational.annotationLayers[0].tracingId, ), 0, ); @@ -207,6 +211,7 @@ test("Send complex update actions and compare resulting tracing", async (t) => { createSaveQueueFromUpdateActions( [createTreesUpdateActions, [updateTreeGroupsUpdateAction]], 123456789, + createdExplorational.annotationLayers[0].tracingId, ), 0, ); diff --git a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts index 55a2c1fa71d..b2c4d8db95b 100644 --- a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts +++ 
b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts @@ -1,6 +1,10 @@ -import type { ServerSkeletonTracing, APIAnnotation } from "types/api_flow_types"; +import { + type ServerSkeletonTracing, + type APIAnnotation, + AnnotationLayerType, +} from "types/api_flow_types"; export const tracing: ServerSkeletonTracing = { - typ: "Skeleton", + typ: AnnotationLayerType.Skeleton, id: "47e37793-d0be-4240-a371-87ce68561a13", trees: [ { @@ -173,11 +177,12 @@ export const annotation: APIAnnotation = { allowDownload: true, allowSave: true, }, + version: 0, annotationLayers: [ { - name: "Skeleton", + name: AnnotationLayerType.Skeleton, tracingId: "47e37793-d0be-4240-a371-87ce68561a13", - typ: "Skeleton", + typ: AnnotationLayerType.Skeleton, stats: {}, }, ], diff --git a/frontend/javascripts/test/helpers/saveHelpers.ts b/frontend/javascripts/test/helpers/saveHelpers.ts index 53ba1f35865..09703d25e29 100644 --- a/frontend/javascripts/test/helpers/saveHelpers.ts +++ b/frontend/javascripts/test/helpers/saveHelpers.ts @@ -1,4 +1,5 @@ import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; +import { addTracingIdToActions } from "oxalis/model/reducers/save_reducer"; import type { UpdateAction } from "oxalis/model/sagas/update_actions"; import type { SaveQueueEntry } from "oxalis/store"; import dummyUser from "test/fixtures/dummy_user"; @@ -6,13 +7,14 @@ import dummyUser from "test/fixtures/dummy_user"; export function createSaveQueueFromUpdateActions( updateActions: UpdateAction[][], timestamp: number, + tracingId: string, stats: TracingStats | null = null, ): SaveQueueEntry[] { return updateActions.map((ua) => ({ version: -1, timestamp, stats, - actions: ua.slice(), + actions: addTracingIdToActions(ua, tracingId), info: "[]", transactionGroupCount: 1, authorId: dummyUser.id, @@ -21,7 +23,9 @@ export function createSaveQueueFromUpdateActions( })); } export function withoutUpdateTracing(items: Array): Array { - return items.filter((item) => 
item.name !== "updateTracing"); + return items.filter( + (item) => item.name !== "updateSkeletonTracing" && item.name !== "updateVolumeTracing", + ); } export function withoutUpdateTree(items: Array): Array { return items.filter((item) => item.name !== "updateTree"); diff --git a/frontend/javascripts/test/reducers/save_reducer.spec.ts b/frontend/javascripts/test/reducers/save_reducer.spec.ts index 0a4b398f274..6428fb94bd8 100644 --- a/frontend/javascripts/test/reducers/save_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/save_reducer.spec.ts @@ -2,10 +2,10 @@ import mockRequire from "mock-require"; import test from "ava"; import "test/reducers/save_reducer.mock"; import dummyUser from "test/fixtures/dummy_user"; -import type { SaveState } from "oxalis/store"; -import type { APIUser } from "types/api_flow_types"; +import type { OxalisState } from "oxalis/store"; import { createSaveQueueFromUpdateActions } from "../helpers/saveHelpers"; -import type { EmptyObject } from "types/globals"; +import type { UpdateAction } from "oxalis/model/sagas/update_actions"; + const TIMESTAMP = 1494695001688; const DateMock = { now: () => TIMESTAMP, @@ -15,97 +15,94 @@ const AccessorMock = { }; mockRequire("libs/date", DateMock); mockRequire("oxalis/model/accessors/skeletontracing_accessor", AccessorMock); -const SaveActions = mockRequire.reRequire("oxalis/model/actions/save_actions"); -const SaveReducer = mockRequire.reRequire("oxalis/model/reducers/save_reducer").default; -const { createEdge } = mockRequire.reRequire("oxalis/model/sagas/update_actions"); -const initialState: { save: SaveState; activeUser: APIUser; tracing: EmptyObject } = { +const SaveActions = mockRequire.reRequire( + "oxalis/model/actions/save_actions", +) as typeof import("oxalis/model/actions/save_actions"); +const SaveReducer = mockRequire.reRequire("oxalis/model/reducers/save_reducer") + .default as typeof import("oxalis/model/reducers/save_reducer")["default"]; +const { createEdge } = 
mockRequire.reRequire( + "oxalis/model/sagas/update_actions", +) as typeof import("oxalis/model/sagas/update_actions"); + +const tracingId = "1234567890"; +const initialState = { activeUser: dummyUser, save: { - isBusyInfo: { - skeleton: false, - volumes: {}, - mappings: {}, - }, - queue: { - skeleton: [], - volumes: {}, - mappings: {}, - }, - lastSaveTimestamp: { - skeleton: 0, - volumes: {}, - mappings: {}, - }, + isBusy: false, + queue: [], + lastSaveTimestamp: 0, progressInfo: { processedActionCount: 0, totalActionCount: 0, }, }, tracing: {}, -}; +} as any as OxalisState; test("Save should add update actions to the queue", (t) => { const items = [createEdge(0, 1, 2), createEdge(0, 2, 3)]; - const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP); - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); + const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP, tracingId); + const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); const newState = SaveReducer(initialState, pushAction); - t.deepEqual(newState.save.queue.skeleton, saveQueue); + t.deepEqual(newState.save.queue, saveQueue); }); test("Save should add more update actions to the queue", (t) => { const getItems = (treeId: number) => [createEdge(treeId, 1, 2), createEdge(treeId, 2, 3)]; - const saveQueue = createSaveQueueFromUpdateActions([getItems(0), getItems(1)], TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions( + [getItems(0), getItems(1)], + TIMESTAMP, + tracingId, + ); const testState = SaveReducer( initialState, - SaveActions.pushSaveQueueTransaction(getItems(0), "skeleton"), + SaveActions.pushSaveQueueTransaction(getItems(0), "skeleton", tracingId), ); const newState = SaveReducer( testState, - SaveActions.pushSaveQueueTransaction(getItems(1), "skeleton"), + SaveActions.pushSaveQueueTransaction(getItems(1), "skeleton", tracingId), ); - t.deepEqual(newState.save.queue.skeleton, saveQueue); + 
t.deepEqual(newState.save.queue, saveQueue); }); test("Save should add zero update actions to the queue", (t) => { - // @ts-expect-error ts-migrate(7034) FIXME: Variable 'items' implicitly has type 'any[]' in so... Remove this comment to see the full error message - const items = []; - // @ts-expect-error ts-migrate(7005) FIXME: Variable 'items' implicitly has an 'any[]' type. - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); + const items: UpdateAction[] = []; + const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); const newState = SaveReducer(initialState, pushAction); - t.deepEqual(newState.save.queue.skeleton, []); + t.deepEqual(newState.save.queue, []); }); test("Save should remove one update actions from the queue", (t) => { const firstItem = [createEdge(0, 1, 2)]; const secondItem = [createEdge(1, 2, 3)]; - const saveQueue = createSaveQueueFromUpdateActions([secondItem], TIMESTAMP); - const firstPushAction = SaveActions.pushSaveQueueTransaction(firstItem, "skeleton"); - const secondPushAction = SaveActions.pushSaveQueueTransaction(secondItem, "skeleton"); - const popAction = SaveActions.shiftSaveQueueAction(1, "skeleton"); + const saveQueue = createSaveQueueFromUpdateActions([secondItem], TIMESTAMP, tracingId); + const firstPushAction = SaveActions.pushSaveQueueTransaction(firstItem, "skeleton", tracingId); + const secondPushAction = SaveActions.pushSaveQueueTransaction(secondItem, "skeleton", tracingId); + const popAction = SaveActions.shiftSaveQueueAction(1); let newState = SaveReducer(initialState, firstPushAction); newState = SaveReducer(newState, secondPushAction); newState = SaveReducer(newState, popAction); - t.deepEqual(newState.save.queue.skeleton, saveQueue); + t.deepEqual(newState.save.queue, saveQueue); }); test("Save should remove zero update actions from the queue", (t) => { const items = [createEdge(0, 1, 2), createEdge(1, 2, 3)]; - const saveQueue = 
createSaveQueueFromUpdateActions([items], TIMESTAMP); - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); - const popAction = SaveActions.shiftSaveQueueAction(0, "skeleton"); + const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP, tracingId); + const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); + const popAction = SaveActions.shiftSaveQueueAction(0); let newState = SaveReducer(initialState, pushAction); newState = SaveReducer(newState, popAction); - t.deepEqual(newState.save.queue.skeleton, saveQueue); + t.deepEqual(newState.save.queue, saveQueue); }); test("Save should remove all update actions from the queue (1/2)", (t) => { const items = [createEdge(0, 1, 2), createEdge(0, 2, 3)]; - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); - const popAction = SaveActions.shiftSaveQueueAction(2, "skeleton"); + const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); + const popAction = SaveActions.shiftSaveQueueAction(2); let newState = SaveReducer(initialState, pushAction); newState = SaveReducer(newState, popAction); - t.deepEqual(newState.save.queue.skeleton, []); + t.deepEqual(newState.save.queue, []); }); test("Save should remove all update actions from the queue (2/2)", (t) => { const items = [createEdge(0, 1, 2), createEdge(0, 2, 3)]; - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); - const popAction = SaveActions.shiftSaveQueueAction(5, "skeleton"); + const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); + const popAction = SaveActions.shiftSaveQueueAction(5); let newState = SaveReducer(initialState, pushAction); newState = SaveReducer(newState, popAction); - t.deepEqual(newState.save.queue.skeleton, []); + t.deepEqual(newState.save.queue, []); }); diff --git a/frontend/javascripts/test/sagas/saga_integration.spec.ts 
b/frontend/javascripts/test/sagas/saga_integration.spec.ts index 771fb88b194..62031c20d51 100644 --- a/frontend/javascripts/test/sagas/saga_integration.spec.ts +++ b/frontend/javascripts/test/sagas/saga_integration.spec.ts @@ -14,17 +14,20 @@ import { setActiveUserAction } from "oxalis/model/actions/user_actions"; import dummyUser from "test/fixtures/dummy_user"; import { hasRootSagaCrashed } from "oxalis/model/sagas/root_saga"; import { omit } from "lodash"; +import { tracing as TaskTracing } from "test/fixtures/tasktracing_server_objects"; -const { - createTreeMapFromTreeArray, - generateTreeName, -} = require("oxalis/model/reducers/skeletontracing_reducer_helpers"); +const { createTreeMapFromTreeArray, generateTreeName } = + require("oxalis/model/reducers/skeletontracing_reducer_helpers") as typeof import("oxalis/model/reducers/skeletontracing_reducer_helpers"); const { addTreesAndGroupsAction, deleteNodeAction } = mockRequire.reRequire( "oxalis/model/actions/skeletontracing_actions", -); -const { discardSaveQueuesAction } = mockRequire.reRequire("oxalis/model/actions/save_actions"); -const UpdateActions = mockRequire.reRequire("oxalis/model/sagas/update_actions"); +) as typeof import("oxalis/model/actions/skeletontracing_actions"); +const { discardSaveQueuesAction } = mockRequire.reRequire( + "oxalis/model/actions/save_actions", +) as typeof import("oxalis/model/actions/save_actions"); +const UpdateActions = mockRequire.reRequire( + "oxalis/model/sagas/update_actions", +) as typeof import("oxalis/model/sagas/update_actions"); test.beforeEach(async (t) => { // Setup oxalis, this will execute model.fetch(...) and initialize the store with the tracing, etc. 
@@ -58,7 +61,7 @@ test.serial( [ UpdateActions.updateTree(treeWithCorrectName), UpdateActions.updateSkeletonTracing( - Store.getState().tracing.skeleton, + enforceSkeletonTracing(Store.getState().tracing), [1, 2, 3], [], [0, 0, 0], @@ -67,10 +70,11 @@ test.serial( ], ], TIMESTAMP, + TaskTracing.id, getStats(state.tracing, "skeleton", "irrelevant_in_skeleton_case") || undefined, ); // Reset the info field which is just for debugging purposes - const actualSaveQueue = state.save.queue.skeleton.map((entry) => { + const actualSaveQueue = state.save.queue.map((entry) => { return { ...omit(entry, "info"), info: "[]" }; }); // Once the updateTree update action is in the save queue, we're good. @@ -81,24 +85,21 @@ test.serial( test.serial("Save actions should not be chunked below the chunk limit (1/3)", (t) => { Store.dispatch(discardSaveQueuesAction()); - t.deepEqual(Store.getState().save.queue.skeleton, []); + t.deepEqual(Store.getState().save.queue, []); const trees = generateDummyTrees(1000, 1); Store.dispatch(addTreesAndGroupsAction(createTreeMapFromTreeArray(trees), [])); - t.is(Store.getState().save.queue.skeleton.length, 1); - t.true( - Store.getState().save.queue.skeleton[0].actions.length < - MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton, - ); + t.is(Store.getState().save.queue.length, 1); + t.true(Store.getState().save.queue[0].actions.length < MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton); }); test.serial("Save actions should be chunked above the chunk limit (2/3)", (t) => { Store.dispatch(discardSaveQueuesAction()); - t.deepEqual(Store.getState().save.queue.skeleton, []); + t.deepEqual(Store.getState().save.queue, []); const trees = generateDummyTrees(5000, 1); Store.dispatch(addTreesAndGroupsAction(createTreeMapFromTreeArray(trees), [])); const state = Store.getState(); - t.true(state.save.queue.skeleton.length > 1); - t.is(state.save.queue.skeleton[0].actions.length, MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton); + t.true(state.save.queue.length > 1); + 
t.is(state.save.queue[0].actions.length, MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton); }); test.serial("Save actions should be chunked after compacting (3/3)", (t) => { @@ -107,12 +108,12 @@ test.serial("Save actions should be chunked after compacting (3/3)", (t) => { const trees = generateDummyTrees(1, nodeCount); Store.dispatch(addTreesAndGroupsAction(createTreeMapFromTreeArray(trees), [])); Store.dispatch(discardSaveQueuesAction()); - t.deepEqual(Store.getState().save.queue.skeleton, []); + t.deepEqual(Store.getState().save.queue, []); // Delete some node, NOTE that this is not the node in the middle of the tree! // The addTreesAndGroupsAction gives new ids to nodes and edges in a non-deterministic way. const middleNodeId = trees[0].nodes[nodeCount / 2].id; Store.dispatch(deleteNodeAction(middleNodeId)); - const { skeleton: skeletonSaveQueue } = Store.getState().save.queue; + const skeletonSaveQueue = Store.getState().save.queue; // There should only be one chunk t.is(skeletonSaveQueue.length, 1); t.true(skeletonSaveQueue[0].actions.length < MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton); diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 4707cf28bee..90cf88d7579 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -18,17 +18,27 @@ mockRequire("libs/date", DateMock); mockRequire("oxalis/model/sagas/root_saga", function* () { yield; }); -const UpdateActions = mockRequire.reRequire("oxalis/model/sagas/update_actions"); -const SaveActions = mockRequire.reRequire("oxalis/model/actions/save_actions"); -const { take, call, put } = mockRequire.reRequire("redux-saga/effects"); +const UpdateActions = mockRequire.reRequire( + "oxalis/model/sagas/update_actions", +) as typeof import("oxalis/model/sagas/update_actions"); +const SaveActions = mockRequire.reRequire( + "oxalis/model/actions/save_actions", +) as typeof 
import("oxalis/model/actions/save_actions"); +const { take, call, put } = mockRequire.reRequire( + "redux-saga/effects", +) as typeof import("redux-saga/effects"); const { pushSaveQueueAsync, - sendRequestToServer, + sendSaveRequestToServer, toggleErrorHighlighting, addVersionNumbers, sendRequestWithToken, -} = mockRequire.reRequire("oxalis/model/sagas/save_saga"); -const tracingId = "1234567890"; +} = mockRequire.reRequire( + "oxalis/model/sagas/save_saga", +) as typeof import("oxalis/model/sagas/save_saga"); + +const annotationId = "annotation-abcdefgh"; +const tracingId = "tracing-1234567890"; const initialState = { dataset: { dataSource: { @@ -71,17 +81,18 @@ const TRACING_TYPE = "skeleton"; test("SaveSaga should compact multiple updateTracing update actions", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [ - [UpdateActions.updateSkeletonTracing(initialState, [1, 2, 3], [0, 0, 1], 1)], - [UpdateActions.updateSkeletonTracing(initialState, [2, 3, 4], [0, 0, 1], 2)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [1, 2, 3], [], [0, 0, 1], 1)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [2, 3, 4], [], [0, 0, 1], 2)], ], TIMESTAMP, + tracingId, ); t.deepEqual(compactSaveQueue(saveQueue), [saveQueue[1]]); }); test("SaveSaga should send update actions", (t) => { const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); - const saga = pushSaveQueueAsync(TRACING_TYPE, tracingId); + const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP, tracingId); + const saga = pushSaveQueueAsync(); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); saga.next(); // setLastSaveTimestampAction @@ -95,15 +106,15 @@ test("SaveSaga should send update actions", (t) => { saga.next({ forcePush: SaveActions.saveNowAction(), }), - put(setSaveBusyAction(true, TRACING_TYPE, tracingId)), + 
put(setSaveBusyAction(true)), ); saga.next(); // advance to next select state - expectValueDeepEqual(t, saga.next(saveQueue), call(sendRequestToServer, TRACING_TYPE, tracingId)); + expectValueDeepEqual(t, saga.next(saveQueue), call(sendSaveRequestToServer)); saga.next(saveQueue.length); // select state - expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false, TRACING_TYPE, tracingId))); + expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false))); // Test that loop repeats saga.next(); // select state @@ -113,37 +124,47 @@ test("SaveSaga should send request to server", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]], TIMESTAMP, + tracingId, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next([ + { + version: LAST_VERSION, + type: TRACING_TYPE, + tracingId, + }, + ]); + saga.next(annotationId); const [saveQueueWithVersions, versionIncrement] = addVersionNumbers(saveQueue, LAST_VERSION); t.is(versionIncrement, 2); expectValueDeepEqual( t, saga.next(TRACINGSTORE_URL), - call(sendRequestWithToken, `${TRACINGSTORE_URL}/tracings/skeleton/1234567890/update?token=`, { - method: "POST", - data: saveQueueWithVersions, - compress: false, - showErrorToast: false, - }), + call( + sendRequestWithToken, + `${TRACINGSTORE_URL}/tracings/annotation/${annotationId}/update?token=`, + { + method: "POST", + data: saveQueueWithVersions, + compress: false, + showErrorToast: false, + }, + ), ); }); test("SaveSaga should retry update actions", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]], TIMESTAMP, + tracingId, ); const [saveQueueWithVersions, versionIncrement] = addVersionNumbers(saveQueue, LAST_VERSION); 
t.is(versionIncrement, 2); const requestWithTokenCall = call( sendRequestWithToken, - `${TRACINGSTORE_URL}/tracings/skeleton/1234567890/update?token=`, + `${TRACINGSTORE_URL}/tracings/annotation/${annotationId}/update?token=`, { method: "POST", data: saveQueueWithVersions, @@ -151,13 +172,17 @@ test("SaveSaga should retry update actions", (t) => { showErrorToast: false, }, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next([ + { + version: LAST_VERSION, + type: TRACING_TYPE, + tracingId, + }, + ]); + saga.next(annotationId); expectValueDeepEqual(t, saga.next(TRACINGSTORE_URL), requestWithTokenCall); saga.throw("Timeout"); expectValueDeepEqual(t, saga.next("Explorational"), call(toggleErrorHighlighting, true)); @@ -172,25 +197,34 @@ test("SaveSaga should escalate on permanent client error update actions", (t) => const saveQueue = createSaveQueueFromUpdateActions( [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]], TIMESTAMP, + tracingId, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next([ + { + version: LAST_VERSION, + type: TRACING_TYPE, + tracingId, + }, + ]); + saga.next(annotationId); const [saveQueueWithVersions, versionIncrement] = addVersionNumbers(saveQueue, LAST_VERSION); t.is(versionIncrement, 2); expectValueDeepEqual( t, saga.next(TRACINGSTORE_URL), - call(sendRequestWithToken, `${TRACINGSTORE_URL}/tracings/skeleton/1234567890/update?token=`, { - method: "POST", - data: saveQueueWithVersions, - compress: false, - showErrorToast: false, - }), + call( + sendRequestWithToken, + `${TRACINGSTORE_URL}/tracings/annotation/${annotationId}/update?token=`, + { + method: "POST", + data: saveQueueWithVersions, 
+ compress: false, + showErrorToast: false, + }, + ), ); saga.throw({ status: 409, @@ -209,8 +243,8 @@ test("SaveSaga should escalate on permanent client error update actions", (t) => }); test("SaveSaga should send update actions right away and try to reach a state where all updates are saved", (t) => { const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); - const saga = pushSaveQueueAsync(TRACING_TYPE, tracingId); + const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP, tracingId); + const saga = pushSaveQueueAsync(); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); saga.next(); saga.next(); // select state @@ -224,16 +258,16 @@ test("SaveSaga should send update actions right away and try to reach a state wh saga.next(); // select state - saga.next(saveQueue); // call sendRequestToServer + saga.next(saveQueue); // call sendSaveRequestToServer saga.next(1); // advance to select state - expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false, TRACING_TYPE, tracingId))); + expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false))); }); test("SaveSaga should not try to reach state with all actions being saved when saving is triggered by a timeout", (t) => { const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); - const saga = pushSaveQueueAsync(TRACING_TYPE, tracingId); + const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP, tracingId); + const saga = pushSaveQueueAsync(); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); saga.next(); saga.next(); // select state @@ -245,41 +279,34 @@ test("SaveSaga should not try to reach state with all actions being saved when s timeout: "a placeholder", }); // put setSaveBusyAction - saga.next(saveQueue); 
// call sendRequestToServer + saga.next(saveQueue); // call sendSaveRequestToServer - expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false, TRACING_TYPE, tracingId))); + expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false))); }); test("SaveSaga should remove the correct update actions", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [ - [UpdateActions.updateSkeletonTracing(initialState, [1, 2, 3], [0, 0, 1], 1)], - [UpdateActions.updateSkeletonTracing(initialState, [2, 3, 4], [0, 0, 1], 2)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [1, 2, 3], [], [0, 0, 1], 1)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [2, 3, 4], [], [0, 0, 1], 2)], ], TIMESTAMP, + tracingId, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next([ + { + version: LAST_VERSION, + type: TRACING_TYPE, + tracingId, + }, + ]); + saga.next(annotationId); saga.next(TRACINGSTORE_URL); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setVersionNumberAction(3, TRACING_TYPE, tracingId)), - ); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setLastSaveTimestampAction(TRACING_TYPE, tracingId)), - ); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.shiftSaveQueueAction(2, TRACING_TYPE, tracingId)), - ); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setVersionNumberAction(3))); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setLastSaveTimestampAction())); + expectValueDeepEqual(t, saga.next(), put(SaveActions.shiftSaveQueueAction(2))); }); test("SaveSaga should set the correct version numbers", (t) => { const saveQueue = createSaveQueueFromUpdateActions( @@ -289,64 +316,50 @@ test("SaveSaga should set the correct version numbers", (t) => { [UpdateActions.createEdge(2, 3, 4)], ], TIMESTAMP, + tracingId, ); - const 
saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next([ + { + version: LAST_VERSION, + type: TRACING_TYPE, + tracingId, + }, + ]); + saga.next(annotationId); saga.next(TRACINGSTORE_URL); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setVersionNumberAction(LAST_VERSION + 3, TRACING_TYPE, tracingId)), - ); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setLastSaveTimestampAction(TRACING_TYPE, tracingId)), - ); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.shiftSaveQueueAction(3, TRACING_TYPE, tracingId)), - ); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setVersionNumberAction(LAST_VERSION + 3))); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setLastSaveTimestampAction())); + expectValueDeepEqual(t, saga.next(), put(SaveActions.shiftSaveQueueAction(3))); }); test("SaveSaga should set the correct version numbers if the save queue was compacted", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [ - [UpdateActions.updateSkeletonTracing(initialState, [1, 2, 3], [0, 0, 1], 1)], - [UpdateActions.updateSkeletonTracing(initialState, [2, 3, 4], [0, 0, 1], 2)], - [UpdateActions.updateSkeletonTracing(initialState, [3, 4, 5], [0, 0, 1], 3)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [1, 2, 3], [], [0, 0, 1], 1)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [2, 3, 4], [], [0, 0, 1], 2)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [3, 4, 5], [], [0, 0, 1], 3)], ], TIMESTAMP, + tracingId, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next([ + { + version: LAST_VERSION, + type: TRACING_TYPE, + tracingId, + }, + ]); + 
saga.next(annotationId); saga.next(TRACINGSTORE_URL); // two of the updateTracing update actions are removed by compactSaveQueue - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setVersionNumberAction(LAST_VERSION + 1, TRACING_TYPE, tracingId)), - ); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setLastSaveTimestampAction(TRACING_TYPE, tracingId)), - ); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.shiftSaveQueueAction(3, TRACING_TYPE, tracingId)), - ); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setVersionNumberAction(LAST_VERSION + 1))); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setLastSaveTimestampAction())); + expectValueDeepEqual(t, saga.next(), put(SaveActions.shiftSaveQueueAction(3))); }); test("SaveSaga addVersionNumbers should set the correct version numbers", (t) => { const saveQueue = createSaveQueueFromUpdateActions( @@ -357,6 +370,7 @@ test("SaveSaga addVersionNumbers should set the correct version numbers", (t) => ], TIMESTAMP, + tracingId, ); const [saveQueueWithVersions, versionIncrement] = addVersionNumbers(saveQueue, LAST_VERSION); t.is(versionIncrement, 3); diff --git a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts index d6c9490cc8b..1e1011bba7c 100644 --- a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts @@ -27,6 +27,8 @@ import { TreeTypeEnum } from "oxalis/constants"; import type { Action } from "oxalis/model/actions/actions"; import type { ServerSkeletonTracing } from "types/api_flow_types"; import { enforceSkeletonTracing } from "oxalis/model/accessors/skeletontracing_accessor"; +import type { UpdateAction } from "oxalis/model/sagas/update_actions"; +import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; const TIMESTAMP = 1494347146379; const DateMock = { @@ -77,15 +79,39 @@ function 
testDiffing( ); } +// TODOM +// biome-ignore lint/correctness/noUnusedVariables: function compactSaveQueueWithUpdateActions( queue: Array, tracing: SkeletonTracing, ): Array { return compactSaveQueue( + // todop + // Do we really need compactSaveQueueWithUpdateActions? actually, compactUpdateActions + // is never called with a save queue in prod (instead, the function is called before + // filling the save queue). one could probably combine compactUpdateActions and + // createSaveQueueFromUpdateActions to have a createCompactedSaveQueueFromUpdateActions + // helper function and use that in this spec. queue.map((batch) => ({ ...batch, actions: compactUpdateActions(batch.actions, tracing) })), ); } +function createCompactedSaveQueueFromUpdateActions( + updateActions: UpdateAction[][], + timestamp: number, + tracing: SkeletonTracing, + stats: TracingStats | null = null, +) { + return compactSaveQueue( + createSaveQueueFromUpdateActions( + updateActions.map((batch) => compactUpdateActions(batch, tracing)), + timestamp, + tracing.tracingId, + stats, + ), + ); +} + const skeletonTracing: SkeletonTracing = { type: "skeleton", createdTimestamp: 0, @@ -180,7 +206,6 @@ test("SkeletonTracingSaga shouldn't do anything if unchanged (saga test)", (t) = const saga = setupSavingForTracingType( SkeletonTracingActions.initializeSkeletonTracingAction(serverSkeletonTracing), ); - saga.next(); // forking pushSaveQueueAsync saga.next(); saga.next(initialState.tracing.skeleton); @@ -200,7 +225,6 @@ test("SkeletonTracingSaga should do something if changed (saga test)", (t) => { const saga = setupSavingForTracingType( SkeletonTracingActions.initializeSkeletonTracingAction(serverSkeletonTracing), ); - saga.next(); // forking pushSaveQueueAsync saga.next(); saga.next(initialState.tracing.skeleton); @@ -635,16 +659,18 @@ test("compactUpdateActions should detect a tree merge (1/3)", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = 
createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing, ); + const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; // This should result in a moved treeComponent of size three t.deepEqual(simplifiedFirstBatch[0], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 2, nodeIds: [1, 2, 3], @@ -654,6 +680,7 @@ test("compactUpdateActions should detect a tree merge (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[1], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 1, }, }); @@ -661,6 +688,7 @@ test("compactUpdateActions should detect a tree merge (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 4, target: 1, @@ -695,16 +723,18 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { testDiffing(newState1.tracing, newState2.tracing, newState1.flycam, newState2.flycam), ); // compactUpdateActions is triggered by the saving, it can therefore contain the results of more than one diffing - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState2.tracing), + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + updateActions, + TIMESTAMP, + skeletonTracing, ); + // This should result in one created node and its edge (a) const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.like(simplifiedFirstBatch[0], { name: "createNode", value: { + actionTracingId: "tracingId", id: 5, treeId: 2, }, @@ -712,6 +742,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { 
t.like(simplifiedFirstBatch[1], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 4, target: 5, @@ -723,6 +754,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.deepEqual(simplifiedSecondBatch[0], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 2, nodeIds: [1, 2, 3], @@ -732,6 +764,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.deepEqual(simplifiedSecondBatch[1], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 1, }, }); @@ -742,6 +775,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.deepEqual(simplifiedSecondBatch[4], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 5, target: 1, @@ -797,16 +831,17 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { ), ); // compactUpdateActions is triggered by the saving, it can therefore contain the results of more than one diffing - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + updateActions, + TIMESTAMP, + skeletonTracing, ); // This should result in a moved treeComponent of size one (a) const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.deepEqual(simplifiedFirstBatch[0], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 2, targetId: 1, nodeIds: [4], @@ -816,6 +851,7 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[1], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 2, }, }); @@ -823,6 +859,7 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "createEdge", value: { + 
actionTracingId: "tracingId", treeId: 1, source: 1, target: 4, @@ -841,6 +878,7 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedThirdBatch[0], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 2, targetId: 1, nodeIds: [5, 6], @@ -849,12 +887,14 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedThirdBatch[1], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 2, }, }); t.deepEqual(simplifiedThirdBatch[2], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 1, source: 1, target: 6, @@ -879,16 +919,19 @@ test("compactUpdateActions should detect a tree split (1/3)", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing, ); + // This should result in a new tree const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.like(simplifiedFirstBatch[0], { name: "createTree", value: { + actionTracingId: "tracingId", id: 2, }, }); @@ -896,6 +939,7 @@ test("compactUpdateActions should detect a tree split (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[1], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 2, nodeIds: [3, 4], @@ -905,6 +949,7 @@ test("compactUpdateActions should detect a tree split (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 1, }, @@ -937,22 +982,24 @@ test("compactUpdateActions should detect a tree split (2/3)", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); - 
const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing, ); // This should result in two new trees and two moved treeComponents of size three and two const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.like(simplifiedFirstBatch[0], { name: "createTree", value: { + actionTracingId: "tracingId", id: 2, }, }); t.deepEqual(simplifiedFirstBatch[1], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 2, nodeIds: [3, 4], @@ -961,12 +1008,14 @@ test("compactUpdateActions should detect a tree split (2/3)", (t) => { t.like(simplifiedFirstBatch[2], { name: "createTree", value: { + actionTracingId: "tracingId", id: 3, }, }); t.deepEqual(simplifiedFirstBatch[3], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 3, nodeIds: [5, 6, 7], @@ -976,6 +1025,7 @@ test("compactUpdateActions should detect a tree split (2/3)", (t) => { t.deepEqual(simplifiedFirstBatch[4], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 1, }, @@ -1009,16 +1059,17 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { updateActions.push( testDiffing(newState1.tracing, newState2.tracing, newState1.flycam, newState2.flycam), ); - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState2.tracing), + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + updateActions, + TIMESTAMP, + skeletonTracing, ); // This should result in the creation of a new tree (a) const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.like(simplifiedFirstBatch[0], { name: "createTree", value: { + actionTracingId: "tracingId", 
id: 2, }, }); @@ -1026,6 +1077,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[1], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 2, nodeIds: [3, 4, 5, 6], @@ -1035,6 +1087,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 1, }, @@ -1047,6 +1100,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.like(simplifiedSecondBatch[0], { name: "createTree", value: { + actionTracingId: "tracingId", id: 3, }, }); @@ -1054,6 +1108,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.deepEqual(simplifiedSecondBatch[1], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 2, targetId: 3, nodeIds: [5, 6], @@ -1063,6 +1118,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.deepEqual(simplifiedSecondBatch[2], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 4, treeId: 2, }, @@ -1096,17 +1152,22 @@ test("compactUpdateActions should do nothing if it cannot compact", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + const saveQueueOriginal = createSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing.tracingId, + ); + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing, ); // The deleteTree optimization in compactUpdateActions (that is unrelated to this test) // will remove the first deleteNode update action as the first tree is deleted because of the merge, // therefore remove it here as well - 
saveQueue[0].actions.shift(); + saveQueueOriginal[0].actions.shift(); // Nothing should be changed as the moveTreeComponent update action cannot be inserted - t.deepEqual(simplifiedUpdateActions, saveQueue); + t.deepEqual(simplifiedUpdateActions, saveQueueOriginal); }); test("compactUpdateActions should detect a deleted tree", (t) => { const testState = ChainReducer(initialState) @@ -1125,15 +1186,16 @@ test("compactUpdateActions should detect a deleted tree", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing, ); const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.deepEqual(simplifiedFirstBatch[0], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 2, }, }); @@ -1157,15 +1219,16 @@ test("compactUpdateActions should not detect a deleted tree if there is no delet testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing, ); const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.deepEqual(simplifiedFirstBatch[0], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 2, }, @@ -1173,6 +1236,7 @@ test("compactUpdateActions should not detect a deleted tree if there is no delet t.deepEqual(simplifiedFirstBatch[1], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 3, treeId: 2, }, diff --git 
a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts index d7b16773e50..f30a50ea181 100644 --- a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts @@ -144,7 +144,6 @@ test("VolumeTracingSaga shouldn't do anything if unchanged (saga test)", (t) => const saga = setupSavingForTracingType( VolumeTracingActions.initializeVolumeTracingAction(serverVolumeTracing), ); - saga.next(); // forking pushSaveQueueAsync saga.next(); saga.next(initialState.tracing.volumes[0]); @@ -165,7 +164,6 @@ test("VolumeTracingSaga should do something if changed (saga test)", (t) => { const saga = setupSavingForTracingType( VolumeTracingActions.initializeVolumeTracingAction(serverVolumeTracing), ); - saga.next(); // forking pushSaveQueueAsync saga.next(); saga.next(initialState.tracing.volumes[0]); diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index 85bf5537ee7..b2c564e91b1 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -385,6 +385,10 @@ export enum TracingTypeEnum { volume = "volume", hybrid = "hybrid", } +export enum AnnotationLayerType { + Skeleton = "Skeleton", + Volume = "Volume", +} export type TracingType = keyof typeof TracingTypeEnum; export type APITaskType = { readonly id: string; @@ -467,12 +471,12 @@ export type APITask = { export type AnnotationLayerDescriptor = { name: string; tracingId: string; - typ: "Skeleton" | "Volume"; + typ: AnnotationLayerType; stats: TracingStats | EmptyObject; }; -export type EditableLayerProperties = Partial<{ +export type EditableLayerProperties = { name: string; -}>; +}; export type APIAnnotationInfo = { readonly annotationLayers: Array<AnnotationLayerDescriptor>; readonly dataSetName: string; @@ -551,6 +555,7 @@ type APIAnnotationBase = APIAnnotationInfo & {
readonly owner?: APIUserBase; // This `user` attribute is deprecated and should not be used, anymore. It only exists to satisfy e2e type checks readonly user?: APIUserBase; + readonly version: number; readonly contributors: APIUserBase[]; readonly othersMayEdit: boolean; }; @@ -570,6 +575,19 @@ export type APITimeTrackingPerAnnotation = { timeMillis: number; annotationLayerStats: Array; }; +type APITracingStoreAnnotationLayer = { + tracingId: string; + name: string; + type: AnnotationLayerType; +}; + +export type APITracingStoreAnnotation = { + name: string; + description: string; + version: number; + annotationLayers: APITracingStoreAnnotationLayer[]; +}; + export type APITimeTrackingPerUser = { user: APIUserCompact & { email: string; @@ -870,8 +888,6 @@ export type ServerVolumeTracing = ServerTracingBase & { export type ServerTracing = ServerSkeletonTracing | ServerVolumeTracing; export type ServerEditableMapping = { createdTimestamp: number; - version: number; - mappingName: string; baseMappingName: string; // The id of the volume tracing the editable mapping belongs to tracingId: string; diff --git a/test/backend/Dummies.scala b/test/backend/Dummies.scala index 8aaf33e4b0f..a66085396b7 100644 --- a/test/backend/Dummies.scala +++ b/test/backend/Dummies.scala @@ -61,6 +61,8 @@ object Dummies { Some(true)) val treeGroup2: TreeGroup = TreeGroup("Axon 2", 2, Seq.empty, Some(true)) + val tracingId: String = "dummyTracingId" + val skeletonTracing: SkeletonTracing = SkeletonTracing( "dummy_dataset", Seq(tree1, tree2), diff --git a/test/backend/SkeletonUpdateActionsUnitTestSuite.scala b/test/backend/SkeletonUpdateActionsUnitTestSuite.scala index 8d12fe3331d..7c6b191b9f6 100644 --- a/test/backend/SkeletonUpdateActionsUnitTestSuite.scala +++ b/test/backend/SkeletonUpdateActionsUnitTestSuite.scala @@ -9,7 +9,7 @@ import org.scalatestplus.play._ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { - private def applyUpdateAction(action: 
UpdateAction.SkeletonUpdateAction): SkeletonTracing = + private def applyUpdateAction(action: SkeletonUpdateAction): SkeletonTracing = action.applyOn(Dummies.skeletonTracing) def listConsistsOfLists[T](joinedList: Seq[T], sublist1: Seq[T], sublist2: Seq[T]): Boolean = @@ -30,7 +30,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { comments = List[UpdateActionComment](), groupId = None, isVisible = Option(true), - edgesAreVisible = Option(true) + edgesAreVisible = Option(true), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(createTreeAction) @@ -47,7 +48,7 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "DeleteTreeSkeletonAction" should { "delete the specified tree" in { - val deleteTreeAction = new DeleteTreeSkeletonAction(id = 1) + val deleteTreeAction = new DeleteTreeSkeletonAction(id = 1, actionTracingId = Dummies.tracingId) val result = applyUpdateAction(deleteTreeAction) assert(result.trees.length == Dummies.skeletonTracing.trees.length - 1) @@ -70,7 +71,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { groupId = None, metadata = Some( List(MetadataEntry("myKey", numberValue = Some(5.0)), - MetadataEntry("anotherKey", stringListValue = Some(Seq("hello", "there"))))) + MetadataEntry("anotherKey", stringListValue = Some(Seq("hello", "there"))))), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateTreeAction) @@ -88,7 +90,7 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "MergeTreeSkeletonAction" should { "merge the specified trees" in { - val mergeTreeAction = new MergeTreeSkeletonAction(sourceId = 1, targetId = 2) + val mergeTreeAction = new MergeTreeSkeletonAction(sourceId = 1, targetId = 2, actionTracingId = Dummies.tracingId) val sourceTree = Dummies.tree1 val targetTree = Dummies.tree2 val result = applyUpdateAction(mergeTreeAction) @@ -109,7 +111,10 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "MoveTreeComponentSkeletonAction" 
should { "move the specified (separate) nodes" in { val moveTreeComponentSkeletonAction = - new MoveTreeComponentSkeletonAction(Dummies.comp1Nodes.map(_.id).toList, sourceId = 3, targetId = 4) + new MoveTreeComponentSkeletonAction(Dummies.comp1Nodes.map(_.id).toList, + sourceId = 3, + targetId = 4, + actionTracingId = Dummies.tracingId) val result = moveTreeComponentSkeletonAction.applyOn(Dummies.componentSkeletonTracing) assert(result.trees.length == Dummies.componentSkeletonTracing.trees.length) @@ -127,7 +132,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "CreateEdgeSkeletonAction" should { "create a new edge in the right tree" in { - val createEdgeSkeletonAction = new CreateEdgeSkeletonAction(source = 1, target = 7, treeId = 1) + val createEdgeSkeletonAction = + new CreateEdgeSkeletonAction(source = 1, target = 7, treeId = 1, actionTracingId = Dummies.tracingId) val result = applyUpdateAction(createEdgeSkeletonAction) assert(result.trees.length == Dummies.skeletonTracing.trees.length) @@ -140,8 +146,10 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "DeleteEdgeSkeletonAction" should { "undo CreateEdgeSkeletonAction" in { - val createEdgeSkeletonAction = new CreateEdgeSkeletonAction(source = 0, target = 7, treeId = 1) - val deleteEdgeSkeletonAction = new DeleteEdgeSkeletonAction(source = 0, target = 7, treeId = 1) + val createEdgeSkeletonAction = + new CreateEdgeSkeletonAction(source = 0, target = 7, treeId = 1, actionTracingId = Dummies.tracingId) + val deleteEdgeSkeletonAction = + new DeleteEdgeSkeletonAction(source = 0, target = 7, treeId = 1, actionTracingId = Dummies.tracingId) val result = deleteEdgeSkeletonAction.applyOn(createEdgeSkeletonAction.applyOn(Dummies.skeletonTracing)) assert(result == Dummies.skeletonTracing) } @@ -161,7 +169,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { Option(newNode.interpolation), treeId = 1, Dummies.timestamp, - None + None, + actionTracingId = Dummies.tracingId ) val 
result = applyUpdateAction(createNodeSkeletonAction) assert(result.trees.length == Dummies.skeletonTracing.trees.length) @@ -186,7 +195,7 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { Option(newNode.interpolation), treeId = 1, Dummies.timestamp, - None + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateNodeSkeletonAction) assert(result.trees.length == Dummies.skeletonTracing.trees.length) @@ -211,9 +220,10 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { Option(newNode.interpolation), treeId = 1, Dummies.timestamp, - None + actionTracingId = Dummies.tracingId ) - val deleteNodeSkeletonAction = new DeleteNodeSkeletonAction(newNode.id, treeId = 1) + val deleteNodeSkeletonAction = + new DeleteNodeSkeletonAction(newNode.id, treeId = 1, actionTracingId = Dummies.tracingId) val result = deleteNodeSkeletonAction.applyOn(createNodeSkeletonAction.applyOn(Dummies.skeletonTracing)) assert(result == Dummies.skeletonTracing) } @@ -223,7 +233,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "update a top level tree group" in { val updatedName = "Axon 2 updated" val updateTreeGroupsSkeletonAction = new UpdateTreeGroupsSkeletonAction( - List(UpdateActionTreeGroup(updatedName, 2, Some(true), List())) + List(UpdateActionTreeGroup(updatedName, 2, Some(true), List())), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateTreeGroupsSkeletonAction) assert(result.trees == Dummies.skeletonTracing.trees) @@ -238,7 +249,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { UpdateActionTreeGroup(updatedNameTop, 1, Some(true), - List(UpdateActionTreeGroup(updatedNameNested, 3, Some(false), List())))) + List(UpdateActionTreeGroup(updatedNameNested, 3, Some(false), List())))), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateTreeGroupsSkeletonAction) assert(result.trees == Dummies.skeletonTracing.trees) @@ -261,7 +273,8 @@ class 
SkeletonUpdateActionsUnitTestSuite extends PlaySpec { editPosition, editRotation, zoomLevel, - userBoundingBox + userBoundingBox, + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateTreeGroupsSkeletonAction) assert(result.trees == Dummies.skeletonTracing.trees) diff --git a/test/backend/UpdateGroupHandlingUnitTestSuite.scala b/test/backend/UpdateGroupHandlingUnitTestSuite.scala new file mode 100644 index 00000000000..a2d27f7f00a --- /dev/null +++ b/test/backend/UpdateGroupHandlingUnitTestSuite.scala @@ -0,0 +1,39 @@ +package backend + +import com.scalableminds.webknossos.tracingstore.annotation.{RevertToVersionAnnotationAction, UpdateGroupHandling} +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.MergeTreeSkeletonAction +import org.scalatestplus.play.PlaySpec + +class UpdateGroupHandlingUnitTestSuite extends PlaySpec with UpdateGroupHandling { + + "regroup" should { + "work" in { + val updateGroupsBefore = List( + (5L, + List( + MergeTreeSkeletonAction(sourceId = 1, targetId = 2, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 2, targetId = 3, actionTracingId = Dummies.tracingId) + )), + (6L, + List( + RevertToVersionAnnotationAction(sourceVersion = 1), + )), + (7L, + List( + MergeTreeSkeletonAction(sourceId = 1, targetId = 2, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 2, targetId = 3, actionTracingId = Dummies.tracingId) + )), + (8L, + List( + MergeTreeSkeletonAction(sourceId = 1, targetId = 2, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 2, targetId = 3, actionTracingId = Dummies.tracingId) + )), + ) + val res = regroupByIsolationSensitiveActions(updateGroupsBefore) + assert(res.length == 3) + assert(res(1)._2.length == 1) + assert(res(1)._1 == 6L) + } + } + +} diff --git a/test/backend/VolumeUpdateActionsUnitTestSuite.scala b/test/backend/VolumeUpdateActionsUnitTestSuite.scala index 91459fc614b..35dd3f9b0b4 100644 --- 
a/test/backend/VolumeUpdateActionsUnitTestSuite.scala +++ b/test/backend/VolumeUpdateActionsUnitTestSuite.scala @@ -3,8 +3,8 @@ package backend import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import com.scalableminds.webknossos.tracingstore.tracings.UpdateAction import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + ApplyableVolumeUpdateAction, CreateSegmentVolumeAction, DeleteSegmentVolumeAction, UpdateActionSegmentGroup, @@ -15,7 +15,7 @@ import org.scalatestplus.play._ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplicits { - private def applyUpdateAction(action: UpdateAction.VolumeUpdateAction): VolumeTracing = + private def applyUpdateAction(action: ApplyableVolumeUpdateAction): VolumeTracing = action.applyOn(Dummies.volumeTracing) "CreateSegmentVolumeAction" should { @@ -26,7 +26,8 @@ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplic color = None, name = Some("aSegment"), groupId = Some(1), - creationTime = Some(Dummies.timestampLong) + creationTime = Some(Dummies.timestampLong), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(createSegmentAction) @@ -39,7 +40,7 @@ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplic "DeleteSegmentVolumeAction" should { "delete the specified segment" in { - val deleteSegmentAction = DeleteSegmentVolumeAction(id = 5) + val deleteSegmentAction = DeleteSegmentVolumeAction(id = 5, actionTracingId = Dummies.tracingId) val result = applyUpdateAction(deleteSegmentAction) assert(result.segments.length == Dummies.volumeTracing.segments.length - 1) @@ -58,7 +59,8 @@ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplic name = Some("aRenamedSegment"), color = None, creationTime = Some(Dummies.timestampLong), - groupId = None + groupId = 
None, + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateSegmentAction) @@ -76,7 +78,8 @@ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplic "update a top level segment group" in { val updatedName = "Segment Group 2 updated" val updateSegmentGroupsVolumeAction = new UpdateSegmentGroupsVolumeAction( - List(UpdateActionSegmentGroup(updatedName, 2, isExpanded = Some(true), List())) + List(UpdateActionSegmentGroup(updatedName, 2, isExpanded = Some(true), List())), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateSegmentGroupsVolumeAction) assert(result.segments == Dummies.volumeTracing.segments) @@ -87,7 +90,13 @@ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplic val updatedNameTop = "Segment Group 1 updated" val updatedNameNested = "Segment Group 3 updated" val updateSegmentGroupsVolumeAction = new UpdateSegmentGroupsVolumeAction( - List(UpdateActionSegmentGroup(updatedNameTop, 1, isExpanded = Some(true), List(UpdateActionSegmentGroup(updatedNameNested, 3, isExpanded = Some(false), List())))) + List( + UpdateActionSegmentGroup( + updatedNameTop, + 1, + isExpanded = Some(true), + List(UpdateActionSegmentGroup(updatedNameNested, 3, isExpanded = Some(false), List())))), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateSegmentGroupsVolumeAction) assert(result.segments == Dummies.volumeTracing.segments) diff --git a/tools/migrate-editable-mappings/SegmentToAgglomerateProto_pb2.py b/tools/migrate-editable-mappings/SegmentToAgglomerateProto_pb2.py index 91d2140c7b3..d7b553b51d0 100644 --- a/tools/migrate-editable-mappings/SegmentToAgglomerateProto_pb2.py +++ b/tools/migrate-editable-mappings/SegmentToAgglomerateProto_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: SegmentToAgglomerateProto.proto +# source: SegmentToAgglomerateChunkProto.proto """Generated protocol buffer code.""" from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor @@ -13,15 +13,15 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fSegmentToAgglomerateProto.proto\x12&com.scalableminds.webknossos.datastore\"B\n\x16SegmentAgglomeratePair\x12\x11\n\tsegmentId\x18\x01 \x02(\x03\x12\x15\n\ragglomerateId\x18\x02 \x02(\x03\"y\n\x19SegmentToAgglomerateProto\x12\\\n\x14segmentToAgglomerate\x18\x01 \x03(\x0b\x32>.com.scalableminds.webknossos.datastore.SegmentAgglomeratePair') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x24SegmentToAgglomerateChunkProto.proto\x12&com.scalableminds.webknossos.datastore\"B\n\x16SegmentAgglomeratePair\x12\x11\n\tsegmentId\x18\x01 \x02(\x03\x12\x15\n\ragglomerateId\x18\x02 \x02(\x03\"~\n\x1eSegmentToAgglomerateChunkProto\x12\\\n\x14segmentToAgglomerate\x18\x01 \x03(\x0b\x32>.com.scalableminds.webknossos.datastore.SegmentAgglomeratePair') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'SegmentToAgglomerateProto_pb2', globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'SegmentToAgglomerateChunkProto_pb2', globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None -  _SEGMENTAGGLOMERATEPAIR._serialized_start=75 -  _SEGMENTAGGLOMERATEPAIR._serialized_end=141 - _SEGMENTTOAGGLOMERATEPROTO._serialized_start=143 - _SEGMENTTOAGGLOMERATEPROTO._serialized_end=264 +  _SEGMENTAGGLOMERATEPAIR._serialized_start=80 +  _SEGMENTAGGLOMERATEPAIR._serialized_end=146 + _SEGMENTTOAGGLOMERATECHUNKPROTO._serialized_start=148 + _SEGMENTTOAGGLOMERATECHUNKPROTO._serialized_end=274 # @@protoc_insertion_point(module_scope) diff --git a/util/src/main/scala/collections/SequenceUtils.scala b/util/src/main/scala/collections/SequenceUtils.scala index 9d839141c7d..a584ae2923d 100644 --- a/util/src/main/scala/collections/SequenceUtils.scala +++ 
b/util/src/main/scala/collections/SequenceUtils.scala @@ -6,4 +6,33 @@ object SequenceUtils { if (uniqueElements.length == 1) uniqueElements.headOption else None } + + /* + Split a list into n parts, isolating the elements that satisfy the given predicate. + Those elements will be in single-item lists + Example: + splitAndIsolate(List(1,2,3,4,5,6,7))(i => i == 4) + → List(List(1, 2, 3), List(4), List(5, 6, 7)) + splitAndIsolate(List(1,2,3,4,5,6,7))(i => i % 3 == 0) + → List(List(1, 2), List(3), List(4, 5), List(6), List(7)) + splitAndIsolate(List(1,2,3,4,5,6,7))(i => i > 1000) # no matches → no splitting + → List(List(1, 2, 3, 4, 5, 6, 7)) + splitAndIsolate(List())(i => true) # empty list stays empty + → List() + */ + def splitAndIsolate[T](list: List[T])(predicate: T => Boolean): List[List[T]] = + list + .foldLeft(List[List[T]]()) { (acc, item) => + if (predicate(item)) { + List.empty :: List(item) :: acc + } else { + acc match { + case head :: tail => (item :: head) :: tail + case Nil => List(List(item)) + } + } + } + .reverse // we prepended on the outer list (for perf reasons) + .map(_.reverse) // we prepended on the inner lists (for perf reasons) + } diff --git a/util/src/main/scala/com/scalableminds/util/accesscontext/TokenContext.scala b/util/src/main/scala/com/scalableminds/util/accesscontext/TokenContext.scala new file mode 100644 index 00000000000..2a74b356bf4 --- /dev/null +++ b/util/src/main/scala/com/scalableminds/util/accesscontext/TokenContext.scala @@ -0,0 +1,4 @@ +package com.scalableminds.util.accesscontext + +// to be used in datastore and tracingstore to hand around tokens that were supplied with the request +case class TokenContext(userTokenOpt: Option[String]) diff --git a/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala b/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala index af2a52e2db8..b5f05b3810b 100644 --- a/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala +++ 
b/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala @@ -1,6 +1,7 @@ package com.scalableminds.util.mvc import com.google.protobuf.CodedInputStream +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.{BoxImplicits, Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging import net.liftweb.common._ @@ -235,8 +236,8 @@ trait ValidationHelpers { } trait RequestTokenHelper { - protected def urlOrHeaderToken(token: Option[String], request: Request[Any]): Option[String] = - token.orElse(request.headers.get("X-Auth-Token")) + implicit def tokenContextForRequest(implicit request: Request[Any]): TokenContext = + TokenContext(request.target.getQueryParameter("token").orElse(request.headers.get("X-Auth-Token"))) } trait ExtendedController diff --git a/util/src/main/scala/com/scalableminds/util/tools/Fox.scala b/util/src/main/scala/com/scalableminds/util/tools/Fox.scala index 59af5b50d28..953ed2b7f5b 100644 --- a/util/src/main/scala/com/scalableminds/util/tools/Fox.scala +++ b/util/src/main/scala/com/scalableminds/util/tools/Fox.scala @@ -101,13 +101,14 @@ object Fox extends FoxImplicits { def sequence[T](l: List[Fox[T]])(implicit ec: ExecutionContext): Future[List[Box[T]]] = Future.sequence(l.map(_.futureBox)) - def combined[T](l: List[Fox[T]])(implicit ec: ExecutionContext): Fox[List[T]] = + def combined[T](l: Seq[Fox[T]])(implicit ec: ExecutionContext): Fox[List[T]] = Fox(Future.sequence(l.map(_.futureBox)).map { results => results.find(_.isEmpty) match { case Some(Empty) => Empty case Some(failure: Failure) => failure case _ => - Full(results.map(_.openOrThrowException("An exception should never be thrown, all boxes must be full"))) + Full( + results.map(_.openOrThrowException("An exception should never be thrown, all boxes must be full")).toList) } }) @@ -133,7 +134,7 @@ object Fox extends FoxImplicits { } // Run serially, fail on the first failure - def serialCombined[A, B](l: List[A])(f: A => 
Fox[B])(implicit ec: ExecutionContext): Fox[List[B]] = + def serialCombined[A, B](l: Iterable[A])(f: A => Fox[B])(implicit ec: ExecutionContext): Fox[List[B]] = serialCombined(l.iterator)(f) // Run serially, fail on the first failure diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala index 7b25237b40d..59632807752 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala @@ -48,13 +48,12 @@ class BinaryDataController @Inject()( val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.dataStoreAdHocMeshService def requestViaWebknossos( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String ): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { logTime(slackNotificationService.noticeSlowRequest) { val t = Instant.now for { @@ -78,7 +77,6 @@ class BinaryDataController @Inject()( * Handles requests for raw binary data via HTTP GET. 
*/ def requestRawCuboid( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, @@ -96,8 +94,8 @@ class BinaryDataController @Inject()( halfByte: Boolean, mappingName: Option[String] ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -116,13 +114,12 @@ class BinaryDataController @Inject()( } def requestRawCuboidPost( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String ): Action[RawCuboidRequest] = Action.async(validateJson[RawCuboidRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -135,8 +132,7 @@ class BinaryDataController @Inject()( /** * Handles a request for raw binary data via a HTTP GET. Used by knossos. 
*/ - def requestViaKnossos(token: Option[String], - organizationId: String, + def requestViaKnossos(organizationId: String, datasetName: String, dataLayerName: String, mag: Int, @@ -144,8 +140,8 @@ class BinaryDataController @Inject()( y: Int, z: Int, cubeSize: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -161,8 +157,7 @@ class BinaryDataController @Inject()( } } - def thumbnailJpeg(token: Option[String], - organizationId: String, + def thumbnailJpeg(organizationId: String, datasetName: String, dataLayerName: String, x: Int, @@ -176,8 +171,8 @@ class BinaryDataController @Inject()( intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]): Action[RawBuffer] = Action.async(parse.raw) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -218,14 +213,13 @@ class BinaryDataController @Inject()( } def mappingJson( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + 
accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -240,13 +234,12 @@ class BinaryDataController @Inject()( /** * Handles ad-hoc mesh requests. */ - def requestAdHocMesh(token: Option[String], - organizationId: String, + def requestAdHocMesh(organizationId: String, datasetName: String, dataLayerName: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -282,13 +275,10 @@ class BinaryDataController @Inject()( private def formatNeighborList(neighbors: List[Int]): String = "[" + neighbors.mkString(", ") + "]" - def findData(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String): Action[AnyContent] = + def findData(organizationId: String, datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -298,13 +288,10 @@ class BinaryDataController @Inject()( } } - def histogram(token: Option[String], - organizationId: String, - datasetName: String, - 
dataLayerName: String): Action[AnyContent] = + def histogram(organizationId: String, datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala index f2f4d5921c0..eed1704178e 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala @@ -23,34 +23,30 @@ class DSMeshController @Inject()( override def allowRemoteOrigin: Boolean = true - def listMeshFiles(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String): Action[AnyContent] = + def listMeshFiles(organizationId: String, datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFiles <- meshFileService.exploreMeshFiles(organizationId, datasetName, dataLayerName) } yield Ok(Json.toJson(meshFiles)) } } - def listMeshChunksForSegment(token: Option[String], - organizationId: String, + def listMeshChunksForSegment(organizationId: String, datasetName: 
String, dataLayerName: String, /* If targetMappingName is set, assume that meshfile contains meshes for - the oversegmentation. Collect mesh chunks of all *unmapped* segment ids - belonging to the supplied agglomerate id. - If it is not set, use meshfile as is, assume passed id is present in meshfile - Note: in case of an editable mapping, targetMappingName is its baseMapping name. + the oversegmentation. Collect mesh chunks of all *unmapped* segment ids + belonging to the supplied agglomerate id. + If it is not set, use meshfile as is, assume passed id is present in meshfile + Note: in case of an editable mapping, targetMappingName is its baseMapping name. */ targetMappingName: Option[String], editableMappingTracingId: Option[String]): Action[ListMeshChunksRequest] = Action.async(validateJson[ListMeshChunksRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { _ <- Fox.successful(()) mappingNameForMeshFile = meshFileService.mappingNameForMeshFile(organizationId, @@ -65,8 +61,7 @@ class DSMeshController @Inject()( editableMappingTracingId, request.body.segmentId, mappingNameForMeshFile, - omitMissing = false, - urlOrHeaderToken(token, request) + omitMissing = false ) chunkInfos <- meshFileService.listMeshChunksForSegmentsMerged(organizationId, datasetName, @@ -77,13 +72,12 @@ class DSMeshController @Inject()( } } - def readMeshChunk(token: Option[String], - organizationId: String, + def readMeshChunk(organizationId: String, datasetName: String, dataLayerName: String): Action[MeshChunkDataRequestList] = Action.async(validateJson[MeshChunkDataRequestList]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - 
urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (data, encoding) <- meshFileService.readMeshChunk(organizationId, datasetName, dataLayerName, request.body) ?~> "mesh.file.loadChunk.failed" } yield { @@ -94,19 +88,12 @@ class DSMeshController @Inject()( } } - def loadFullMeshStl(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String): Action[FullMeshRequest] = + def loadFullMeshStl(organizationId: String, datasetName: String, dataLayerName: String): Action[FullMeshRequest] = Action.async(validateJson[FullMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { - data: Array[Byte] <- fullMeshService.loadFor(token: Option[String], - organizationId, - datasetName, - dataLayerName, - request.body) ?~> "mesh.file.loadChunk.failed" + data: Array[Byte] <- fullMeshService.loadFor(organizationId, datasetName, dataLayerName, request.body) ?~> "mesh.file.loadChunk.failed" } yield Ok(data) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 25347e60f4e..a088e55bae8 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -66,12 +66,11 @@ class DataSourceController @Inject()( override def allowRemoteOrigin: Boolean = true - def readInboxDataSource(token: Option[String], organizationId: String, 
datasetName: String): Action[AnyContent] = + def readInboxDataSource(organizationId: String, datasetName: String): Action[AnyContent] = Action.async { implicit request => { - accessTokenService.validateAccessForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContextForSyncBlock( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { // Read directly from file, not from repository to ensure recent changes are seen val dataSource: InboxDataSource = dataSourceService.dataSourceFromDir( @@ -82,35 +81,33 @@ class DataSourceController @Inject()( } } - def triggerInboxCheckBlocking(token: Option[String]): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources, urlOrHeaderToken(token, request)) { + def triggerInboxCheckBlocking(): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) { for { _ <- dataSourceService.checkInbox(verbose = true) } yield Ok } } - def reserveUpload(token: Option[String]): Action[ReserveUploadInformation] = + def reserveUpload(): Action[ReserveUploadInformation] = Action.async(validateJson[ReserveUploadInformation]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organization), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.administrateDataSources(request.body.organization)) { for { isKnownUpload <- uploadService.isKnownUpload(request.body.uploadId) _ <- if (!isKnownUpload) { - (remoteWebknossosClient.reserveDataSourceUpload(request.body, urlOrHeaderToken(token, request)) ?~> "dataset.upload.validation.failed") + (remoteWebknossosClient.reserveDataSourceUpload(request.body) ?~> 
"dataset.upload.validation.failed") .flatMap(_ => uploadService.reserveUpload(request.body)) } else Fox.successful(()) } yield Ok } } - def getUnfinishedUploads(token: Option[String], organizationName: String): Action[AnyContent] = + def getUnfinishedUploads(organizationName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationName), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(organizationName)) { for { - unfinishedUploads <- remoteWebknossosClient.getUnfinishedUploadsForUser(urlOrHeaderToken(token, request), - organizationName) + unfinishedUploads <- remoteWebknossosClient.getUnfinishedUploadsForUser(organizationName) unfinishedUploadsWithUploadIds <- uploadService.addUploadIdsToUnfinishedUploads(unfinishedUploads) } yield Ok(Json.toJson(unfinishedUploadsWithUploadIds)) } @@ -118,10 +115,10 @@ class DataSourceController @Inject()( // To be called by people with disk access but not DatasetManager role. 
This way, they can upload a dataset manually on disk, // and it can be put in a webknossos folder where they have access - def reserveManualUpload(token: Option[String]): Action[ReserveManualUploadInformation] = + def reserveManualUpload(): Action[ReserveManualUploadInformation] = Action.async(validateJson[ReserveManualUploadInformation]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organization), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.administrateDataSources(request.body.organization)) { for { _ <- remoteWebknossosClient.reserveDataSourceUpload( ReserveUploadInformation( @@ -133,8 +130,7 @@ class DataSourceController @Inject()( None, request.body.initialTeamIds, request.body.folderId - ), - urlOrHeaderToken(token, request) + ) ) ?~> "dataset.upload.validation.failed" } yield Ok } @@ -154,7 +150,7 @@ class DataSourceController @Inject()( - As GET parameter: - token (string): datastore token identifying the uploading user */ - def uploadChunk(token: Option[String]): Action[MultipartFormData[Files.TemporaryFile]] = + def uploadChunk(): Action[MultipartFormData[Files.TemporaryFile]] = Action.async(parse.multipartFormData) { implicit request => val uploadForm = Form( tuple( @@ -173,8 +169,8 @@ class DataSourceController @Inject()( for { dataSourceId <- uploadService.getDataSourceIdByUploadId( uploadService.extractDatasetUploadId(uploadFileId)) ?~> "dataset.upload.validation.failed" - result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { + result <- accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.writeDataSource(dataSourceId)) { for { isKnownUpload <- uploadService.isKnownUploadByFileId(uploadFileId) _ <- bool2Fox(isKnownUpload) ?~> "dataset.upload.validation.failed" @@ -191,13 +187,12 @@ class DataSourceController @Inject()( ) } - 
def testChunk(token: Option[String], resumableChunkNumber: Int, resumableIdentifier: String): Action[AnyContent] = + def testChunk(resumableChunkNumber: Int, resumableIdentifier: String): Action[AnyContent] = Action.async { implicit request => for { dataSourceId <- uploadService.getDataSourceIdByUploadId( uploadService.extractDatasetUploadId(resumableIdentifier)) ?~> "dataset.upload.validation.failed" - result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { + result <- accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataSource(dataSourceId)) { for { isKnownUpload <- uploadService.isKnownUploadByFileId(resumableIdentifier) _ <- bool2Fox(isKnownUpload) ?~> "dataset.upload.validation.failed" @@ -207,38 +202,33 @@ class DataSourceController @Inject()( } yield result } - def finishUpload(token: Option[String]): Action[UploadInformation] = Action.async(validateJson[UploadInformation]) { - implicit request => - log() { - for { - dataSourceId <- uploadService - .getDataSourceIdByUploadId(request.body.uploadId) ?~> "dataset.upload.validation.failed" - result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { - for { - (dataSourceId, datasetSizeBytes) <- uploadService - .finishUpload(request.body) ?~> "dataset.upload.finishFailed" - _ <- remoteWebknossosClient.reportUpload( - dataSourceId, - datasetSizeBytes, - request.body.needsConversion.getOrElse(false), - viaAddRoute = false, - userToken = urlOrHeaderToken(token, request)) ?~> "reportUpload.failed" - } yield Ok - } - } yield result - } + def finishUpload(): Action[UploadInformation] = Action.async(validateJson[UploadInformation]) { implicit request => + log() { + for { + dataSourceId <- uploadService + .getDataSourceIdByUploadId(request.body.uploadId) ?~> "dataset.upload.validation.failed" + result <- 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataSource(dataSourceId)) { + for { + (dataSourceId, datasetSizeBytes) <- uploadService + .finishUpload(request.body) ?~> "dataset.upload.finishFailed" + _ <- remoteWebknossosClient.reportUpload(dataSourceId, + datasetSizeBytes, + request.body.needsConversion.getOrElse(false), + viaAddRoute = false) ?~> "reportUpload.failed" + } yield Ok + } + } yield result + } } - def cancelUpload(token: Option[String]): Action[CancelUploadInformation] = + def cancelUpload(): Action[CancelUploadInformation] = Action.async(validateJson[CancelUploadInformation]) { implicit request => val dataSourceIdFox = uploadService.isKnownUpload(request.body.uploadId).flatMap { case false => Fox.failure("dataset.upload.validation.failed") case true => uploadService.getDataSourceIdByUploadId(request.body.uploadId) } dataSourceIdFox.flatMap { dataSourceId => - accessTokenService.validateAccess(UserAccessRequest.deleteDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.deleteDataSource(dataSourceId)) { for { _ <- remoteWebknossosClient.deleteDataSource(dataSourceId) ?~> "dataset.delete.webknossos.failed" _ <- uploadService.cancelUpload(request.body) ?~> "Could not cancel the upload." 
@@ -248,27 +238,24 @@ class DataSourceController @Inject()( } def listMappings( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContextForSyncBlock( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { addNoCacheHeaderFallback( Ok(Json.toJson(dataSourceService.exploreMappings(organizationId, datasetName, dataLayerName)))) } } def listAgglomerates( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateList = agglomerateService.exploreAgglomerates(organizationId, datasetName, dataLayerName) @@ -277,15 +264,14 @@ class DataSourceController @Inject()( } def generateAgglomerateSkeleton( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- 
binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox skeleton <- agglomerateService.generateSkeleton(organizationId, @@ -298,15 +284,14 @@ class DataSourceController @Inject()( } def agglomerateGraph( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateGraph <- agglomerateService.generateAgglomerateGraph( @@ -317,15 +302,14 @@ class DataSourceController @Inject()( } def positionForSegmentViaAgglomerateFile( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, segmentId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox position <- agglomerateService.positionForSegmentId( @@ -336,14 +320,13 @@ class DataSourceController @Inject()( } def largestAgglomerateId( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, 
request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox largestAgglomerateId: Long <- agglomerateService @@ -361,14 +344,13 @@ class DataSourceController @Inject()( } def agglomerateIdsForSegmentIds( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateIds: Seq[Long] <- agglomerateService @@ -387,14 +369,13 @@ class DataSourceController @Inject()( } def agglomerateIdsForAllSegmentIds( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateIds: Array[Long] <- agglomerateService @@ -411,10 +392,10 @@ class DataSourceController @Inject()( } } - def update(token: Option[String], organizationId: String, datasetName: String): Action[DataSource] = + def update(organizationId: String, datasetName: String): Action[DataSource] = 
Action.async(validateJson[DataSource]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.writeDataSource(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.writeDataSource(DataSourceId(datasetName, organizationId))) { for { _ <- Fox.successful(()) dataSource <- dataSourceRepository.find(DataSourceId(datasetName, organizationId)).toFox ?~> Messages( @@ -425,12 +406,9 @@ class DataSourceController @Inject()( } // Stores a remote dataset in the database. - def add(token: Option[String], - organizationId: String, - datasetName: String, - folderId: Option[String]): Action[DataSource] = + def add(organizationId: String, datasetName: String, folderId: Option[String]): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) { for { _ <- bool2Fox(dataSourceRepository.find(DataSourceId(datasetName, organizationId)).isEmpty) ?~> Messages( "dataSource.alreadyPresent") @@ -444,40 +422,34 @@ class DataSourceController @Inject()( layersToLink = None, initialTeams = List.empty, folderId = folderId, - ), - urlOrHeaderToken(token, request) + ) ) ?~> "dataset.upload.validation.failed" _ <- dataSourceService.updateDataSource(request.body.copy(id = DataSourceId(datasetName, organizationId)), expectExisting = false) - _ <- remoteWebknossosClient.reportUpload( - DataSourceId(datasetName, organizationId), - 0L, - needsConversion = false, - viaAddRoute = true, - userToken = urlOrHeaderToken(token, request)) ?~> "reportUpload.failed" + _ <- remoteWebknossosClient.reportUpload(DataSourceId(datasetName, organizationId), + 0L, + needsConversion = false, + viaAddRoute = true) ?~> "reportUpload.failed" } yield Ok } } - def 
createOrganizationDirectory(token: Option[String], organizationId: String): Action[AnyContent] = Action.async { - implicit request => - accessTokenService.validateAccessForSyncBlock(UserAccessRequest.administrateDataSources(organizationId), token) { - val newOrganizationDirectory = new File(f"${dataSourceService.dataBaseDir}/$organizationId") - newOrganizationDirectory.mkdirs() - if (newOrganizationDirectory.isDirectory) - Ok - else - BadRequest - } + def createOrganizationDirectory(organizationId: String): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContextForSyncBlock( + UserAccessRequest.administrateDataSources(organizationId)) { + val newOrganizationDirectory = new File(f"${dataSourceService.dataBaseDir}/$organizationId") + newOrganizationDirectory.mkdirs() + if (newOrganizationDirectory.isDirectory) + Ok + else + BadRequest + } } - def measureUsedStorage(token: Option[String], - organizationId: String, - datasetName: Option[String] = None): Action[AnyContent] = + def measureUsedStorage(organizationId: String, datasetName: Option[String] = None): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationId), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(organizationId)) { for { before <- Fox.successful(System.currentTimeMillis()) usedStorageInBytes: List[DirectoryStorageReport] <- storageUsageService.measureStorage(organizationId, @@ -492,13 +464,9 @@ class DataSourceController @Inject()( } } - def reload(token: Option[String], - organizationId: String, - datasetName: String, - layerName: Option[String] = None): Action[AnyContent] = + def reload(organizationId: String, datasetName: String, layerName: Option[String] = None): Action[AnyContent] = Action.async { implicit request => - 
accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationId), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(organizationId)) { val (closedAgglomerateFileHandleCount, clearedBucketProviderCount, removedChunksCount) = binaryDataServiceHolder.binaryDataService.clearCache(organizationId, datasetName, layerName) val closedMeshFileHandleCount = meshFileService.clearCache(organizationId, datasetName, layerName) @@ -519,11 +487,10 @@ class DataSourceController @Inject()( } } - def deleteOnDisk(token: Option[String], organizationId: String, datasetName: String): Action[AnyContent] = + def deleteOnDisk(organizationId: String, datasetName: String): Action[AnyContent] = Action.async { implicit request => val dataSourceId = DataSourceId(datasetName, organizationId) - accessTokenService.validateAccess(UserAccessRequest.deleteDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.deleteDataSource(dataSourceId)) { for { _ <- binaryDataServiceHolder.binaryDataService.deleteOnDisk( organizationId, @@ -534,29 +501,24 @@ class DataSourceController @Inject()( } } - def compose(token: Option[String]): Action[ComposeRequest] = + def compose(): Action[ComposeRequest] = Action.async(validateJson[ComposeRequest]) { implicit request => - val userToken = urlOrHeaderToken(token, request) - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organizationId), token) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.administrateDataSources(request.body.organizationId)) { for { - _ <- Fox.serialCombined(request.body.layers.map(_.datasetId).toList)( - id => - accessTokenService.assertUserAccess( - UserAccessRequest.readDataSources(DataSourceId(id.name, id.owningOrganization)), - userToken)) - dataSource <- 
composeService.composeDataset(request.body, userToken) + _ <- Fox.serialCombined(request.body.layers.map(_.datasetId).toList)(id => + accessTokenService.assertUserAccess( + UserAccessRequest.readDataSources(DataSourceId(id.name, id.owningOrganization)))) + dataSource <- composeService.composeDataset(request.body) _ <- dataSourceRepository.updateDataSource(dataSource) } yield Ok } } - def listConnectomeFiles(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String): Action[AnyContent] = + def listConnectomeFiles(organizationId: String, datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { val connectomeFileNames = connectomeFileService.exploreConnectomeFiles(organizationId, datasetName, dataLayerName) for { @@ -572,13 +534,12 @@ class DataSourceController @Inject()( } } - def getSynapsesForAgglomerates(token: Option[String], - organizationId: String, + def getSynapsesForAgglomerates(organizationId: String, datasetName: String, dataLayerName: String): Action[ByAgglomerateIdsRequest] = Action.async(validateJson[ByAgglomerateIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -588,14 +549,13 @@ class DataSourceController @Inject()( } } - def getSynapticPartnerForSynapses(token: Option[String], - organizationId: String, + def getSynapticPartnerForSynapses(organizationId: 
String, datasetName: String, dataLayerName: String, direction: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -607,13 +567,12 @@ class DataSourceController @Inject()( } } - def getSynapsePositions(token: Option[String], - organizationId: String, + def getSynapsePositions(organizationId: String, datasetName: String, dataLayerName: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -623,13 +582,10 @@ class DataSourceController @Inject()( } } - def getSynapseTypes(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String): Action[BySynapseIdsRequest] = + def getSynapseTypes(organizationId: String, datasetName: String, dataLayerName: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -639,13 +595,10 @@ class 
DataSourceController @Inject()( } } - def checkSegmentIndexFile(token: Option[String], - organizationId: String, - dataSetName: String, - dataLayerName: String): Action[AnyContent] = + def checkSegmentIndexFile(organizationId: String, dataSetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(dataSetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(dataSetName, organizationId))) { val segmentIndexFileOpt = segmentIndexFileService.getSegmentIndexFile(organizationId, dataSetName, dataLayerName).toOption Future.successful(Ok(Json.toJson(segmentIndexFileOpt.isDefined))) @@ -656,14 +609,13 @@ class DataSourceController @Inject()( * Query the segment index file for a single segment * @return List of bucketPositions as positions (not indices) of 32³ buckets in mag */ - def getSegmentIndex(token: Option[String], - organizationId: String, + def getSegmentIndex(organizationId: String, datasetName: String, dataLayerName: String, segmentId: String): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { segmentIds <- segmentIdsForAgglomerateIdIfNeeded( organizationId, @@ -673,8 +625,7 @@ class DataSourceController @Inject()( request.body.editableMappingTracingId, segmentId.toLong, mappingNameForMeshFile = None, - omitMissing = false, - urlOrHeaderToken(token, request) + omitMissing = false ) fileMag <- segmentIndexFileService.readFileMag(organizationId, datasetName, dataLayerName) topLeftsNested: 
Seq[Array[Vec3Int]] <- Fox.serialCombined(segmentIds)(sId => @@ -696,13 +647,12 @@ class DataSourceController @Inject()( * Query the segment index file for multiple segments * @return List of bucketPositions as indices of 32³ buckets */ - def querySegmentIndex(token: Option[String], - organizationId: String, + def querySegmentIndex(organizationId: String, datasetName: String, dataLayerName: String): Action[GetMultipleSegmentIndexParameters] = Action.async(validateJson[GetMultipleSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { segmentIdsAndBucketPositions <- Fox.serialCombined(request.body.segmentIds) { segmentOrAgglomerateId => for { @@ -714,8 +664,7 @@ class DataSourceController @Inject()( request.body.editableMappingTracingId, segmentOrAgglomerateId, mappingNameForMeshFile = None, - omitMissing = true, // assume agglomerate ids not present in the mapping belong to user-brushed segments - urlOrHeaderToken(token, request) + omitMissing = true // assume agglomerate ids not present in the mapping belong to user-brushed segments ) fileMag <- segmentIndexFileService.readFileMag(organizationId, datasetName, dataLayerName) topLeftsNested: Seq[Array[Vec3Int]] <- Fox.serialCombined(segmentIds)(sId => @@ -730,13 +679,12 @@ class DataSourceController @Inject()( } } - def getSegmentVolume(token: Option[String], - organizationId: String, + def getSegmentVolume(organizationId: String, datasetName: String, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { 
+ accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetName, dataLayerName) volumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => @@ -753,13 +701,12 @@ class DataSourceController @Inject()( } } - def getSegmentBoundingBox(token: Option[String], - organizationId: String, + def getSegmentBoundingBox(organizationId: String, datasetName: String, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetName, dataLayerName) boxes <- Fox.serialCombined(request.body.segmentIds) { segmentId => @@ -775,9 +722,10 @@ class DataSourceController @Inject()( } // Called directly by wk side - def exploreRemoteDataset(token: Option[String]): Action[ExploreRemoteDatasetRequest] = + def exploreRemoteDataset(): Action[ExploreRemoteDatasetRequest] = Action.async(validateJson[ExploreRemoteDatasetRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organizationId), token) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.administrateDataSources(request.body.organizationId)) { val reportMutable = ListBuffer[String]() val hasLocalFilesystemRequest = request.body.layerParameters.exists(param => new URI(param.remoteUri).getScheme == DataVaultService.schemeFile) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala index f4777fdb4b9..e801528a4b3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala @@ -35,8 +35,8 @@ class ExportsController @Inject()(webknossosClient: DSRemoteWebknossosClient, override def allowRemoteOrigin: Boolean = true - def download(token: Option[String], jobId: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.downloadJobExport(jobId), urlOrHeaderToken(token, request)) { + def download(jobId: String): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.downloadJobExport(jobId)) { for { exportProperties <- webknossosClient.getJobExportProperties(jobId) fullPath = exportProperties.fullPathIn(dataBaseDir) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala index f52cc9047ff..55ef4f56d1a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.dataformats.MagLocator @@ -50,13 +51,12 @@ class ZarrStreamingController @Inject()( * Uses the OME-NGFF standard (see https://ngff.openmicroscopy.org/latest/) */ def requestZAttrs( 
- token: Option[String], organizationId: String, datasetName: String, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -68,13 +68,12 @@ class ZarrStreamingController @Inject()( } def requestZarrJson( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -89,17 +88,14 @@ class ZarrStreamingController @Inject()( } } - def zAttrsWithAnnotationPrivateLink(token: Option[String], - accessToken: String, - dataLayerName: String = ""): Action[AnyContent] = + def zAttrsWithAnnotationPrivateLink(accessToken: String, dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => { + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => { remoteTracingstoreClient - .getOmeNgffHeader(annotationLayer.tracingId, annotationSource.tracingStoreUrl, relevantToken) + .getOmeNgffHeader(annotationLayer.tracingId, annotationSource.tracingStoreUrl)(relevantTokenContext) 
.map(ngffMetadata => Ok(Json.toJson(ngffMetadata))) }, orElse = annotationSource => @@ -115,17 +111,15 @@ class ZarrStreamingController @Inject()( ) } - def zarrJsonWithAnnotationPrivateLink(token: Option[String], - accessToken: String, - dataLayerName: String = ""): Action[AnyContent] = + def zarrJsonWithAnnotationPrivateLink(accessToken: String, dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => { + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => { remoteTracingstoreClient - .getZarrJsonGroupHeaderWithNgff(annotationLayer.tracingId, annotationSource.tracingStoreUrl, relevantToken) + .getZarrJsonGroupHeaderWithNgff(annotationLayer.tracingId, annotationSource.tracingStoreUrl)( + relevantTokenContext) .map(header => Ok(Json.toJson(header))) }, orElse = annotationSource => @@ -148,13 +142,12 @@ class ZarrStreamingController @Inject()( * Note that the result here is not necessarily equal to the file used in the underlying storage. 
*/ def requestDataSource( - token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { dataSource <- dataSourceRepository.findUsable(DataSourceId(datasetName, organizationId)).toFox ~> NOT_FOUND dataLayers = dataSource.dataLayers @@ -201,14 +194,12 @@ class ZarrStreamingController @Inject()( } } - def dataSourceWithAnnotationPrivateLink(token: Option[String], - accessToken: String, - zarrVersion: Int): Action[AnyContent] = + def dataSourceWithAnnotationPrivateLink(accessToken: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => for { - annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND - relevantToken = if (annotationSource.accessViaPrivateLink) Some(accessToken) - else urlOrHeaderToken(token, request) + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken) ~> NOT_FOUND + relevantTokenContext = if (annotationSource.accessViaPrivateLink) TokenContext(Some(accessToken)) + else tokenContextForRequest volumeAnnotationLayers = annotationSource.annotationLayers.filter(_.typ == AnnotationLayerType.Volume) dataSource <- dataSourceRepository .findUsable(DataSourceId(annotationSource.datasetName, annotationSource.organizationId)) @@ -221,44 +212,37 @@ class ZarrStreamingController @Inject()( remoteTracingstoreClient.getVolumeLayerAsZarrLayer(l.tracingId, Some(l.name), annotationSource.tracingStoreUrl, - relevantToken, - zarrVersion)) + zarrVersion)(relevantTokenContext)) allLayer = dataSourceLayers ++ annotationLayers zarrSource = GenericDataSource[DataLayer](dataSource.id, 
allLayer, dataSource.scale) } yield Ok(Json.toJson(zarrSource)) } def requestRawZarrCube( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, coordinates: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { rawZarrCube(organizationId, datasetName, dataLayerName, mag, coordinates) } } - def rawZarrCubePrivateLink(token: Option[String], - accessToken: String, + def rawZarrCubePrivateLink(accessToken: String, dataLayerName: String, mag: String, coordinates: String): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient - .getRawZarrCube(annotationLayer.tracingId, - mag, - coordinates, - annotationSource.tracingStoreUrl, - relevantToken) + .getRawZarrCube(annotationLayer.tracingId, mag, coordinates, annotationSource.tracingStoreUrl)( + relevantTokenContext) .map(Ok(_)), orElse = annotationSource => rawZarrCube(annotationSource.organizationId, annotationSource.datasetName, dataLayerName, mag, coordinates) @@ -301,14 +285,14 @@ class ZarrStreamingController @Inject()( _ <- bool2Fox(notFoundIndices.isEmpty) ~> "zarr.chunkNotFound" ~> NOT_FOUND } yield Ok(data) - def requestZArray(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String, - mag: String, + def requestZArray( + organizationId: String, + datasetName: String, + dataLayerName: String, + mag: String, ): Action[AnyContent] = Action.async { implicit request => - 
accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { zArray(organizationId, datasetName, dataLayerName, mag) } } @@ -323,14 +307,14 @@ class ZarrStreamingController @Inject()( zarrHeader = ZarrHeader.fromLayer(dataLayer, magParsed) } yield Ok(Json.toJson(zarrHeader)) - def requestZarrJsonForMag(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String, - mag: String, + def requestZarrJsonForMag( + organizationId: String, + datasetName: String, + dataLayerName: String, + mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { zarrJsonForMag(organizationId, datasetName, dataLayerName, mag) } } @@ -345,66 +329,58 @@ class ZarrStreamingController @Inject()( zarrHeader = Zarr3ArrayHeader.fromDataLayer(dataLayer, magParsed) } yield Ok(Json.toJson(zarrHeader)) - def zArrayPrivateLink(token: Option[String], - accessToken: String, - dataLayerName: String, - mag: String): Action[AnyContent] = Action.async { implicit request => - ifIsAnnotationLayerOrElse( - token, - accessToken, - dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => - remoteTracingstoreClient - .getZArray(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl, relevantToken) - .map(z => Ok(Json.toJson(z))), - orElse = - annotationSource => zArray(annotationSource.organizationId, annotationSource.datasetName, dataLayerName, mag) - ) + def zArrayPrivateLink(accessToken: String, dataLayerName: 
String, mag: String): Action[AnyContent] = Action.async { + implicit request => + ifIsAnnotationLayerOrElse( + accessToken, + dataLayerName, + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => + remoteTracingstoreClient + .getZArray(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl)(relevantTokenContext) + .map(z => Ok(Json.toJson(z))), + orElse = + annotationSource => zArray(annotationSource.organizationId, annotationSource.datasetName, dataLayerName, mag) + ) } - def zarrJsonPrivateLink(token: Option[String], - accessToken: String, - dataLayerName: String, - mag: String): Action[AnyContent] = Action.async { implicit request => - ifIsAnnotationLayerOrElse( - token, - accessToken, - dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => - remoteTracingstoreClient - .getZarrJson(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl, relevantToken) - .map(z => Ok(Json.toJson(z))), - orElse = annotationSource => - zarrJsonForMag(annotationSource.organizationId, annotationSource.datasetName, dataLayerName, mag) - ) + def zarrJsonPrivateLink(accessToken: String, dataLayerName: String, mag: String): Action[AnyContent] = Action.async { + implicit request => + ifIsAnnotationLayerOrElse( + accessToken, + dataLayerName, + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => + remoteTracingstoreClient + .getZarrJson(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl)(relevantTokenContext) + .map(z => Ok(Json.toJson(z))), + orElse = annotationSource => + zarrJsonForMag(annotationSource.organizationId, annotationSource.datasetName, dataLayerName, mag) + ) } private def ifIsAnnotationLayerOrElse( - token: Option[String], accessToken: String, dataLayerName: String, - ifIsAnnotationLayer: (AnnotationLayer, AnnotationSource, Option[String]) => Fox[Result], + ifIsAnnotationLayer: (AnnotationLayer, AnnotationSource, TokenContext) => Fox[Result], 
orElse: AnnotationSource => Fox[Result])(implicit request: Request[Any]): Fox[Result] = for { - annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND - relevantToken = if (annotationSource.accessViaPrivateLink) Some(accessToken) - else urlOrHeaderToken(token, request) + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken) ~> NOT_FOUND + relevantTokenContext = if (annotationSource.accessViaPrivateLink) TokenContext(Some(accessToken)) + else tokenContextForRequest layer = annotationSource.getAnnotationLayer(dataLayerName) result <- layer match { - case Some(annotationLayer) => ifIsAnnotationLayer(annotationLayer, annotationSource, relevantToken) + case Some(annotationLayer) => ifIsAnnotationLayer(annotationLayer, annotationSource, relevantTokenContext) case None => orElse(annotationSource) } } yield result - def requestDataLayerMagFolderContents(token: Option[String], - organizationId: String, + def requestDataLayerMagFolderContents(organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { dataLayerMagFolderContents(organizationId, datasetName, dataLayerName, mag, zarrVersion) } } @@ -428,23 +404,20 @@ class ZarrStreamingController @Inject()( additionalEntries )).withHeaders() - def dataLayerMagFolderContentsPrivateLink(token: Option[String], - accessToken: String, + def dataLayerMagFolderContentsPrivateLink(accessToken: String, dataLayerName: String, mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, 
dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient .getDataLayerMagFolderContents(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl, - relevantToken, - zarrVersion) + zarrVersion)(relevantTokenContext) .map( layers => Ok( @@ -462,13 +435,12 @@ class ZarrStreamingController @Inject()( ) } - def requestDataLayerFolderContents(token: Option[String], - organizationId: String, + def requestDataLayerFolderContents(organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { dataLayerFolderContents(organizationId, datasetName, dataLayerName, zarrVersion) } } @@ -492,21 +464,17 @@ class ZarrStreamingController @Inject()( additionalFiles ++ mags.map(_.toMagLiteral(allowScalar = true)) )).withHeaders() - def dataLayerFolderContentsPrivateLink(token: Option[String], - accessToken: String, + def dataLayerFolderContentsPrivateLink(accessToken: String, dataLayerName: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient - .getDataLayerFolderContents(annotationLayer.tracingId, - annotationSource.tracingStoreUrl, - relevantToken, - zarrVersion) + .getDataLayerFolderContents(annotationLayer.tracingId, annotationSource.tracingStoreUrl, zarrVersion)( + relevantTokenContext) .map( 
layers => Ok( @@ -523,13 +491,12 @@ class ZarrStreamingController @Inject()( ) } - def requestDataSourceFolderContents(token: Option[String], - organizationId: String, + def requestDataSourceFolderContents(organizationId: String, datasetName: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { dataSource <- dataSourceRepository.findUsable(DataSourceId(datasetName, organizationId)).toFox ?~> Messages( "dataSource.notFound") ~> NOT_FOUND @@ -545,12 +512,10 @@ class ZarrStreamingController @Inject()( } } - def dataSourceFolderContentsPrivateLink(token: Option[String], - accessToken: String, - zarrVersion: Int): Action[AnyContent] = + def dataSourceFolderContentsPrivateLink(accessToken: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => for { - annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken) dataSource <- dataSourceRepository .findUsable(DataSourceId(annotationSource.datasetName, annotationSource.organizationId)) .toFox ?~> Messages("dataSource.notFound") ~> NOT_FOUND @@ -572,28 +537,24 @@ class ZarrStreamingController @Inject()( )) } - def requestZGroup(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { - Ok(zGroupJson) + def requestZGroup(organizationId: String, datasetName: String, dataLayerName: String = ""): 
Action[AnyContent] = + Action.async { implicit request => + accessTokenService.validateAccessFromTokenContextForSyncBlock( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + Ok(zGroupJson) + } } - } private def zGroupJson: JsValue = Json.toJson(NgffGroupHeader(zarr_format = 2)) - def zGroupPrivateLink(token: Option[String], accessToken: String, dataLayerName: String): Action[AnyContent] = + def zGroupPrivateLink(accessToken: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient - .getZGroup(annotationLayer.tracingId, annotationSource.tracingStoreUrl, relevantToken) + .getZGroup(annotationLayer.tracingId, annotationSource.tracingStoreUrl)(relevantTokenContext) .map(Ok(_)), orElse = _ => Fox.successful(Ok(zGroupJson)) ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala index a6d0c65c8c5..b9552941679 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala @@ -2,6 +2,7 @@ package com.scalableminds.webknossos.datastore.models.annotation import com.scalableminds.util.tools.Fox.bool2Fox import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.Annotation.AnnotationLayerProto import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import 
com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType.AnnotationLayerType @@ -10,35 +11,23 @@ import scalapb.GeneratedMessage import scala.concurrent.ExecutionContext +// TODO can this be moved back to wk-core backend? case class AnnotationLayer( tracingId: String, typ: AnnotationLayerType, name: String, stats: JsObject, -) - -object AnnotationLayerStatistics { - - def zeroedForTyp(typ: AnnotationLayerType): JsObject = typ match { - case AnnotationLayerType.Skeleton => - Json.obj( - "treeCount" -> 0, - "nodeCount" -> 0, - "edgeCount" -> 0, - "branchPointCount" -> 0 - ) - case AnnotationLayerType.Volume => - Json.obj( - "segmentCount" -> 0 - ) - } - - def unknown: JsObject = Json.obj() +) { + def toProto: AnnotationLayerProto = + AnnotationLayerProto(tracingId, name, AnnotationLayerType.toProto(typ)) } object AnnotationLayer extends FoxImplicits { implicit val jsonFormat: OFormat[AnnotationLayer] = Json.format[AnnotationLayer] + def fromProto(p: AnnotationLayerProto): AnnotationLayer = + AnnotationLayer(p.tracingId, AnnotationLayerType.fromProto(p.`type`), p.name, AnnotationLayerStatistics.unknown) + val defaultSkeletonLayerName: String = "Skeleton" val defaultVolumeLayerName: String = "Volume" @@ -63,6 +52,25 @@ object AnnotationLayer extends FoxImplicits { } } +object AnnotationLayerStatistics { + + def zeroedForType(typ: AnnotationLayerType): JsObject = typ match { + case AnnotationLayerType.Skeleton => + Json.obj( + "treeCount" -> 0, + "nodeCount" -> 0, + "edgeCount" -> 0, + "branchPointCount" -> 0 + ) + case AnnotationLayerType.Volume => + Json.obj( + "segmentCount" -> 0 + ) + } + + def unknown: JsObject = Json.obj() +} + case class FetchedAnnotationLayer(tracingId: String, name: String, tracing: Either[SkeletonTracing, VolumeTracing], diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala index 0a9576b91aa..2593bedce4f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala @@ -1,8 +1,23 @@ package com.scalableminds.webknossos.datastore.models.annotation import com.scalableminds.util.enumeration.ExtendedEnumeration +import com.scalableminds.webknossos.datastore.Annotation.AnnotationLayerTypeProto object AnnotationLayerType extends ExtendedEnumeration { type AnnotationLayerType = Value val Skeleton, Volume = Value + + def toProto(annotationLayerType: AnnotationLayerType): AnnotationLayerTypeProto = + annotationLayerType match { + case Skeleton => AnnotationLayerTypeProto.Skeleton + case Volume => AnnotationLayerTypeProto.Volume + } + + def fromProto(p: AnnotationLayerTypeProto): AnnotationLayerType = + p match { + case AnnotationLayerTypeProto.Skeleton => Skeleton + case AnnotationLayerTypeProto.Volume => Volume + case AnnotationLayerTypeProto.Unrecognized(_) => + Volume // unrecognized should never happen, artifact of proto code generation + } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala index e1b36d40e42..3830553d25b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.datastore.rpc +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.mvc.MimeTypes import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging @@ -10,6 +11,7 @@ import play.api.libs.ws._ import 
scalapb.{GeneratedMessage, GeneratedMessageCompanion} import java.io.File +import java.nio.charset.StandardCharsets import scala.concurrent.ExecutionContext import scala.concurrent.duration._ @@ -26,6 +28,9 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: this } + def withTokenFromContext(implicit tc: TokenContext): RPCRequest = + addQueryStringOptional("token", tc.userTokenOpt) + def addHttpHeaders(hdrs: (String, String)*): RPCRequest = { request = request.addHttpHeaders(hdrs: _*) this @@ -109,7 +114,7 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: parseJsonResponse(performRequest) } - def postWithJsonResponse[T: Reads]: Fox[T] = { + def postWithJsonResponse[T: Reads](): Fox[T] = { request = request.withMethod("POST") parseJsonResponse(performRequest) } @@ -161,10 +166,16 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: parseProtoResponse(performRequest)(companion) } - def postJson[J: Writes](body: J = Json.obj()): Unit = { + def postJson[J: Writes](body: J = Json.obj()): Fox[Unit] = { request = request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("POST") - performRequest + performRequest.map(_ => ()) + } + + def postProto[T <: GeneratedMessage](body: T): Fox[Unit] = { + request = + request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> protobufMimeType).withBody(body.toByteArray).withMethod("POST") + performRequest.map(_ => ()) } def postProtoWithJsonResponse[T <: GeneratedMessage, J: Reads](body: T): Fox[J] = { @@ -180,6 +191,11 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: parseProtoResponse(performRequest)(companion) } + def postWithProtoResponse[T <: GeneratedMessage]()(companion: GeneratedMessageCompanion[T]): Fox[T] = { + request = request.withMethod("POST") + parseProtoResponse(performRequest)(companion) + } + private def performRequest: Fox[WSResponse] = { if (verbose) { 
logger.debug( @@ -193,7 +209,7 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: Full(result) } else { val errorMsg = s"Unsuccessful WS request to $url (ID: $id)." + - s"Status: ${result.status}. Response: ${result.bodyAsBytes.map(_.toChar).mkString.take(2000)}" + s"Status: ${result.status}. Response: ${new String(result.bodyAsBytes.toArray, StandardCharsets.UTF_8).take(2000)}" logger.error(errorMsg) Failure(errorMsg.take(400)) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala index 2e89f193607..b44d93c0468 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.datastore.services import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.enumeration.ExtendedEnumeration import com.scalableminds.util.tools.Fox @@ -19,7 +20,7 @@ object AccessMode extends ExtendedEnumeration { object AccessResourceType extends ExtendedEnumeration { type AccessResourceType = Value - val datasource, tracing, webknossos, jobExport = Value + val datasource, tracing, annotation, webknossos, jobExport = Value } case class UserAccessAnswer(granted: Boolean, msg: Option[String] = None) @@ -42,9 +43,16 @@ object UserAccessRequest { def readTracing(tracingId: String): UserAccessRequest = UserAccessRequest(DataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.read) + def writeTracing(tracingId: String): UserAccessRequest = UserAccessRequest(DataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.write) + def readAnnotation(annotationId: String): UserAccessRequest = 
+ UserAccessRequest(DataSourceId(annotationId, ""), AccessResourceType.annotation, AccessMode.read) + + def writeAnnotation(annotationId: String): UserAccessRequest = + UserAccessRequest(DataSourceId(annotationId, ""), AccessResourceType.annotation, AccessMode.write) + def downloadJobExport(jobId: String): UserAccessRequest = UserAccessRequest(DataSourceId(jobId, ""), AccessResourceType.jobExport, AccessMode.read) @@ -59,28 +67,27 @@ trait AccessTokenService { private lazy val accessAnswersCache: AlfuCache[(UserAccessRequest, Option[String]), UserAccessAnswer] = AlfuCache(timeToLive = AccessExpiration, timeToIdle = AccessExpiration) - def validateAccessForSyncBlock(accessRequest: UserAccessRequest, token: Option[String])(block: => Result)( - implicit ec: ExecutionContext): Fox[Result] = - validateAccess(accessRequest, token) { + def validateAccessFromTokenContextForSyncBlock(accessRequest: UserAccessRequest)( + block: => Result)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Result] = + validateAccessFromTokenContext(accessRequest) { Future.successful(block) } - def validateAccess(accessRequest: UserAccessRequest, token: Option[String])(block: => Future[Result])( - implicit ec: ExecutionContext): Fox[Result] = + def validateAccessFromTokenContext(accessRequest: UserAccessRequest)( + block: => Future[Result])(implicit ec: ExecutionContext, tc: TokenContext): Fox[Result] = for { - userAccessAnswer <- hasUserAccess(accessRequest, token) ?~> "Failed to check data access, token may be expired, consider reloading." + userAccessAnswer <- hasUserAccess(accessRequest) ?~> "Failed to check data access, token may be expired, consider reloading." 
result <- executeBlockOnPositiveAnswer(userAccessAnswer, block) } yield result - private def hasUserAccess(accessRequest: UserAccessRequest, token: Option[String])( - implicit ec: ExecutionContext): Fox[UserAccessAnswer] = - accessAnswersCache.getOrLoad((accessRequest, token), - _ => remoteWebknossosClient.requestUserAccess(token, accessRequest)) + private def hasUserAccess(accessRequest: UserAccessRequest)(implicit ec: ExecutionContext, + tc: TokenContext): Fox[UserAccessAnswer] = + accessAnswersCache.getOrLoad((accessRequest, tc.userTokenOpt), + _ => remoteWebknossosClient.requestUserAccess(accessRequest)) - def assertUserAccess(accessRequest: UserAccessRequest, token: Option[String])( - implicit ec: ExecutionContext): Fox[Unit] = + def assertUserAccess(accessRequest: UserAccessRequest)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = for { - userAccessAnswer <- hasUserAccess(accessRequest, token) ?~> "Failed to check data access, token may be expired, consider reloading." + userAccessAnswer <- hasUserAccess(accessRequest) ?~> "Failed to check data access, token may be expired, consider reloading." 
_ <- Fox.bool2Fox(userAccessAnswer.granted) ?~> userAccessAnswer.msg.getOrElse("Access forbidden.") } yield () diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala index 25cb1775999..903d60e0d93 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.datastore.services import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox @@ -49,14 +50,13 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, (binaryDataService, mappingService, config.Datastore.AdHocMesh.timeout, config.Datastore.AdHocMesh.actorPoolSize) val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.dataStoreAdHocMeshService - def loadFor(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Array[Byte]] = + def loadFor(organizationId: String, datasetName: String, dataLayerName: String, fullMeshRequest: FullMeshRequest)( + implicit ec: ExecutionContext, + m: MessagesProvider, + tc: TokenContext): Fox[Array[Byte]] = fullMeshRequest.meshFileName match { case Some(_) => - loadFullMeshFromMeshfile(token, organizationId, datasetName, dataLayerName, fullMeshRequest) + loadFullMeshFromMeshfile(organizationId, datasetName, dataLayerName, fullMeshRequest) case None => loadFullMeshFromAdHoc(organizationId, datasetName, dataLayerName, fullMeshRequest) } @@ -113,12 +113,12 @@ class DSFullMeshService 
@Inject()(dataSourceRepository: DataSourceRepository, } yield allVertices } - private def loadFullMeshFromMeshfile( - token: Option[String], - organizationId: String, - datasetName: String, - layerName: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Array[Byte]] = + private def loadFullMeshFromMeshfile(organizationId: String, + datasetName: String, + layerName: String, + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, + m: MessagesProvider, + tc: TokenContext): Fox[Array[Byte]] = for { meshFileName <- fullMeshRequest.meshFileName.toFox ?~> "meshFileName.needed" before = Instant.now @@ -134,8 +134,7 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, fullMeshRequest.editableMappingTracingId, fullMeshRequest.segmentId, mappingNameForMeshFile, - omitMissing = false, - token + omitMissing = false ) chunkInfos: WebknossosSegmentInfo <- meshFileService.listMeshChunksForSegmentsMerged(organizationId, datasetName, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala index 2924c0687e4..5bd69d4d7c9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala @@ -1,8 +1,8 @@ package com.scalableminds.webknossos.datastore.services import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.DataStoreConfig import com.scalableminds.webknossos.datastore.dataformats.layers.ZarrSegmentationLayer import com.scalableminds.webknossos.datastore.datareaders.zarr.{NgffMetadata, ZarrHeader} import 
com.scalableminds.webknossos.datastore.datareaders.zarr3.{Zarr3ArrayHeader, Zarr3GroupHeader} @@ -21,88 +21,63 @@ object EditableMappingSegmentListResult { class DSRemoteTracingstoreClient @Inject()( rpc: RPC, - config: DataStoreConfig, val lifecycle: ApplicationLifecycle, ) extends LazyLogging with FoxImplicits { + private def getZarrVersionDependantSubPath = (zarrVersion: Int) => if (zarrVersion == 2) "zarr" else "zarr3_experimental" - def getZArray(tracingId: String, mag: String, tracingStoreUri: String, token: Option[String]): Fox[ZarrHeader] = - rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/$mag/.zarray") - .addQueryStringOptional("token", token) + def getZArray(tracingId: String, mag: String, tracingStoreUri: String)(implicit tc: TokenContext): Fox[ZarrHeader] = + rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/$mag/.zarray").withTokenFromContext .getWithJsonResponse[ZarrHeader] - def getZarrJson(tracingId: String, - mag: String, - tracingStoreUri: String, - token: Option[String]): Fox[Zarr3ArrayHeader] = - rpc(s"$tracingStoreUri/tracings/volume/zarr3_experimental/$tracingId/$mag/zarr.json") - .addQueryStringOptional("token", token) + def getZarrJson(tracingId: String, mag: String, tracingStoreUri: String)( + implicit tc: TokenContext): Fox[Zarr3ArrayHeader] = + rpc(s"$tracingStoreUri/tracings/volume/zarr3_experimental/$tracingId/$mag/zarr.json").withTokenFromContext .getWithJsonResponse[Zarr3ArrayHeader] def getVolumeLayerAsZarrLayer(tracingId: String, tracingName: Option[String], tracingStoreUri: String, - token: Option[String], - zarrVersion: Int): Fox[ZarrSegmentationLayer] = { + zarrVersion: Int)(implicit tc: TokenContext): Fox[ZarrSegmentationLayer] = { val zarrVersionDependantSubPath = getZarrVersionDependantSubPath(zarrVersion) - rpc(s"$tracingStoreUri/tracings/volume/$zarrVersionDependantSubPath/$tracingId/zarrSource") - .addQueryStringOptional("token", token) + 
rpc(s"$tracingStoreUri/tracings/volume/$zarrVersionDependantSubPath/$tracingId/zarrSource").withTokenFromContext .addQueryStringOptional("tracingName", tracingName) .getWithJsonResponse[ZarrSegmentationLayer] } - def getOmeNgffHeader(tracingId: String, tracingStoreUri: String, token: Option[String]): Fox[NgffMetadata] = - rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/.zattrs") - .addQueryStringOptional("token", token) + def getOmeNgffHeader(tracingId: String, tracingStoreUri: String)(implicit tc: TokenContext): Fox[NgffMetadata] = + rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/.zattrs").withTokenFromContext .getWithJsonResponse[NgffMetadata] - def getZarrJsonGroupHeaderWithNgff(tracingId: String, - tracingStoreUri: String, - token: Option[String]): Fox[Zarr3GroupHeader] = - rpc(s"$tracingStoreUri/tracings/volume/zarr3_experimental/$tracingId/zarr.json") - .addQueryStringOptional("token", token) + def getZarrJsonGroupHeaderWithNgff(tracingId: String, tracingStoreUri: String)( + implicit tc: TokenContext): Fox[Zarr3GroupHeader] = + rpc(s"$tracingStoreUri/tracings/volume/zarr3_experimental/$tracingId/zarr.json").withTokenFromContext .getWithJsonResponse[Zarr3GroupHeader] - def getRawZarrCube(tracingId: String, - mag: String, - cxyz: String, - tracingStoreUri: String, - token: Option[String]): Fox[Array[Byte]] = - rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/$mag/$cxyz").silent - .addQueryStringOptional("token", token) - .getWithBytesResponse + def getRawZarrCube(tracingId: String, mag: String, cxyz: String, tracingStoreUri: String)( + implicit tc: TokenContext): Fox[Array[Byte]] = + rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/$mag/$cxyz").silent.withTokenFromContext.getWithBytesResponse - def getDataLayerMagFolderContents(tracingId: String, - mag: String, - tracingStoreUri: String, - token: Option[String], - zarrVersion: Int): Fox[List[String]] = - 
rpc(s"$tracingStoreUri/tracings/volume/${getZarrVersionDependantSubPath(zarrVersion)}/json/$tracingId/$mag") - .addQueryStringOptional("token", token) + def getDataLayerMagFolderContents(tracingId: String, mag: String, tracingStoreUri: String, zarrVersion: Int)( + implicit tc: TokenContext): Fox[List[String]] = + rpc(s"$tracingStoreUri/tracings/volume/${getZarrVersionDependantSubPath(zarrVersion)}/json/$tracingId/$mag").withTokenFromContext .getWithJsonResponse[List[String]] - def getDataLayerFolderContents(tracingId: String, - tracingStoreUri: String, - token: Option[String], - zarrVersion: Int): Fox[List[String]] = - rpc(s"$tracingStoreUri/tracings/volume/${getZarrVersionDependantSubPath(zarrVersion)}/json/$tracingId") - .addQueryStringOptional("token", token) + def getDataLayerFolderContents(tracingId: String, tracingStoreUri: String, zarrVersion: Int)( + implicit tc: TokenContext): Fox[List[String]] = + rpc(s"$tracingStoreUri/tracings/volume/${getZarrVersionDependantSubPath(zarrVersion)}/json/$tracingId").withTokenFromContext .getWithJsonResponse[List[String]] - def getZGroup(tracingId: String, tracingStoreUri: String, token: Option[String]): Fox[JsObject] = - rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/.zgroup") - .addQueryStringOptional("token", token) - .getWithJsonResponse[JsObject] + def getZGroup(tracingId: String, tracingStoreUri: String)(implicit tc: TokenContext): Fox[JsObject] = + rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/.zgroup").withTokenFromContext.getWithJsonResponse[JsObject] - def getEditableMappingSegmentIdsForAgglomerate(tracingStoreUri: String, - tracingId: String, - agglomerateId: Long, - token: Option[String]): Fox[EditableMappingSegmentListResult] = + def getEditableMappingSegmentIdsForAgglomerate(tracingStoreUri: String, tracingId: String, agglomerateId: Long)( + implicit tc: TokenContext): Fox[EditableMappingSegmentListResult] = rpc(s"$tracingStoreUri/tracings/mapping/$tracingId/segmentsForAgglomerate") 
.addQueryString("agglomerateId" -> agglomerateId.toString) - .addQueryStringOptional("token", token) + .withTokenFromContext .silent .getWithJsonResponse[EditableMappingSegmentListResult] } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala index 0703f638076..b4bcafed613 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala @@ -3,6 +3,7 @@ package com.scalableminds.webknossos.datastore.services import org.apache.pekko.actor.ActorSystem import com.google.inject.Inject import com.google.inject.name.Named +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.DataStoreConfig @@ -33,7 +34,7 @@ object TracingStoreInfo { } trait RemoteWebknossosClient { - def requestUserAccess(token: Option[String], accessRequest: UserAccessRequest): Fox[UserAccessAnswer] + def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] } class DSRemoteWebknossosClient @Inject()( @@ -68,21 +69,17 @@ class DSRemoteWebknossosClient @Inject()( .addQueryString("key" -> dataStoreKey) .put(dataSource) - def getUnfinishedUploadsForUser(userTokenOpt: Option[String], organizationName: String): Fox[List[UnfinishedUpload]] = + def getUnfinishedUploadsForUser(organizationName: String)(implicit tc: TokenContext): Fox[List[UnfinishedUpload]] = for { - userToken <- option2Fox(userTokenOpt) ?~> "reserveUpload.noUserToken" unfinishedUploads <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/getUnfinishedUploadsForUser") .addQueryString("key" -> dataStoreKey) - 
.addQueryString("token" -> userToken) .addQueryString("organizationName" -> organizationName) + .withTokenFromContext .getWithJsonResponse[List[UnfinishedUpload]] } yield unfinishedUploads - def reportUpload(dataSourceId: DataSourceId, - datasetSizeBytes: Long, - needsConversion: Boolean, - viaAddRoute: Boolean, - userToken: Option[String]): Fox[Unit] = + def reportUpload(dataSourceId: DataSourceId, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean)( + implicit tc: TokenContext): Fox[Unit] = for { _ <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/reportDatasetUpload") .addQueryString("key" -> dataStoreKey) @@ -90,7 +87,7 @@ class DSRemoteWebknossosClient @Inject()( .addQueryString("needsConversion" -> needsConversion.toString) .addQueryString("viaAddRoute" -> viaAddRoute.toString) .addQueryString("datasetSizeBytes" -> datasetSizeBytes.toString) - .addQueryStringOptional("token", userToken) + .withTokenFromContext .post() } yield () @@ -100,12 +97,11 @@ class DSRemoteWebknossosClient @Inject()( .silent .put(dataSources) - def reserveDataSourceUpload(info: ReserveUploadInformation, userTokenOpt: Option[String]): Fox[Unit] = + def reserveDataSourceUpload(info: ReserveUploadInformation)(implicit tc: TokenContext): Fox[Unit] = for { - userToken <- option2Fox(userTokenOpt) ?~> "reserveUpload.noUserToken" _ <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/reserveUpload") .addQueryString("key" -> dataStoreKey) - .addQueryString("token" -> userToken) + .withTokenFromContext .post(info) } yield () @@ -118,10 +114,10 @@ class DSRemoteWebknossosClient @Inject()( .addQueryString("key" -> dataStoreKey) .getWithJsonResponse[JobExportProperties] - override def requestUserAccess(userToken: Option[String], accessRequest: UserAccessRequest): Fox[UserAccessAnswer] = + override def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] = rpc(s"$webknossosUri/api/datastores/$dataStoreName/validateUserAccess") 
.addQueryString("key" -> dataStoreKey) - .addQueryStringOptional("token", userToken) + .withTokenFromContext .postJsonWithJsonResponse[UserAccessRequest, UserAccessAnswer](accessRequest) private lazy val tracingstoreUriCache: AlfuCache[String, String] = AlfuCache() @@ -141,13 +137,13 @@ class DSRemoteWebknossosClient @Inject()( private lazy val annotationSourceCache: AlfuCache[(String, Option[String]), AnnotationSource] = AlfuCache(timeToLive = 5 seconds, timeToIdle = 5 seconds) - def getAnnotationSource(accessToken: String, userToken: Option[String]): Fox[AnnotationSource] = + def getAnnotationSource(accessToken: String)(implicit tc: TokenContext): Fox[AnnotationSource] = annotationSourceCache.getOrLoad( - (accessToken, userToken), + (accessToken, tc.userTokenOpt), _ => rpc(s"$webknossosUri/api/annotations/source/$accessToken") .addQueryString("key" -> dataStoreKey) - .addQueryStringOptional("userToken", userToken) + .withTokenFromContext .getWithJsonResponse[AnnotationSource] ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala index 391c234fc6b..14279974079 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.datastore.services +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} import com.scalableminds.webknossos.datastore.storage.AgglomerateFileKey @@ -22,7 +23,7 @@ trait MeshMappingHelper { agglomerateId: Long, mappingNameForMeshFile: Option[String], omitMissing: Boolean, // If true, failing lookups in the agglomerate file will just return empty list. 
- token: Option[String])(implicit ec: ExecutionContext): Fox[List[Long]] = + )(implicit ec: ExecutionContext, tc: TokenContext): Fox[List[Long]] = (targetMappingName, editableMappingTracingId) match { case (None, None) => // No mapping selected, assume id matches meshfile @@ -58,8 +59,7 @@ trait MeshMappingHelper { tracingstoreUri <- dsRemoteWebknossosClient.getTracingstoreUri segmentIdsResult <- dsRemoteTracingstoreClient.getEditableMappingSegmentIdsForAgglomerate(tracingstoreUri, tracingId, - agglomerateId, - token) + agglomerateId) segmentIds <- if (segmentIdsResult.agglomerateIdIsPresent) Fox.successful(segmentIdsResult.segmentIds) else // the agglomerate id is not present in the editable mapping. Fetch its info from the base mapping. diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala index 26d16943db4..efed327ef37 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.datastore.services.uploading +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.io.PathUtils import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.dataformats.layers.{ @@ -67,7 +68,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, private def uploadDirectory(organizationId: String, name: String): Path = dataBaseDir.resolve(organizationId).resolve(name) - def composeDataset(composeRequest: ComposeRequest, userToken: Option[String]): Fox[DataSource] = + def composeDataset(composeRequest: ComposeRequest)(implicit tc: TokenContext): Fox[DataSource] = for { _ <- 
dataSourceService.assertDataDirWritable(composeRequest.organizationId) reserveUploadInfo = ReserveUploadInformation("", @@ -78,7 +79,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, None, List(), Some(composeRequest.targetFolderId)) - _ <- remoteWebknossosClient.reserveDataSourceUpload(reserveUploadInfo, userToken) ?~> "Failed to reserve upload." + _ <- remoteWebknossosClient.reserveDataSourceUpload(reserveUploadInfo) ?~> "Failed to reserve upload." directory = uploadDirectory(composeRequest.organizationId, composeRequest.newDatasetName) _ = PathUtils.ensureDirectory(directory) dataSource <- createDatasource(composeRequest, composeRequest.organizationId) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/slacknotification/SlackClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/slacknotification/SlackClient.scala index 99491288037..45f2d4e0766 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/slacknotification/SlackClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/slacknotification/SlackClient.scala @@ -45,6 +45,7 @@ class SlackClient(rpc: RPC, slackUri: String, name: String, verboseLoggingEnable rpc(slackUri).postJson( Json.obj("attachments" -> Json.arr(jsonMessage)) ) + () } else { logger.warn( s"Not sending slack notification as rate limit of $messagesSentSinceReset was reached. 
Message was: $jsonMessage") diff --git a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes index 17ebff8ea57..e51fe51b72f 100644 --- a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes +++ b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes @@ -5,122 +5,122 @@ GET /health @com.scalableminds.webknossos.datastore.controllers.Application.health # Read image data -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebknossos(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboidPost(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboid(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.thumbnailJpeg(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/findData 
@com.scalableminds.webknossos.datastore.controllers.BinaryDataController.findData(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.histogram(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebknossos(organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboidPost(organizationId: String, datasetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboid(organizationId: String, datasetName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.thumbnailJpeg(organizationId: String, datasetName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.findData(organizationId: String, datasetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/histogram 
@com.scalableminds.webknossos.datastore.controllers.BinaryDataController.histogram(organizationId: String, datasetName: String, dataLayerName: String) # Knossos compatible routes -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaKnossos(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaKnossos(organizationId: String, datasetName: String, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int) # Zarr2 compatible routes -GET /zarr/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(token: Option[String], organizationId: String, datasetName: String, dataLayerName="") -GET /zarr/:organizationId/:datasetName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, 
datasetName: String, dataLayerName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZAttrs(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /zarr/:organizationId/:datasetName/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZArray(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String) -GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, coordinates: String) - -GET 
/annotations/zarr/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zGroupPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName="") -GET /annotations/zarr/:accessTokenOrId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zAttrsWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zGroupPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag 
@com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zArrayPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) +GET /zarr/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(organizationId: String, datasetName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(organizationId: String, datasetName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(organizationId: String, datasetName: String, dataLayerName="") +GET /zarr/:organizationId/:datasetName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(organizationId: String, datasetName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetName/:dataLayerName 
@com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetName/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZAttrs(organizationId: String, datasetName: String, dataLayerName: String) +GET /zarr/:organizationId/:datasetName/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(organizationId: String, datasetName: String, dataLayerName: String) +GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZArray(organizationId: String, datasetName: String, dataLayerName: String, mag: String) +GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(organizationId: String, datasetName: String, dataLayerName: String, mag: String, coordinates: String) + +GET /annotations/zarr/:accessTokenOrId 
@com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zGroupPrivateLink(accessTokenOrId: String, dataLayerName="") +GET /annotations/zarr/:accessTokenOrId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceWithAnnotationPrivateLink(accessTokenOrId: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zAttrsWithAnnotationPrivateLink(accessTokenOrId: String, dataLayerName: String) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zGroupPrivateLink(accessTokenOrId: String, dataLayerName: String) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/ 
@com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zArrayPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) # Zarr3 compatible routes -GET /zarr3_experimental/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetName: 
String, dataLayerName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJson(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJsonForMag(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, coordinates: String) - -GET /annotations/zarr3_experimental/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int 
= 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String) -GET 
/annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) +GET /zarr3_experimental/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(organizationId: String, datasetName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(organizationId: String, datasetName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(organizationId: String, datasetName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJson(organizationId: String, datasetName: String, dataLayerName: String) +GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(organizationId: String, datasetName: String, 
dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJsonForMag(organizationId: String, datasetName: String, dataLayerName: String, mag: String) +GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(organizationId: String, datasetName: String, dataLayerName: String, mag: String, coordinates: String) + +GET /annotations/zarr3_experimental/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceWithAnnotationPrivateLink(accessTokenOrId: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/ 
@com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonWithAnnotationPrivateLink(accessTokenOrId: String, dataLayerName: String) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) # Segmentation mappings -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.mappingJson(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/mappings @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listMappings(token: Option[String], organizationId: String, datasetName: 
String, dataLayerName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.mappingJson(organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/mappings @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listMappings(organizationId: String, datasetName: String, dataLayerName: String) # Agglomerate files -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listAgglomerates(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/skeleton/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.generateAgglomerateSkeleton(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomerateGraph/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateGraph(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/largestAgglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.largestAgglomerateId(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForSegments 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForSegmentIds(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForAllSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForAllSegmentIds(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/positionForSegment @com.scalableminds.webknossos.datastore.controllers.DataSourceController.positionForSegmentViaAgglomerateFile(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, segmentId: Long) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listAgglomerates(organizationId: String, datasetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/skeleton/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.generateAgglomerateSkeleton(organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomerateGraph/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateGraph(organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/largestAgglomerateId 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.largestAgglomerateId(organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForSegmentIds(organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForAllSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForAllSegmentIds(organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/positionForSegment @com.scalableminds.webknossos.datastore.controllers.DataSourceController.positionForSegmentViaAgglomerateFile(organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, segmentId: Long) # Mesh files -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshFiles(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes/chunks @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshChunksForSegment(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, targetMappingName: Option[String], editableMappingTracingId: Option[String]) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes/chunks/data @com.scalableminds.webknossos.datastore.controllers.DSMeshController.readMeshChunk(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST 
/datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes/fullMesh.stl @com.scalableminds.webknossos.datastore.controllers.DSMeshController.loadFullMeshStl(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshFiles(organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes/chunks @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshChunksForSegment(organizationId: String, datasetName: String, dataLayerName: String, targetMappingName: Option[String], editableMappingTracingId: Option[String]) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes/chunks/data @com.scalableminds.webknossos.datastore.controllers.DSMeshController.readMeshChunk(organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes/fullMesh.stl @com.scalableminds.webknossos.datastore.controllers.DSMeshController.loadFullMeshStl(organizationId: String, datasetName: String, dataLayerName: String) # Connectome files -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listConnectomeFiles(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses/positions @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsePositions(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses/types 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapseTypes(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses/:direction @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapticPartnerForSynapses(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, direction: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsesForAgglomerates(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listConnectomeFiles(organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses/positions @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsePositions(organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses/types @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapseTypes(organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses/:direction @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapticPartnerForSynapses(organizationId: String, datasetName: String, dataLayerName: String, direction: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsesForAgglomerates(organizationId: String, datasetName: String, dataLayerName: String) # Ad-Hoc Meshing -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestAdHocMesh(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestAdHocMesh(organizationId: String, datasetName: String, dataLayerName: String) # Segment-Index files -GET /datasets/:organizationId/:dataSetName/layers/:dataLayerName/hasSegmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.checkSegmentIndexFile(token: Option[String], organizationId: String, dataSetName: String, dataLayerName: String) -POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.querySegmentIndex(token: Option[String], organizationId: String, dataSetName: String, dataLayerName: String) -POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentIndex/:segmentId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentIndex(token: Option[String], organizationId: String, dataSetName: String, dataLayerName: String, segmentId: String) -POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentStatistics/volume @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentVolume(token: Option[String], organizationId: String, dataSetName: String, dataLayerName: String) -POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentStatistics/boundingBox @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentBoundingBox(token: 
Option[String], organizationId: String, dataSetName: String, dataLayerName: String) +GET /datasets/:organizationId/:dataSetName/layers/:dataLayerName/hasSegmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.checkSegmentIndexFile(organizationId: String, dataSetName: String, dataLayerName: String) +POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.querySegmentIndex(organizationId: String, dataSetName: String, dataLayerName: String) +POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentIndex/:segmentId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentIndex(organizationId: String, dataSetName: String, dataLayerName: String, segmentId: String) +POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentStatistics/volume @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentVolume(organizationId: String, dataSetName: String, dataLayerName: String) +POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentStatistics/boundingBox @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentBoundingBox(organizationId: String, dataSetName: String, dataLayerName: String) # DataSource management -GET /datasets @com.scalableminds.webknossos.datastore.controllers.DataSourceController.testChunk(token: Option[String], resumableChunkNumber: Int, resumableIdentifier: String) -POST /datasets @com.scalableminds.webknossos.datastore.controllers.DataSourceController.uploadChunk(token: Option[String]) -GET /datasets/getUnfinishedUploads @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getUnfinishedUploads(token: Option[String], organizationName: String) -POST /datasets/reserveUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reserveUpload(token: Option[String]) -POST 
/datasets/reserveManualUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reserveManualUpload(token: Option[String]) -POST /datasets/finishUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.finishUpload(token: Option[String]) -POST /datasets/cancelUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.cancelUpload(token: Option[String]) -GET /datasets/measureUsedStorage/:organizationId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.measureUsedStorage(token: Option[String], organizationId: String, datasetName: Option[String]) -GET /datasets/:organizationId/:datasetName/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.readInboxDataSource(token: Option[String], organizationId: String, datasetName: String) -POST /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.update(token: Option[String], organizationId: String, datasetName: String) -PUT /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(token: Option[String], organizationId: String, datasetName: String, folderId: Option[String]) -DELETE /datasets/:organizationId/:datasetName/deleteOnDisk @com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(token: Option[String], organizationId: String, datasetName: String) -POST /datasets/compose @com.scalableminds.webknossos.datastore.controllers.DataSourceController.compose(token: Option[String]) -POST /datasets/exploreRemote @com.scalableminds.webknossos.datastore.controllers.DataSourceController.exploreRemoteDataset(token: Option[String]) +GET /datasets @com.scalableminds.webknossos.datastore.controllers.DataSourceController.testChunk(resumableChunkNumber: Int, resumableIdentifier: String) +POST /datasets 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.uploadChunk() +GET /datasets/getUnfinishedUploads @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getUnfinishedUploads(organizationName: String) +POST /datasets/reserveUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reserveUpload() +POST /datasets/reserveManualUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reserveManualUpload() +POST /datasets/finishUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.finishUpload() +POST /datasets/cancelUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.cancelUpload() +GET /datasets/measureUsedStorage/:organizationId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.measureUsedStorage(organizationId: String, datasetName: Option[String]) +GET /datasets/:organizationId/:datasetName/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.readInboxDataSource(organizationId: String, datasetName: String) +POST /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.update(organizationId: String, datasetName: String) +PUT /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(organizationId: String, datasetName: String, folderId: Option[String]) +DELETE /datasets/:organizationId/:datasetName/deleteOnDisk @com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(organizationId: String, datasetName: String) +POST /datasets/compose @com.scalableminds.webknossos.datastore.controllers.DataSourceController.compose() +POST /datasets/exploreRemote @com.scalableminds.webknossos.datastore.controllers.DataSourceController.exploreRemoteDataset() # Actions -POST /triggers/checkInboxBlocking 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.triggerInboxCheckBlocking(token: Option[String]) -POST /triggers/createOrganizationDirectory @com.scalableminds.webknossos.datastore.controllers.DataSourceController.createOrganizationDirectory(token: Option[String], organizationId: String) -POST /triggers/reload/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reload(token: Option[String], organizationId: String, datasetName: String, layerName: Option[String]) +POST /triggers/checkInboxBlocking @com.scalableminds.webknossos.datastore.controllers.DataSourceController.triggerInboxCheckBlocking() +POST /triggers/createOrganizationDirectory @com.scalableminds.webknossos.datastore.controllers.DataSourceController.createOrganizationDirectory(organizationId: String) +POST /triggers/reload/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reload(organizationId: String, datasetName: String, layerName: Option[String]) # Exports -GET /exports/:jobId/download @com.scalableminds.webknossos.datastore.controllers.ExportsController.download(token: Option[String], jobId: String) +GET /exports/:jobId/download @com.scalableminds.webknossos.datastore.controllers.ExportsController.download(jobId: String) diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto new file mode 100644 index 00000000000..4aea0922056 --- /dev/null +++ b/webknossos-datastore/proto/Annotation.proto @@ -0,0 +1,44 @@ +syntax = "proto2"; + +package com.scalableminds.webknossos.datastore; + +enum AnnotationLayerTypeProto { + Skeleton = 1; + Volume = 2; +} + +message AnnotationProto { + optional string name = 1; + optional string description = 2; + required int64 version = 3; + repeated AnnotationLayerProto annotationLayers = 4; + required int64 earliestAccessibleVersion = 5; +} + +message AnnotationLayerProto { + required string tracingId = 1; 
+ required string name = 2; + required AnnotationLayerTypeProto type = 4; +} + +message AddLayerAnnotationUpdateAction { + required string name = 1; + required string tracingId = 2; + required AnnotationLayerTypeProto type = 5; +} + +message DeleteLayerAnnotationUpdateAction { + required string tracingId = 1; +} + +message UpdateLayerMetadataAnnotationUpdateAction { + required string tracingId = 1; + required string name = 2; +} + +message UpdateMetadataAnnotationUpdateAction { + optional string name = 1; + optional string description = 2; +} + +// TODO restoreLayer? diff --git a/webknossos-datastore/proto/SegmentToAgglomerateProto.proto b/webknossos-datastore/proto/SegmentToAgglomerateProto.proto index 519276323c3..6bb61fdf783 100644 --- a/webknossos-datastore/proto/SegmentToAgglomerateProto.proto +++ b/webknossos-datastore/proto/SegmentToAgglomerateProto.proto @@ -7,6 +7,6 @@ message SegmentAgglomeratePair { required int64 agglomerateId = 2; } -message SegmentToAgglomerateProto { +message SegmentToAgglomerateChunkProto { repeated SegmentAgglomeratePair segmentToAgglomerate = 1; } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala index 564e51d671a..15f8dd30475 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox @@ -38,36 +39,28 @@ class TSRemoteDatastoreClient @Inject()( private lazy val largestAgglomerateIdCache: AlfuCache[(RemoteFallbackLayer, String, 
Option[String]), Long] = AlfuCache(timeToLive = 10 minutes) - def getAgglomerateSkeleton(userToken: Option[String], - remoteFallbackLayer: RemoteFallbackLayer, - mappingName: String, - agglomerateId: Long): Fox[Array[Byte]] = + def getAgglomerateSkeleton(remoteFallbackLayer: RemoteFallbackLayer, mappingName: String, agglomerateId: Long)( + implicit tc: TokenContext): Fox[Array[Byte]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - result <- rpc(s"$remoteLayerUri/agglomerates/$mappingName/skeleton/$agglomerateId") - .addQueryStringOptional("token", userToken) - .getWithBytesResponse + result <- rpc(s"$remoteLayerUri/agglomerates/$mappingName/skeleton/$agglomerateId").withTokenFromContext.getWithBytesResponse } yield result - def getData(remoteFallbackLayer: RemoteFallbackLayer, - dataRequests: List[WebknossosDataRequest], - userToken: Option[String]): Fox[(Array[Byte], List[Int])] = + def getData(remoteFallbackLayer: RemoteFallbackLayer, dataRequests: List[WebknossosDataRequest])( + implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - response <- rpc(s"$remoteLayerUri/data").addQueryStringOptional("token", userToken).silent.post(dataRequests) + response <- rpc(s"$remoteLayerUri/data").withTokenFromContext.silent.post(dataRequests) _ <- bool2Fox(Status.isSuccessful(response.status)) bytes = response.bodyAsBytes.toArray indices <- parseMissingBucketHeader(response.header(missingBucketsHeader)) ?~> "failed to parse missing bucket header" } yield (bytes, indices) - def getVoxelAtPosition(userToken: Option[String], - remoteFallbackLayer: RemoteFallbackLayer, - pos: Vec3Int, - mag: Vec3Int): Fox[Array[Byte]] = + def getVoxelAtPosition(remoteFallbackLayer: RemoteFallbackLayer, pos: Vec3Int, mag: Vec3Int)( + implicit tc: TokenContext): Fox[Array[Byte]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - result <- rpc(s"$remoteLayerUri/data") - 
.addQueryStringOptional("token", userToken) + result <- rpc(s"$remoteLayerUri/data").withTokenFromContext .addQueryString("x" -> pos.x.toString) .addQueryString("y" -> pos.y.toString) .addQueryString("z" -> pos.z.toString) @@ -81,33 +74,25 @@ class TSRemoteDatastoreClient @Inject()( def getAgglomerateIdsForSegmentIds(remoteFallbackLayer: RemoteFallbackLayer, mappingName: String, - segmentIdsOrdered: List[Long], - userToken: Option[String]): Fox[List[Long]] = + segmentIdsOrdered: List[Long])(implicit tc: TokenContext): Fox[List[Long]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) segmentIdsOrderedProto = ListOfLong(items = segmentIdsOrdered) - result <- rpc(s"$remoteLayerUri/agglomerates/$mappingName/agglomeratesForSegments") - .addQueryStringOptional("token", userToken) - .silent + result <- rpc(s"$remoteLayerUri/agglomerates/$mappingName/agglomeratesForSegments").withTokenFromContext.silent .postProtoWithProtoResponse[ListOfLong, ListOfLong](segmentIdsOrderedProto)(ListOfLong) } yield result.items.toList - def getAgglomerateGraph(remoteFallbackLayer: RemoteFallbackLayer, - baseMappingName: String, - agglomerateId: Long, - userToken: Option[String]): Fox[AgglomerateGraph] = + def getAgglomerateGraph(remoteFallbackLayer: RemoteFallbackLayer, baseMappingName: String, agglomerateId: Long)( + implicit tc: TokenContext): Fox[AgglomerateGraph] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - result <- rpc(s"$remoteLayerUri/agglomerates/$baseMappingName/agglomerateGraph/$agglomerateId").silent - .addQueryStringOptional("token", userToken) - .silent + result <- rpc(s"$remoteLayerUri/agglomerates/$baseMappingName/agglomerateGraph/$agglomerateId").silent.withTokenFromContext.silent .getWithProtoResponse[AgglomerateGraph](AgglomerateGraph) } yield result - def getLargestAgglomerateId(remoteFallbackLayer: RemoteFallbackLayer, - mappingName: String, - userToken: Option[String]): Fox[Long] = { - val cacheKey = (remoteFallbackLayer, 
mappingName, userToken) + def getLargestAgglomerateId(remoteFallbackLayer: RemoteFallbackLayer, mappingName: String)( + implicit tc: TokenContext): Fox[Long] = { + val cacheKey = (remoteFallbackLayer, mappingName, tc.userTokenOpt) largestAgglomerateIdCache.getOrLoad( cacheKey, k => @@ -121,26 +106,20 @@ class TSRemoteDatastoreClient @Inject()( ) } - def hasSegmentIndexFile(remoteFallbackLayer: RemoteFallbackLayer, userToken: Option[String]): Fox[Boolean] = + def hasSegmentIndexFile(remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Boolean] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - hasIndexFile <- rpc(s"$remoteLayerUri/hasSegmentIndex") - .addQueryStringOptional("token", userToken) - .silent - .getWithJsonResponse[Boolean] + hasIndexFile <- rpc(s"$remoteLayerUri/hasSegmentIndex").withTokenFromContext.silent.getWithJsonResponse[Boolean] } yield hasIndexFile def querySegmentIndex(remoteFallbackLayer: RemoteFallbackLayer, segmentId: Long, mag: Vec3Int, mappingName: Option[String], // should be the baseMappingName in case of editable mappings - editableMappingTracingId: Option[String], - userToken: Option[String]): Fox[Seq[Vec3Int]] = + editableMappingTracingId: Option[String])(implicit tc: TokenContext): Fox[Seq[Vec3Int]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - positions <- rpc(s"$remoteLayerUri/segmentIndex/$segmentId") - .addQueryStringOptional("token", userToken) - .silent + positions <- rpc(s"$remoteLayerUri/segmentIndex/$segmentId").withTokenFromContext.silent .postJsonWithJsonResponse[GetSegmentIndexParameters, Seq[Vec3Int]](GetSegmentIndexParameters( mag, cubeSize = Vec3Int.ones, // Don't use the cubeSize parameter here (since we want to calculate indices later anyway) @@ -157,13 +136,10 @@ class TSRemoteDatastoreClient @Inject()( segmentIds: Seq[Long], mag: Vec3Int, mappingName: Option[String], // should be the baseMappingName in case of editable mappings - editableMappingTracingId: 
Option[String], - userToken: Option[String]): Fox[Seq[(Long, Seq[Vec3Int])]] = + editableMappingTracingId: Option[String])(implicit tc: TokenContext): Fox[Seq[(Long, Seq[Vec3Int])]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - result <- rpc(s"$remoteLayerUri/segmentIndex") - .addQueryStringOptional("token", userToken) - .silent + result <- rpc(s"$remoteLayerUri/segmentIndex").withTokenFromContext.silent .postJsonWithJsonResponse[GetMultipleSegmentIndexParameters, Seq[SegmentIndexData]]( GetMultipleSegmentIndexParameters(segmentIds.toList, mag, @@ -173,25 +149,22 @@ class TSRemoteDatastoreClient @Inject()( } yield result.map(data => (data.segmentId, data.positions)) - def loadFullMeshStl(token: Option[String], - remoteFallbackLayer: RemoteFallbackLayer, - fullMeshRequest: FullMeshRequest): Fox[Array[Byte]] = + def loadFullMeshStl(remoteFallbackLayer: RemoteFallbackLayer, fullMeshRequest: FullMeshRequest)( + implicit tc: TokenContext): Fox[Array[Byte]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - result <- rpc(s"$remoteLayerUri/meshes/fullMesh.stl") - .addQueryStringOptional("token", token) + result <- rpc(s"$remoteLayerUri/meshes/fullMesh.stl").withTokenFromContext .postJsonWithBytesResponse(fullMeshRequest) } yield result - def voxelSizeForTracingWithCache(tracingId: String, token: Option[String]): Fox[VoxelSize] = - voxelSizeCache.getOrLoad(tracingId, tId => voxelSizeForTracing(tId, token)) + def voxelSizeForTracingWithCache(tracingId: String)(implicit tc: TokenContext): Fox[VoxelSize] = + voxelSizeCache.getOrLoad(tracingId, tId => voxelSizeForTracing(tId)) - private def voxelSizeForTracing(tracingId: String, token: Option[String]): Fox[VoxelSize] = + private def voxelSizeForTracing(tracingId: String)(implicit tc: TokenContext): Fox[VoxelSize] = for { dataSourceId <- remoteWebknossosClient.getDataSourceIdForTracing(tracingId) dataStoreUri <- dataStoreUriWithCache(dataSourceId.team, dataSourceId.name) - result <- 
rpc(s"$dataStoreUri/data/datasets/${dataSourceId.team}/${dataSourceId.name}/readInboxDataSource") - .addQueryStringOptional("token", token) + result <- rpc(s"$dataStoreUri/data/datasets/${dataSourceId.team}/${dataSourceId.name}/readInboxDataSource").withTokenFromContext .getWithJsonResponse[InboxDataSource] scale <- result.voxelSizeOpt ?~> "could not determine voxel size of dataset" } yield scale diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 5d07e4363ad..f500843ebbc 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -1,9 +1,14 @@ package com.scalableminds.webknossos.tracingstore import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerType} import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, DataSourceLike} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.{ @@ -12,21 +17,24 @@ import com.scalableminds.webknossos.datastore.services.{ UserAccessAnswer, UserAccessRequest } +import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.typesafe.scalalogging.LazyLogging import play.api.inject.ApplicationLifecycle import 
play.api.libs.json.{JsObject, Json, OFormat} import play.api.libs.ws.WSResponse import scala.concurrent.ExecutionContext +import scala.concurrent.duration.DurationInt -case class TracingUpdatesReport(tracingId: String, - timestamps: List[Instant], - statistics: Option[JsObject], - significantChangesCount: Int, - viewChangesCount: Int, - userToken: Option[String]) -object TracingUpdatesReport { - implicit val jsonFormat: OFormat[TracingUpdatesReport] = Json.format[TracingUpdatesReport] +case class AnnotationUpdatesReport(annotationId: String, + // TODO stats per tracing id? coordinate with frontend + timestamps: List[Instant], + statistics: Option[JsObject], + significantChangesCount: Int, + viewChangesCount: Int, + userToken: Option[String]) +object AnnotationUpdatesReport { + implicit val jsonFormat: OFormat[AnnotationUpdatesReport] = Json.format[AnnotationUpdatesReport] } class TSRemoteWebknossosClient @Inject()( @@ -42,17 +50,20 @@ class TSRemoteWebknossosClient @Inject()( private val webknossosUri: String = config.Tracingstore.WebKnossos.uri private lazy val dataSourceIdByTracingIdCache: AlfuCache[String, DataSourceId] = AlfuCache() + private lazy val annotationIdByTracingIdCache: AlfuCache[String, String] = + AlfuCache(maxCapacity = 10000, timeToLive = 5 minutes) - def reportTracingUpdates(tracingUpdatesReport: TracingUpdatesReport): Fox[WSResponse] = + def reportAnnotationUpdates(tracingUpdatesReport: AnnotationUpdatesReport): Fox[WSResponse] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/handleTracingUpdateReport") .addQueryString("key" -> tracingStoreKey) .silent .post(Json.toJson(tracingUpdatesReport)) - def getDataSourceForTracing(tracingId: String): Fox[DataSourceLike] = + def getDataSourceForTracing(tracingId: String)(implicit tc: TokenContext): Fox[DataSourceLike] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataSource") .addQueryString("tracingId" -> tracingId) .addQueryString("key" -> tracingStoreKey) + .withTokenFromContext 
.getWithJsonResponse[DataSourceLike] def getDataStoreUriForDataSource(organizationId: String, datasetName: String): Fox[String] = @@ -72,10 +83,52 @@ class TSRemoteWebknossosClient @Inject()( .getWithJsonResponse[DataSourceId] ) - override def requestUserAccess(token: Option[String], accessRequest: UserAccessRequest): Fox[UserAccessAnswer] = + // TODO what about temporary/compound tracings? + def getAnnotationIdForTracing(tracingId: String)(implicit ec: ExecutionContext): Fox[String] = + annotationIdByTracingIdCache.getOrLoad( + tracingId, + tracingId => + rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/annotationId") + .addQueryString("tracingId" -> tracingId) + .addQueryString("key" -> tracingStoreKey) + .getWithJsonResponse[String] + ) ?~> "annotation.idForTracing.failed" + + def updateAnnotationLayers(annotationId: String, annotationLayers: List[AnnotationLayer]): Fox[Unit] = + rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/updateAnnotationLayers") + .addQueryString("annotationId" -> annotationId) + .addQueryString("key" -> tracingStoreKey) + .postJson(annotationLayers) + + def updateAnnotation(annotationId: String, annotationProto: AnnotationProto): Fox[Unit] = + rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/updateAnnotation") + .addQueryString("annotationId" -> annotationId) + .addQueryString("key" -> tracingStoreKey) + .postProto(annotationProto) + + def createTracingFor(annotationId: String, + layerParameters: AnnotationLayerParameters, + previousVersion: Long): Fox[Either[SkeletonTracing, VolumeTracing]] = { + val req = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/createTracing") + .addQueryString("annotationId" -> annotationId) + .addQueryString("previousVersion" -> previousVersion.toString) // used for fetching old precedence layers + .addQueryString("key" -> tracingStoreKey) + layerParameters.typ match { + case AnnotationLayerType.Volume => + req + .postJsonWithProtoResponse[AnnotationLayerParameters, 
VolumeTracing](layerParameters)(VolumeTracing) + .map(Right(_)) + case AnnotationLayerType.Skeleton => + req + .postJsonWithProtoResponse[AnnotationLayerParameters, SkeletonTracing](layerParameters)(SkeletonTracing) + .map(Left(_)) + } + } + + override def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/validateUserAccess") .addQueryString("key" -> tracingStoreKey) - .addQueryStringOptional("token", token) + .withTokenFromContext .postJsonWithJsonResponse[UserAccessRequest, UserAccessAnswer](accessRequest) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala index cd6fb91fc9d..49ac749ec1d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala @@ -4,6 +4,7 @@ import org.apache.pekko.actor.ActorSystem import com.google.inject.AbstractModule import com.google.inject.name.Names import com.scalableminds.webknossos.datastore.services.AdHocMeshServiceHolder +import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.TracingDataStore import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService @@ -25,5 +26,8 @@ class TracingStoreModule extends AbstractModule { bind(classOf[EditableMappingService]).asEagerSingleton() bind(classOf[TSSlackNotificationService]).asEagerSingleton() bind(classOf[AdHocMeshServiceHolder]).asEagerSingleton() + bind(classOf[AnnotationTransactionService]).asEagerSingleton() + 
bind(classOf[TSAnnotationService]).asEagerSingleton() } + } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala new file mode 100644 index 00000000000..6fd848fc7b7 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala @@ -0,0 +1,41 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.util.accesscontext.TokenContext +import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} +import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService + +import scala.concurrent.ExecutionContext + +trait AnnotationReversion { + + def volumeTracingService: VolumeTracingService + + def revertDistributedElements(currentAnnotationWithTracings: AnnotationWithTracings, + sourceAnnotationWithTracings: AnnotationWithTracings, + sourceVersion: Long, + newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = + for { + _ <- Fox.serialCombined(sourceAnnotationWithTracings.getVolumes) { + // Only volume data for volume layers present in the *source annotation* needs to be reverted. 
+ case (tracingId, sourceTracing) => + for { + tracingBeforeRevert <- currentAnnotationWithTracings.getVolume(tracingId).toFox + _ <- Fox.runIf(!sourceTracing.getHasEditableMapping)( + volumeTracingService + .revertVolumeData(tracingId, sourceVersion, sourceTracing, newVersion: Long, tracingBeforeRevert)) + _ <- Fox.runIf(sourceTracing.getHasEditableMapping)( + revertEditableMappingFields(currentAnnotationWithTracings, sourceVersion, tracingId)) + } yield () + } + } yield () + + private def revertEditableMappingFields(currentAnnotationWithTracings: AnnotationWithTracings, + sourceVersion: Long, + tracingId: String)(implicit ec: ExecutionContext): Fox[Unit] = + for { + updater <- currentAnnotationWithTracings.getEditableMappingUpdater(tracingId).toFox + _ <- updater.revertToVersion(sourceVersion) + _ <- updater.flushBuffersToFossil() + } yield () +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala new file mode 100644 index 00000000000..6bd9a1e7294 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -0,0 +1,291 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.util.accesscontext.TokenContext +import com.scalableminds.util.time.Instant +import com.scalableminds.util.tools.{Fox, JsonHelper} +import com.scalableminds.util.tools.Fox.bool2Fox +import com.scalableminds.webknossos.tracingstore.{ + TSRemoteWebknossosClient, + TracingStoreRedisStore, + AnnotationUpdatesReport +} +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore, TracingId} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + BucketMutatingVolumeUpdateAction, + UpdateBucketVolumeAction, + VolumeTracingService +} +import 
com.typesafe.scalalogging.LazyLogging +import play.api.http.Status.CONFLICT +import play.api.libs.json.Json + +import javax.inject.Inject +import scala.concurrent.ExecutionContext +import scala.concurrent.duration._ + +class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRedisStore, + uncommittedUpdatesStore: TracingStoreRedisStore, + volumeTracingService: VolumeTracingService, + tracingDataStore: TracingDataStore, + remoteWebknossosClient: TSRemoteWebknossosClient, + annotationService: TSAnnotationService) + extends KeyValueStoreImplicits + with LazyLogging { + + private val transactionGroupExpiry: FiniteDuration = 24 hours + private val handledGroupCacheExpiry: FiniteDuration = 24 hours + + private def transactionGroupKey(annotationId: String, + transactionId: String, + transactionGroupIndex: Int, + version: Long) = + s"transactionGroup___${annotationId}___${transactionId}___${transactionGroupIndex}___$version" + + private def handledGroupKey(annotationId: String, transactionId: String, version: Long, transactionGroupIndex: Int) = + s"handledGroup___${annotationId}___${transactionId}___${version}___$transactionGroupIndex" + + private def patternFor(annotationId: String, transactionId: String) = + s"transactionGroup___${annotationId}___${transactionId}___*" + + private def saveUncommitted(annotationId: String, + transactionId: String, + transactionGroupIndex: Int, + version: Long, + updateGroup: UpdateActionGroup, + expiry: FiniteDuration)(implicit ec: ExecutionContext): Fox[Unit] = + for { + _ <- Fox.runIf(transactionGroupIndex > 0)( + Fox.assertTrue( + uncommittedUpdatesStore.contains(transactionGroupKey( + annotationId, + transactionId, + transactionGroupIndex - 1, + version))) ?~> s"Incorrect transaction index. 
Got: $transactionGroupIndex but ${transactionGroupIndex - 1} does not exist" ~> CONFLICT) + _ <- uncommittedUpdatesStore.insert( + transactionGroupKey(annotationId, transactionId, transactionGroupIndex, version), + Json.toJson(updateGroup).toString(), + Some(expiry)) + } yield () + + private def handleUpdateGroupForTransaction( + annotationId: String, + previousVersionFox: Fox[Long], + updateGroup: UpdateActionGroup)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Long] = + for { + previousCommittedVersion: Long <- previousVersionFox + result <- if (previousCommittedVersion + 1 == updateGroup.version) { + if (updateGroup.transactionGroupCount == updateGroup.transactionGroupIndex + 1) { + // Received the last group of this transaction + commitWithPending(annotationId, updateGroup) + } else { + for { + _ <- saveUncommitted(annotationId, + updateGroup.transactionId, + updateGroup.transactionGroupIndex, + updateGroup.version, + updateGroup, + transactionGroupExpiry) + _ <- saveToHandledGroupIdStore(annotationId, + updateGroup.transactionId, + updateGroup.version, + updateGroup.transactionGroupIndex) + } yield previousCommittedVersion // no updates have been committed, do not yield version increase + } + } else { + failUnlessAlreadyHandled(updateGroup, annotationId, previousCommittedVersion) + } + } yield result + + // For an update group (that is the last of a transaction), fetch all previous uncommitted for the same transaction + // and commit them all. + private def commitWithPending(annotationId: String, updateGroup: UpdateActionGroup)(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Long] = + for { + previousActionGroupsToCommit <- getAllUncommittedFor(annotationId, updateGroup.transactionId) + _ <- bool2Fox( + previousActionGroupsToCommit + .exists(_.transactionGroupIndex == 0) || updateGroup.transactionGroupCount == 1) ?~> s"Trying to commit a transaction without a group that has transactionGroupIndex 0." 
+ concatenatedGroup = concatenateUpdateGroupsOfTransaction(previousActionGroupsToCommit, updateGroup) + commitResult <- commitUpdates(annotationId, List(concatenatedGroup)) + _ <- removeAllUncommittedFor(annotationId, updateGroup.transactionId) + } yield commitResult + + private def removeAllUncommittedFor(tracingId: String, transactionId: String): Fox[Unit] = + uncommittedUpdatesStore.removeAllConditional(patternFor(tracingId, transactionId)) + + private def getAllUncommittedFor(annotationId: String, transactionId: String): Fox[List[UpdateActionGroup]] = + for { + raw: Seq[String] <- uncommittedUpdatesStore.findAllConditional(patternFor(annotationId, transactionId)) + parsed: Seq[UpdateActionGroup] = raw.flatMap(itemAsString => + JsonHelper.jsResultToOpt(Json.parse(itemAsString).validate[UpdateActionGroup])) + } yield parsed.toList.sortBy(_.transactionGroupIndex) + + private def saveToHandledGroupIdStore(annotationId: String, + transactionId: String, + version: Long, + transactionGroupIndex: Int): Fox[Unit] = { + val key = handledGroupKey(annotationId, transactionId, version, transactionGroupIndex) + handledGroupIdStore.insert(key, "()", Some(handledGroupCacheExpiry)) + } + + private def handledGroupIdStoreContains(annotationId: String, + transactionId: String, + version: Long, + transactionGroupIndex: Int): Fox[Boolean] = + handledGroupIdStore.contains(handledGroupKey(annotationId, transactionId, version, transactionGroupIndex)) + + private def concatenateUpdateGroupsOfTransaction(previousActionGroups: List[UpdateActionGroup], + lastActionGroup: UpdateActionGroup): UpdateActionGroup = + if (previousActionGroups.isEmpty) lastActionGroup + else { + val allActionGroups = previousActionGroups :+ lastActionGroup + UpdateActionGroup( + version = lastActionGroup.version, + timestamp = lastActionGroup.timestamp, + authorId = lastActionGroup.authorId, + actions = allActionGroups.flatMap(_.actions), + stats = lastActionGroup.stats, // the latest stats do count + info = 
lastActionGroup.info, // frontend sets this identically for all groups of transaction + transactionId = f"${lastActionGroup.transactionId}-concatenated", + transactionGroupCount = 1, + transactionGroupIndex = 0, + ) + } + + def handleSingleUpdateAction(annotationId: String, currentVersion: Long, updateAction: UpdateAction)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Long] = { + val wrapped = List( + UpdateActionGroup( + currentVersion + 1, + System.currentTimeMillis(), + None, + List(updateAction), + None, + None, + "dummyTransactionId", + 1, + 0 + )) + handleUpdateGroups(annotationId, wrapped) + } + + def handleUpdateGroups(annotationId: String, updateGroups: List[UpdateActionGroup])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Long] = + if (updateGroups.forall(_.transactionGroupCount == 1)) { + commitUpdates(annotationId, updateGroups) + } else { + updateGroups.foldLeft(annotationService.currentMaterializableVersion(annotationId)) { + (currentCommittedVersionFox, updateGroup) => + handleUpdateGroupForTransaction(annotationId, currentCommittedVersionFox, updateGroup) + } + } + + // Perform version check and commit the passed updates + private def commitUpdates(annotationId: String, updateGroups: List[UpdateActionGroup])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Long] = + for { + _ <- reportUpdates(annotationId, updateGroups) + currentCommittedVersion: Fox[Long] = annotationService.currentMaterializableVersion(annotationId) + _ = logger.info(s"trying to commit ${updateGroups + .map(_.actions.length) + .sum} actions in ${updateGroups.length} groups (versions ${updateGroups.map(_.version).mkString(",")})") + newVersion <- updateGroups.foldLeft(currentCommittedVersion) { (previousVersion, updateGroup) => + previousVersion.flatMap { prevVersion: Long => + if (prevVersion + 1 == updateGroup.version) { + for { + _ <- handleUpdateGroup(annotationId, updateGroup) + _ <- saveToHandledGroupIdStore(annotationId, + 
updateGroup.transactionId, + updateGroup.version, + updateGroup.transactionGroupIndex) + } yield updateGroup.version + } else failUnlessAlreadyHandled(updateGroup, annotationId, prevVersion) + } + } + _ <- applyImmediatelyIfNeeded(annotationId, updateGroups.flatMap(_.actions), newVersion) + } yield newVersion + + private def applyImmediatelyIfNeeded(annotationId: String, updates: List[UpdateAction], newVersion: Long)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Unit] = + if (containsApplyImmediatelyUpdateActions(updates)) { + annotationService.get(annotationId, Some(newVersion)).map(_ => ()) + } else Fox.successful(()) + + private def containsApplyImmediatelyUpdateActions(updates: List[UpdateAction]) = updates.exists { + case _: ApplyImmediatelyUpdateAction => true + case _ => false + } + + private def handleUpdateGroup(annotationId: String, updateActionGroup: UpdateActionGroup)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Unit] = + for { + updateActionsJson <- Fox.successful(Json.toJson(preprocessActionsForStorage(updateActionGroup))) + _ <- tracingDataStore.annotationUpdates.put(annotationId, updateActionGroup.version, updateActionsJson) + bucketMutatingActions = findBucketMutatingActions(updateActionGroup) + actionsGrouped: Map[String, List[BucketMutatingVolumeUpdateAction]] = bucketMutatingActions.groupBy( + _.actionTracingId) + _ <- Fox.serialCombined(actionsGrouped.keys.toList) { volumeTracingId => + for { + tracing <- annotationService.findVolume(annotationId, volumeTracingId) + _ <- volumeTracingService.applyBucketMutatingActions(volumeTracingId, + tracing, + bucketMutatingActions, + updateActionGroup.version) + } yield () + } + } yield () + + private def findBucketMutatingActions(updateActionGroup: UpdateActionGroup): List[BucketMutatingVolumeUpdateAction] = + updateActionGroup.actions.flatMap { + case a: BucketMutatingVolumeUpdateAction => Some(a) + case _ => None + } + + private def 
preprocessActionsForStorage(updateActionGroup: UpdateActionGroup): List[UpdateAction] = { + val actionsWithInfo = updateActionGroup.actions.map( + _.addTimestamp(updateActionGroup.timestamp).addAuthorId(updateActionGroup.authorId)) match { + case Nil => List[UpdateAction]() + //to the first action in the group, attach the group's info + case first :: rest => first.addInfo(updateActionGroup.info) :: rest + } + actionsWithInfo.map { + case a: UpdateBucketVolumeAction => a.withoutBase64Data + case a: AddLayerAnnotationAction => a.copy(tracingId = Some(TracingId.generate)) + case a => a + } + } + + /* If this update group has already been “handled” (successfully saved as either committed or uncommitted), + * ignore it silently. This is in case the frontend sends a retry if it believes a save to be unsuccessful + * despite the backend receiving it just fine. + */ + private def failUnlessAlreadyHandled(updateGroup: UpdateActionGroup, tracingId: String, previousVersion: Long)( + implicit ec: ExecutionContext): Fox[Long] = { + val errorMessage = s"Incorrect version. 
Expected: ${previousVersion + 1}; Got: ${updateGroup.version}" + for { + _ <- Fox.assertTrue( + handledGroupIdStoreContains(tracingId, + updateGroup.transactionId, + updateGroup.version, + updateGroup.transactionGroupIndex)) ?~> errorMessage ~> CONFLICT + } yield updateGroup.version + } + + private def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup])( + implicit tc: TokenContext): Fox[Unit] = + for { + _ <- remoteWebknossosClient.reportAnnotationUpdates( + AnnotationUpdatesReport( + annotationId, + timestamps = updateGroups.map(g => Instant(g.timestamp)), + statistics = updateGroups.flatMap(_.stats).lastOption, // TODO statistics per tracing/layer + significantChangesCount = updateGroups.map(_.significantChangesCount).sum, + viewChangesCount = updateGroups.map(_.viewChangesCount).sum, + tc.userTokenOpt + )) + } yield () + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala new file mode 100644 index 00000000000..5dc0eb5e273 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala @@ -0,0 +1,147 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType.AnnotationLayerType +import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis +import com.scalableminds.webknossos.tracingstore.tracings.volume.MagRestrictions +import play.api.libs.json.Json.WithDefaultValues +import play.api.libs.json.{Json, OFormat} + +case class AnnotationLayerParameters(typ: AnnotationLayerType, + fallbackLayerName: Option[String], + autoFallbackLayer: Boolean = false, + mappingName: Option[String] = None, + 
magRestrictions: Option[MagRestrictions], + name: Option[String], + additionalAxes: Option[Seq[AdditionalAxis]]) { + def getNameWithDefault: String = name.getOrElse(AnnotationLayer.defaultNameForType(typ)) +} +object AnnotationLayerParameters { + implicit val jsonFormat: OFormat[AnnotationLayerParameters] = + Json.using[WithDefaultValues].format[AnnotationLayerParameters] +} + +trait AnnotationUpdateAction extends UpdateAction + +case class AddLayerAnnotationAction(layerParameters: AnnotationLayerParameters, + tracingId: Option[String] = None, // filled in by backend eagerly on save + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +case class DeleteLayerAnnotationAction(tracingId: String, + layerName: String, // Just stored for nicer-looking history + `type`: AnnotationLayerType, // Just stored for nicer-looking history + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +case class UpdateLayerMetadataAnnotationAction(tracingId: String, + layerName: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with 
ApplyImmediatelyUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +case class UpdateMetadataAnnotationAction(name: Option[String], + description: Option[String], + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +case class RevertToVersionAnnotationAction(sourceVersion: Long, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +// Used only in tasks by admin to undo the work done of the annotator +case class ResetToBaseAnnotationAction(actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: 
Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +case class UpdateTdCameraAnnotationAction(actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction { + + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) + + override def isViewOnlyChange: Boolean = true +} + +object AddLayerAnnotationAction { + implicit val jsonFormat: OFormat[AddLayerAnnotationAction] = Json.format[AddLayerAnnotationAction] +} +object DeleteLayerAnnotationAction { + implicit val jsonFormat: OFormat[DeleteLayerAnnotationAction] = Json.format[DeleteLayerAnnotationAction] +} +object UpdateLayerMetadataAnnotationAction { + implicit val jsonFormat: OFormat[UpdateLayerMetadataAnnotationAction] = + Json.format[UpdateLayerMetadataAnnotationAction] +} +object UpdateMetadataAnnotationAction { + implicit val jsonFormat: OFormat[UpdateMetadataAnnotationAction] = + Json.format[UpdateMetadataAnnotationAction] +} +object RevertToVersionAnnotationAction { + implicit val jsonFormat: OFormat[RevertToVersionAnnotationAction] = + Json.format[RevertToVersionAnnotationAction] +} +object ResetToBaseAnnotationAction { + implicit val jsonFormat: OFormat[ResetToBaseAnnotationAction] = + Json.format[ResetToBaseAnnotationAction] +} +object UpdateTdCameraAnnotationAction { + implicit val jsonFormat: OFormat[UpdateTdCameraAnnotationAction] = Json.format[UpdateTdCameraAnnotationAction] +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala new file mode 100644 index 00000000000..15563fab4a0 
--- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -0,0 +1,163 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} +import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationProto} +import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerType} +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + EditableMappingUpdateAction, + EditableMappingUpdater +} +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.SkeletonUpdateAction +import com.scalableminds.webknossos.tracingstore.tracings.volume.ApplyableVolumeUpdateAction +import com.typesafe.scalalogging.LazyLogging +import net.liftweb.common.{Box, Failure, Full} + +import scala.concurrent.ExecutionContext + +case class AnnotationWithTracings( + annotation: AnnotationProto, + tracingsById: Map[String, Either[SkeletonTracing, VolumeTracing]], + editableMappingsByTracingId: Map[String, (EditableMappingInfo, EditableMappingUpdater)]) + extends LazyLogging { + + def getSkeleton(tracingId: String): Box[SkeletonTracing] = + for { + tracingEither <- tracingsById.get(tracingId) + skeletonTracing <- tracingEither match { + case Left(st: SkeletonTracing) => Full(st) + case _ => Failure(f"Tried to access tracing $tracingId as skeleton, but is volume") + } + } yield skeletonTracing + + def getVolumes: List[(String, VolumeTracing)] = + tracingsById.view.flatMap { + case (id, Right(vt: VolumeTracing)) => Some(id, vt) + case _ => None + }.toList + + def getSkeletons: List[(String, 
SkeletonTracing)] = + tracingsById.view.flatMap { + case (id, Left(st: SkeletonTracing)) => Some(id, st) + case _ => None + }.toList + + def getEditableMappingsInfo: List[(String, EditableMappingInfo)] = + editableMappingsByTracingId.view.flatMap { + case (id, (info: EditableMappingInfo, _)) => Some(id, info) + case _ => None + }.toList + + def getVolume(tracingId: String): Box[VolumeTracing] = + for { + tracingEither <- tracingsById.get(tracingId) + volumeTracing <- tracingEither match { + case Right(vt: VolumeTracing) => Full(vt) + case _ => Failure(f"Tried to access tracing $tracingId as volume, but is skeleton") + } + } yield volumeTracing + + def volumesThatHaveEditableMapping: List[(VolumeTracing, String)] = + tracingsById.view.flatMap { + case (id, Right(vt: VolumeTracing)) if vt.getHasEditableMapping => Some((vt, id)) + case _ => None + }.toList + + def getEditableMappingInfo(tracingId: String): Box[EditableMappingInfo] = + for { + (info, _) <- editableMappingsByTracingId.get(tracingId) + } yield info + + def getEditableMappingUpdater(tracingId: String): Option[EditableMappingUpdater] = + for { + (_, updater) <- editableMappingsByTracingId.get(tracingId) + } yield updater + + def version: Long = annotation.version + + def addLayer(a: AddLayerAnnotationAction, + tracingId: String, + tracing: Either[SkeletonTracing, VolumeTracing]): AnnotationWithTracings = + this.copy( + annotation = annotation.copy( + annotationLayers = annotation.annotationLayers :+ AnnotationLayerProto( + tracingId, + a.layerParameters.name.getOrElse(AnnotationLayer.defaultNameForType(a.layerParameters.typ)), + `type` = AnnotationLayerType.toProto(a.layerParameters.typ) + )), + tracingsById = tracingsById.updated(tracingId, tracing) + ) + + def deleteTracing(a: DeleteLayerAnnotationAction): AnnotationWithTracings = + this.copy( + annotation = annotation.copy(annotationLayers = annotation.annotationLayers.filter(_.tracingId != a.tracingId)), + tracingsById = 
tracingsById.removed(a.tracingId) + ) + + def updateLayerMetadata(a: UpdateLayerMetadataAnnotationAction): AnnotationWithTracings = + this.copy(annotation = annotation.copy(annotationLayers = annotation.annotationLayers.map(l => + if (l.tracingId == a.tracingId) l.copy(name = a.layerName) else l))) + + def updateMetadata(a: UpdateMetadataAnnotationAction): AnnotationWithTracings = + this.copy(annotation = annotation.copy(name = a.name, description = a.description)) + + def withVersion(newVersion: Long): AnnotationWithTracings = { + val tracingsUpdated = tracingsById.view.mapValues { + case Left(t: SkeletonTracing) => Left(t.withVersion(newVersion)) + case Right(t: VolumeTracing) => Right(t.withVersion(newVersion)) + } + this.copy(annotation = annotation.copy(version = newVersion), tracingsById = tracingsUpdated.toMap) + } + + def withNewUpdaters(materializedVersion: Long, targetVersion: Long): AnnotationWithTracings = { + val editableMappingsUpdated = editableMappingsByTracingId.view.mapValues { + case (mapping, updater) => (mapping, updater.newWithTargetVersion(materializedVersion, targetVersion)) + } + this.copy(editableMappingsByTracingId = editableMappingsUpdated.toMap) + } + + def addEditableMapping(volumeTracingId: String, + editableMappingInfo: EditableMappingInfo, + updater: EditableMappingUpdater): AnnotationWithTracings = + this.copy(editableMappingsByTracingId = + editableMappingsByTracingId.updated(volumeTracingId, (editableMappingInfo, updater))) + + def applySkeletonAction(a: SkeletonUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + for { + skeletonTracing <- getSkeleton(a.actionTracingId) + updated = a.applyOn(skeletonTracing) + } yield this.copy(tracingsById = tracingsById.updated(a.actionTracingId, Left(updated))) + + def applyVolumeAction(a: ApplyableVolumeUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + for { + volumeTracing <- getVolume(a.actionTracingId) + updated = 
a.applyOn(volumeTracing) + } yield + AnnotationWithTracings(annotation, + tracingsById.updated(a.actionTracingId, Right(updated)), + editableMappingsByTracingId) + + def applyEditableMappingAction(a: EditableMappingUpdateAction)( + implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + for { + updater: EditableMappingUpdater <- getEditableMappingUpdater(a.actionTracingId).toFox + info <- getEditableMappingInfo(a.actionTracingId).toFox + updated <- updater.applyOneUpdate(info, a) + } yield + this.copy( + editableMappingsByTracingId = editableMappingsByTracingId.updated(a.actionTracingId, (updated, updater))) + + def flushBufferedUpdates()(implicit ec: ExecutionContext): Fox[Unit] = { + val updaters = editableMappingsByTracingId.values.map(_._2).toList + for { + _ <- Fox.serialCombined(updaters)(updater => updater.flushBuffersToFossil()) + } yield () + } + + def skeletonStats: String = + f"skeleton with ${getSkeletons.map(_._2).map(_.trees.map(_.nodes.length).sum).mkString} nodes" + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala new file mode 100644 index 00000000000..26fa3731a30 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -0,0 +1,745 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.util.accesscontext.TokenContext +import com.scalableminds.util.cache.AlfuCache +import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} +import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.Fox.{bool2Fox, box2Fox, option2Fox} +import com.scalableminds.webknossos.datastore.Annotation.{ + AnnotationLayerProto, + AnnotationLayerTypeProto, + AnnotationProto +} +import 
com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + EditableMappingLayer, + EditableMappingService, + EditableMappingUpdateAction, + EditableMappingUpdater +} +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ + CreateNodeSkeletonAction, + DeleteNodeSkeletonAction, + SkeletonUpdateAction, + UpdateTracingSkeletonAction +} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + ApplyableVolumeUpdateAction, + BucketMutatingVolumeUpdateAction, + MagRestrictions, + UpdateMappingNameVolumeAction, + VolumeTracingService, +} +import com.scalableminds.webknossos.tracingstore.tracings.{ + FallbackDataHelper, + KeyValueStoreImplicits, + TracingDataStore, + TracingId, + TracingSelector, + VersionedKeyValuePair +} +import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} +import com.typesafe.scalalogging.LazyLogging +import net.liftweb.common.{Empty, Full} +import play.api.libs.json.{JsObject, JsValue, Json} + +import javax.inject.Inject +import scala.concurrent.ExecutionContext + +class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknossosClient, + editableMappingService: EditableMappingService, + val volumeTracingService: VolumeTracingService, + skeletonTracingService: SkeletonTracingService, + val remoteDatastoreClient: TSRemoteDatastoreClient, + tracingDataStore: TracingDataStore) + extends KeyValueStoreImplicits + with FallbackDataHelper + with 
ProtoGeometryImplicits + with AnnotationReversion + with UpdateGroupHandling + with LazyLogging { + + private lazy val materializedAnnotationWithTracingCache = + // annotation id, version + AlfuCache[(String, Long), AnnotationWithTracings](maxCapacity = 1000) + + def get(annotationId: String, version: Option[Long])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationProto] = + for { + withTracings <- getWithTracings(annotationId, version) + } yield withTracings.annotation + + def getMultiple(annotationIds: Seq[String])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Seq[AnnotationProto]] = + Fox.serialCombined(annotationIds) { annotationId => + get(annotationId, None) + } + + private def getWithTracings(annotationId: String, version: Option[Long])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationWithTracings] = + for { + newestMaterialized <- getNewestMaterialized(annotationId) + targetVersion <- determineTargetVersion(annotationId, newestMaterialized, version) ?~> "determineTargetVersion.failed" + // When requesting any other than the newest version, do not consider the changes final + reportChangesToWk = version.isEmpty || version.contains(targetVersion) + updatedAnnotation <- materializedAnnotationWithTracingCache.getOrLoad( + (annotationId, targetVersion), + _ => getWithTracingsVersioned(annotationId, targetVersion, reportChangesToWk = reportChangesToWk) + ) + } yield updatedAnnotation + + private def getWithTracingsVersioned(annotationId: String, version: Long, reportChangesToWk: Boolean)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationWithTracings] = + for { + annotationWithVersion <- tracingDataStore.annotations.get(annotationId, Some(version))( + fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" + _ = logger.info( + s"cache miss for $annotationId v$version, applying updates from ${annotationWithVersion.version} to $version...") + annotation = annotationWithVersion.value + updated <- 
applyPendingUpdates(annotation, annotationId, version, reportChangesToWk) ?~> "applyUpdates.failed" + } yield updated + + def currentMaterializableVersion(annotationId: String): Fox[Long] = + tracingDataStore.annotationUpdates.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) + + def currentMaterializedVersion(annotationId: String): Fox[Long] = + tracingDataStore.annotations.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) + + private def getNewestMaterialized(annotationId: String): Fox[AnnotationProto] = + for { + keyValuePair <- tracingDataStore.annotations.get[AnnotationProto](annotationId, mayBeEmpty = Some(true))( + fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" + } yield keyValuePair.value + + private def findPendingUpdates(annotationId: String, existingVersion: Long, desiredVersion: Long)( + implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = + if (desiredVersion == existingVersion) Fox.successful(List()) + else { + tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( + annotationId, + Some(desiredVersion), + Some(existingVersion + 1))(fromJsonBytes[List[UpdateAction]]) + } + + // TODO option to dry apply? 
+ private def applyUpdate( + annotationId: String, + annotationWithTracings: AnnotationWithTracings, + updateAction: UpdateAction, + targetVersion: Long // Note: this is not the target version of this one update, but of all pending + )(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = + for { + updated <- updateAction match { + case a: AddLayerAnnotationAction => + addLayer(annotationId, annotationWithTracings, a, targetVersion) + case a: DeleteLayerAnnotationAction => + Fox.successful(annotationWithTracings.deleteTracing(a)) + case a: UpdateLayerMetadataAnnotationAction => + Fox.successful(annotationWithTracings.updateLayerMetadata(a)) + case a: UpdateMetadataAnnotationAction => + Fox.successful(annotationWithTracings.updateMetadata(a)) + case a: SkeletonUpdateAction => + annotationWithTracings.applySkeletonAction(a) ?~> "applySkeletonAction.failed" + case a: UpdateMappingNameVolumeAction if a.isEditable.contains(true) => + for { + withNewEditableMapping <- addEditableMapping(annotationId, annotationWithTracings, a, targetVersion) + withApplyedVolumeAction <- withNewEditableMapping.applyVolumeAction(a) + } yield withApplyedVolumeAction + case a: ApplyableVolumeUpdateAction => + annotationWithTracings.applyVolumeAction(a) + case a: EditableMappingUpdateAction => + annotationWithTracings.applyEditableMappingAction(a) + case a: RevertToVersionAnnotationAction => + revertToVersion(annotationId, annotationWithTracings, a, targetVersion) + case _: ResetToBaseAnnotationAction => + resetToBase(annotationId, annotationWithTracings, targetVersion) + case _: BucketMutatingVolumeUpdateAction => + Fox.successful(annotationWithTracings) // No-op, as bucket-mutating actions are performed eagerly, so not here. 
+ case _ => Fox.failure(s"Received unsupported AnnotationUpdateAction action ${Json.toJson(updateAction)}") + } + } yield updated + + private def addLayer(annotationId: String, + annotationWithTracings: AnnotationWithTracings, + action: AddLayerAnnotationAction, + targetVersion: Long)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + for { + tracingId <- action.tracingId.toFox ?~> "add layer action has no tracingId" + _ <- bool2Fox( + !annotationWithTracings.annotation.annotationLayers + .exists(_.name == action.layerParameters.getNameWithDefault)) ?~> "addLayer.nameInUse" + _ <- bool2Fox( + !annotationWithTracings.annotation.annotationLayers.exists( + _.`type` == AnnotationLayerTypeProto.Skeleton && action.layerParameters.typ == AnnotationLayerType.Skeleton)) ?~> "addLayer.onlyOneSkeletonAllowed" + tracing <- remoteWebknossosClient.createTracingFor(annotationId, + action.layerParameters, + previousVersion = targetVersion - 1) + updated = annotationWithTracings.addLayer(action, tracingId, tracing) + } yield updated + + private def revertToVersion( + annotationId: String, + annotationWithTracings: AnnotationWithTracings, + revertAction: RevertToVersionAnnotationAction, + newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = + // Note: works only if revert actions are in separate update groups + for { + _ <- bool2Fox(revertAction.sourceVersion >= annotationWithTracings.annotation.earliestAccessibleVersion) ?~> f"Trying to revert to ${revertAction.sourceVersion}, but earliest accessible is ${annotationWithTracings.annotation.earliestAccessibleVersion}" + sourceAnnotation: AnnotationWithTracings <- getWithTracings(annotationId, Some(revertAction.sourceVersion)) + _ = logger.info( + s"reverting to sourceVersion ${revertAction.sourceVersion}. 
got sourceAnnotation with version ${sourceAnnotation.version} with ${sourceAnnotation.skeletonStats}") + _ <- revertDistributedElements(annotationWithTracings, sourceAnnotation, revertAction.sourceVersion, newVersion) + } yield sourceAnnotation + + private def resetToBase(annotationId: String, annotationWithTracings: AnnotationWithTracings, newVersion: Long)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationWithTracings] = { + // Note: works only if reset actions are in separate update groups + val sourceVersion = 0L // Tasks are always created as v0 currently + logger.info(s"Resetting annotation $annotationId to base (v$sourceVersion)") + for { + sourceAnnotation: AnnotationWithTracings <- getWithTracings(annotationId, Some(sourceVersion)) + _ <- revertDistributedElements(annotationWithTracings, sourceAnnotation, sourceVersion, newVersion) + } yield sourceAnnotation + } + + def updateActionLog(annotationId: String, newestVersion: Long, oldestVersion: Long)( + implicit ec: ExecutionContext): Fox[JsValue] = { + def versionedTupleToJson(tuple: (Long, List[UpdateAction])): JsObject = + Json.obj( + "version" -> tuple._1, + "value" -> Json.toJson(tuple._2) + ) + + val batchRanges = batchRangeInclusive(oldestVersion, newestVersion, batchSize = 100) + for { + updateActionBatches <- Fox.serialCombined(batchRanges.toList) { batchRange => + val batchFrom = batchRange._1 + val batchTo = batchRange._2 + tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( + annotationId, + Some(batchTo), + Some(batchFrom))(fromJsonBytes[List[UpdateAction]]) + } + } yield Json.toJson(updateActionBatches.flatten.map(versionedTupleToJson)) + } + + def findEditableMappingInfo(annotationId: String, tracingId: String, version: Option[Long] = None)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[EditableMappingInfo] = + for { + annotation <- getWithTracings(annotationId, version) ?~> "getWithTracings.failed" + tracing <- 
annotation.getEditableMappingInfo(tracingId) ?~> "getEditableMapping.failed" + } yield tracing + + // TODO move the functions that construct the AnnotationWithTracigns elsewhere to keep this file smaller? + private def addEditableMapping( + annotationId: String, + annotationWithTracings: AnnotationWithTracings, + action: UpdateMappingNameVolumeAction, + targetVersion: Long)(implicit tc: TokenContext, ec: ExecutionContext): Fox[AnnotationWithTracings] = + for { + editableMappingInfo <- getEditableMappingInfoFromStore(action.actionTracingId, annotationWithTracings.version) + volumeTracing <- annotationWithTracings.getVolume(action.actionTracingId).toFox + updater <- editableMappingUpdaterFor(annotationId, + action.actionTracingId, + volumeTracing, + editableMappingInfo.value, + annotationWithTracings.version, + targetVersion) + } yield annotationWithTracings.addEditableMapping(action.actionTracingId, editableMappingInfo.value, updater) + + private def applyPendingUpdates( + annotation: AnnotationProto, + annotationId: String, + targetVersion: Long, + reportChangesToWk: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = + for { + updateGroupsAsSaved <- findPendingUpdates(annotationId, annotation.version, targetVersion) ?~> "findPendingUpdates.failed" + updatesGroupsRegrouped = regroupByIsolationSensitiveActions(updateGroupsAsSaved) + annotationWithTracings <- findTracingsForAnnotation(annotation) ?~> "findTracingsForUpdates.failed" + annotationWithTracingsAndMappings <- findEditableMappingsForAnnotation( + annotationId, + annotationWithTracings, + annotation.version, + targetVersion) // Note: this targetVersion is overwritten for each update group, see annotation.withNewUpdaters + updated <- applyUpdatesGrouped(annotationWithTracingsAndMappings, + annotationId, + updatesGroupsRegrouped, + reportChangesToWk) ?~> "applyUpdates.inner.failed" + } yield updated + + private def findEditableMappingsForAnnotation( + annotationId: String, 
+ annotationWithTracings: AnnotationWithTracings, + currentMaterializedVersion: Long, + targetVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext) = { + val volumeWithEditableMapping = annotationWithTracings.volumesThatHaveEditableMapping + logger.info(s"fetching editable mappings ${volumeWithEditableMapping.map(_._2).mkString(",")}") + for { + idInfoUpdaterTuples <- Fox.serialCombined(volumeWithEditableMapping) { + case (volumeTracing, volumeTracingId) => + for { + editableMappingInfo <- getEditableMappingInfoFromStore(volumeTracingId, annotationWithTracings.version) + updater <- editableMappingUpdaterFor(annotationId, + volumeTracingId, + volumeTracing, + editableMappingInfo.value, + currentMaterializedVersion, + targetVersion) + } yield (editableMappingInfo.key, (editableMappingInfo.value, updater)) + } + } yield annotationWithTracings.copy(editableMappingsByTracingId = idInfoUpdaterTuples.toMap) + } + + private def getEditableMappingInfoFromStore(volumeTracingId: String, + version: Long): Fox[VersionedKeyValuePair[EditableMappingInfo]] = + tracingDataStore.editableMappingsInfo.get(volumeTracingId, version = Some(version))( + fromProtoBytes[EditableMappingInfo]) + + private def editableMappingUpdaterFor( + annotationId: String, + tracingId: String, + volumeTracing: VolumeTracing, + editableMappingInfo: EditableMappingInfo, + currentMaterializedVersion: Long, + targetVersion: Long)(implicit tc: TokenContext, ec: ExecutionContext): Fox[EditableMappingUpdater] = + for { + remoteFallbackLayer <- remoteFallbackLayerFromVolumeTracing(volumeTracing, tracingId) + } yield + new EditableMappingUpdater( + annotationId, + tracingId, + editableMappingInfo.baseMappingName, + currentMaterializedVersion, + targetVersion, + remoteFallbackLayer, + tc, + remoteDatastoreClient, + editableMappingService, + this, + tracingDataStore, + relyOnAgglomerateIds = false // TODO should we? 
+ ) + + private def findTracingsForAnnotation(annotation: AnnotationProto)( + implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { + val skeletonTracingIds = + annotation.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Skeleton).map(_.tracingId) + + val volumeTracingIds = + annotation.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Volume).map(_.tracingId) + + logger.info(s"fetching volumes $volumeTracingIds and skeletons $skeletonTracingIds") + for { + skeletonTracings <- Fox.serialCombined(skeletonTracingIds.toList)( + id => + tracingDataStore.skeletons.get[SkeletonTracing](id, Some(annotation.version), mayBeEmpty = Some(true))( + fromProtoBytes[SkeletonTracing])) + volumeTracings <- Fox.serialCombined(volumeTracingIds.toList)( + id => + tracingDataStore.volumes + .get[VolumeTracing](id, Some(annotation.version), mayBeEmpty = Some(true))(fromProtoBytes[VolumeTracing])) + _ = logger.info(s"fetched ${skeletonTracings.length} skeletons and ${volumeTracings.length} volumes") + skeletonTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = skeletonTracingIds + .zip(skeletonTracings.map(versioned => Left[SkeletonTracing, VolumeTracing](versioned.value))) + .toMap + volumeTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = volumeTracingIds + .zip(volumeTracings.map(versioned => Right[SkeletonTracing, VolumeTracing](versioned.value))) + .toMap + } yield AnnotationWithTracings(annotation, skeletonTracingsMap ++ volumeTracingsMap, Map.empty) + } + + private def applyUpdatesGrouped( + annotation: AnnotationWithTracings, + annotationId: String, + updateGroups: List[(Long, List[UpdateAction])], + reportChangesToWk: Boolean + )(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = { + def updateGroupedIter(annotationWithTracingsFox: Fox[AnnotationWithTracings], + remainingUpdateGroups: List[(Long, List[UpdateAction])]): Fox[AnnotationWithTracings] = + 
annotationWithTracingsFox.futureBox.flatMap { + case Empty => Fox.empty + case Full(annotationWithTracings) => + remainingUpdateGroups match { + case List() => Fox.successful(annotationWithTracings) + case updateGroup :: tail => + updateGroupedIter( + applyUpdates(annotationWithTracings, annotationId, updateGroup._2, updateGroup._1, reportChangesToWk), + tail) + } + case _ => annotationWithTracingsFox + } + + updateGroupedIter(Some(annotation), updateGroups) + } + + private def applyUpdates( + annotation: AnnotationWithTracings, + annotationId: String, + updates: List[UpdateAction], + targetVersion: Long, + reportChangesToWk: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = { + + logger.info(s"applying ${updates.length} to go from v${annotation.version} to v$targetVersion") + + // TODO can we make this tail recursive? + def updateIter(annotationWithTracingsFox: Fox[AnnotationWithTracings], + remainingUpdates: List[UpdateAction]): Fox[AnnotationWithTracings] = + annotationWithTracingsFox.futureBox.flatMap { + case Empty => Fox.empty + case Full(annotationWithTracings) => + remainingUpdates match { + case List() => Fox.successful(annotationWithTracings) + case update :: tail => + updateIter(applyUpdate(annotationId, annotationWithTracings, update, targetVersion), tail) + } + case _ => annotationWithTracingsFox + } + + if (updates.isEmpty) Full(annotation) + else { + for { + updated <- updateIter(Some(annotation.withNewUpdaters(annotation.version, targetVersion)), updates) + updatedWithNewVerson = updated.withVersion(targetVersion) + _ = logger.info(s"flushing v$targetVersion, with ${updated.skeletonStats}") + _ <- updatedWithNewVerson.flushBufferedUpdates() + _ <- flushUpdatedTracings(updatedWithNewVerson) + _ <- flushAnnotationInfo(annotationId, updatedWithNewVerson) + _ <- Fox.runIf(reportChangesToWk)(remoteWebknossosClient.updateAnnotation( + annotationId, + updatedWithNewVerson.annotation)) // TODO perf: skip if annotation 
is identical + } yield updatedWithNewVerson + } + } + + private def flushUpdatedTracings(annotationWithTracings: AnnotationWithTracings)(implicit ec: ExecutionContext) = + // TODO skip some flushes to save disk space (for non-updated layers) + for { + _ <- Fox.serialCombined(annotationWithTracings.getVolumes) { + case (volumeTracingId, volumeTracing) => + tracingDataStore.volumes.put(volumeTracingId, volumeTracing.version, volumeTracing) + } + _ <- Fox.serialCombined(annotationWithTracings.getSkeletons) { + case (skeletonTracingId, skeletonTracing: SkeletonTracing) => + tracingDataStore.skeletons.put(skeletonTracingId, skeletonTracing.version, skeletonTracing) + } + _ <- Fox.serialCombined(annotationWithTracings.getEditableMappingsInfo) { + case (volumeTracingId, editableMappingInfo) => + tracingDataStore.editableMappingsInfo.put(volumeTracingId, + annotationWithTracings.version, + editableMappingInfo) + } + } yield () + + private def flushAnnotationInfo(annotationId: String, annotationWithTracings: AnnotationWithTracings) = + tracingDataStore.annotations.put(annotationId, annotationWithTracings.version, annotationWithTracings.annotation) + + private def determineTargetVersion(annotationId: String, + newestMaterializedAnnotation: AnnotationProto, + requestedVersionOpt: Option[Long]): Fox[Long] = + /* + * Determines the newest saved version from the updates column. 
+ * if there are no updates at all, assume annotation is brand new (possibly created from NML, + * hence the emptyFallbck newestMaterializedAnnotation.version) + */ + for { + newestUpdateVersion <- tracingDataStore.annotationUpdates.getVersion(annotationId, + mayBeEmpty = Some(true), + emptyFallback = + Some(newestMaterializedAnnotation.version)) + targetVersion = requestedVersionOpt match { + case None => newestUpdateVersion + case Some(requestedVersion) => + math.max(newestMaterializedAnnotation.earliestAccessibleVersion, + math.min(requestedVersion, newestUpdateVersion)) + } + } yield targetVersion + + def updateActionStatistics(tracingId: String): Fox[JsObject] = + for { + updateActionGroups <- tracingDataStore.annotationUpdates.getMultipleVersions(tracingId)( + fromJsonBytes[List[UpdateAction]]) + updateActions = updateActionGroups.flatten + } yield { + Json.obj( + "updateTracingActionCount" -> updateActions.count { + case _: UpdateTracingSkeletonAction => true + case _ => false + }, + "createNodeActionCount" -> updateActions.count { + case _: CreateNodeSkeletonAction => true + case _ => false + }, + "deleteNodeActionCount" -> updateActions.count { + case _: DeleteNodeSkeletonAction => true + case _ => false + } + ) + } + + def editableMappingLayer(annotationId: String, tracingId: String, tracing: VolumeTracing)( + implicit tc: TokenContext): EditableMappingLayer = + EditableMappingLayer( + tracingId, + tracing.boundingBox, + resolutions = tracing.mags.map(vec3IntFromProto).toList, + largestSegmentId = Some(0L), + elementClass = tracing.elementClass, + tc, + tracing = tracing, + annotationId = annotationId, + tracingId = tracingId, + annotationService = this, + editableMappingService = editableMappingService + ) + + def baseMappingName(annotationId: String, tracingId: String, tracing: VolumeTracing)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Option[String]] = + if (tracing.getHasEditableMapping) + for { + editableMappingInfo <- 
findEditableMappingInfo(annotationId, tracingId) + } yield Some(editableMappingInfo.baseMappingName) + else Fox.successful(tracing.mappingName) + + private def batchRangeInclusive(from: Long, to: Long, batchSize: Long): Seq[(Long, Long)] = + (0L to ((to - from) / batchSize)).map { batchIndex => + val batchFrom = batchIndex * batchSize + from + val batchTo = Math.min(to, (batchIndex + 1) * batchSize + from - 1) + (batchFrom, batchTo) + } + + def findVolume(annotationId: String, + tracingId: String, + version: Option[Long] = None, + useCache: Boolean = true, // TODO + applyUpdates: Boolean = false)(implicit tc: TokenContext, ec: ExecutionContext): Fox[VolumeTracing] = + for { + annotation <- getWithTracings(annotationId, version) // TODO is applyUpdates still needed? + tracing <- annotation.getVolume(tracingId) + } yield tracing + + def findSkeleton( + annotationId: String, + tracingId: String, + version: Option[Long] = None, + useCache: Boolean = true, // TODO + applyUpdates: Boolean = false)(implicit tc: TokenContext, ec: ExecutionContext): Fox[SkeletonTracing] = + if (tracingId == TracingId.dummy) + Fox.successful(skeletonTracingService.dummyTracing) + else { + for { + annotation <- getWithTracings(annotationId, version) // TODO is applyUpdates still needed? + tracing <- annotation.getSkeleton(tracingId) + } yield tracing + } + + def findMultipleVolumes(selectors: Seq[Option[TracingSelector]], + useCache: Boolean = true, + applyUpdates: Boolean = false)(implicit tc: TokenContext, + ec: ExecutionContext): Fox[List[Option[VolumeTracing]]] = + Fox.combined { + selectors.map { + case Some(selector) => + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(selector.tracingId) + tracing <- findVolume(annotationId, selector.tracingId, selector.version, useCache, applyUpdates) + .map(Some(_)) + } yield tracing + case None => Fox.successful(None) + } + } + + // TODO build variant without TracingSelector and Option? 
+ def findMultipleSkeletons(selectors: Seq[Option[TracingSelector]], + useCache: Boolean = true, + applyUpdates: Boolean = false)(implicit tc: TokenContext, + ec: ExecutionContext): Fox[List[Option[SkeletonTracing]]] = + Fox.combined { + selectors.map { + case Some(selector) => + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(selector.tracingId) // TODO perf skip that if we already have it? + tracing <- findSkeleton(annotationId, selector.tracingId, selector.version, useCache, applyUpdates) + .map(Some(_)) + } yield tracing + case None => Fox.successful(None) + } + } + + def duplicate( + annotationId: String, + newAnnotationId: String, + version: Option[Long], + isFromTask: Boolean, + datasetBoundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationProto] = + for { + v0Annotation <- get(annotationId, Some(0L)) + + // Duplicate updates + tracingIdMap <- duplicateUpdates(annotationId, newAnnotationId, v0Annotation.annotationLayers.map(_.tracingId)) + + // Duplicate v0 + v0NewLayers <- Fox.serialCombined(v0Annotation.annotationLayers)(layer => + duplicateLayer(annotationId, layer, tracingIdMap, v0Annotation.version, isFromTask, datasetBoundingBox)) + v0DuplicatedAnnotation = v0Annotation.copy(annotationLayers = v0NewLayers, + earliestAccessibleVersion = v0Annotation.version) + + _ <- tracingDataStore.annotations.put(newAnnotationId, v0Annotation.version, v0DuplicatedAnnotation) + + // Duplicate current + currentAnnotation <- get(annotationId, version) + newLayers <- Fox.serialCombined(currentAnnotation.annotationLayers)(layer => + duplicateLayer(annotationId, layer, tracingIdMap, currentAnnotation.version, isFromTask, datasetBoundingBox)) + duplicatedAnnotation = currentAnnotation.copy(annotationLayers = newLayers, + earliestAccessibleVersion = currentAnnotation.version) + _ <- tracingDataStore.annotations.put(newAnnotationId, currentAnnotation.version, duplicatedAnnotation) + + } yield 
duplicatedAnnotation + + private def duplicateUpdates(annotationId: String, newAnnotationId: String, v0TracingIds: Seq[String])( + implicit ec: ExecutionContext): Fox[Map[String, String]] = { + val tracingIdMapMutable = scala.collection.mutable.Map[String, String]() + v0TracingIds.foreach { v0TracingId => + tracingIdMapMutable.put(v0TracingId, TracingId.generate) + } + // TODO memory: batch + + for { + updateLists: Seq[(Long, List[UpdateAction])] <- tracingDataStore.annotationUpdates + .getMultipleVersionsAsVersionValueTuple(annotationId)(fromJsonBytes[List[UpdateAction]]) + _ <- Fox.serialCombined(updateLists) { + case (version, updateList) => + for { + updateListAdapted <- Fox.serialCombined(updateList) { + case a: AddLayerAnnotationAction => + for { + actionTracingId <- a.tracingId ?~> "duplicating addLayer without tracingId" + _ = if (!tracingIdMapMutable.contains(actionTracingId)) { + a.tracingId.foreach(actionTracingId => tracingIdMapMutable.put(actionTracingId, TracingId.generate)) + } + mappedTracingId <- tracingIdMapMutable.get(actionTracingId) ?~> "duplicating action for unknown layer" + } yield a.copy(tracingId = Some(mappedTracingId)) + case a: LayerUpdateAction => + for { + mappedTracingId <- tracingIdMapMutable.get(a.actionTracingId) ?~> "duplicating action for unknown layer" + } yield a.withActionTracingId(mappedTracingId) + } + _ <- tracingDataStore.annotationUpdates.put(newAnnotationId, version, Json.toJson(updateListAdapted)) + } yield () + } + } yield tracingIdMapMutable.toMap + } + + private def duplicateLayer(annotationId: String, + layer: AnnotationLayerProto, + tracingIdMap: Map[String, String], + version: Long, + isFromTask: Boolean, + datasetBoundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationLayerProto] = + for { + newTracingId <- tracingIdMap.get(layer.tracingId) ?~> "duplicate unknown layer" + _ <- layer.`type` match { + case AnnotationLayerTypeProto.Volume => + 
duplicateVolumeTracing(annotationId, + layer.tracingId, + version, + newTracingId, + version, + isFromTask, + None, + datasetBoundingBox, + MagRestrictions.empty, + None, + None) + case AnnotationLayerTypeProto.Skeleton => + duplicateSkeletonTracing(annotationId, + layer.tracingId, + version, + newTracingId, + version, + isFromTask, + None, + None, + None) + case AnnotationLayerTypeProto.Unrecognized(num) => Fox.failure(f"unrecognized annotation layer type: $num") + } + } yield layer.copy(tracingId = newTracingId) + + def duplicateVolumeTracing( + sourceAnnotationId: String, + sourceTracingId: String, + sourceVersion: Long, + newTracingId: String, + newVersion: Long, + isFromTask: Boolean, + boundingBox: Option[BoundingBox], + datasetBoundingBox: Option[BoundingBox], + magRestrictions: MagRestrictions, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = + for { + sourceTracing <- findVolume(sourceAnnotationId, sourceTracingId, Some(sourceVersion)) + newTracing <- volumeTracingService.adaptVolumeForDuplicate(sourceTracingId, + newTracingId, + sourceTracing, + isFromTask, + boundingBox, + datasetBoundingBox, + magRestrictions, + editPosition, + editRotation, + newVersion) + _ <- tracingDataStore.volumes.put(newTracingId, newVersion, newTracing) + _ <- Fox.runIf(!newTracing.getHasEditableMapping)( + volumeTracingService.duplicateVolumeData(sourceTracingId, sourceTracing, newTracingId, newTracing)) + _ <- Fox.runIf(newTracing.getHasEditableMapping)( + duplicateEditableMapping(sourceAnnotationId, sourceTracingId, newTracingId, sourceVersion, newVersion)) + } yield newTracingId + + private def duplicateEditableMapping(sourceAnnotationId: String, + sourceTracingId: String, + newTracingId: String, + sourceVersion: Long, + newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = + for { + editableMappingInfo <- findEditableMappingInfo(sourceAnnotationId, sourceTracingId, 
Some(sourceVersion)) + _ <- tracingDataStore.editableMappingsInfo.put(newTracingId, newVersion, toProtoBytes(editableMappingInfo)) + _ <- editableMappingService.duplicateSegmentToAgglomerate(sourceTracingId, + newTracingId, + sourceVersion, + newVersion) + _ <- editableMappingService.duplicateAgglomerateToGraph(sourceTracingId, newTracingId, sourceVersion, newVersion) + } yield () + + def duplicateSkeletonTracing( + sourceAnnotationId: String, + sourceTracingId: String, + sourceVersion: Long, + newTracingId: String, + newVersion: Long, + isFromTask: Boolean, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double], + boundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = + for { + skeleton <- findSkeleton(sourceAnnotationId, sourceTracingId, Some(sourceVersion)) + adaptedSkeleton = skeletonTracingService.adaptSkeletonForDuplicate(skeleton, + isFromTask, + editPosition, + editRotation, + boundingBox, + newVersion) + _ <- tracingDataStore.skeletons.put(newTracingId, newVersion, adaptedSkeleton) + } yield newTracingId + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala new file mode 100644 index 00000000000..e10789e0210 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -0,0 +1,244 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + MergeAgglomerateUpdateAction, + SplitAgglomerateUpdateAction +} +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ + CreateEdgeSkeletonAction, + CreateNodeSkeletonAction, + CreateTreeSkeletonAction, + DeleteEdgeSkeletonAction, + DeleteNodeSkeletonAction, + DeleteTreeSkeletonAction, + MergeTreeSkeletonAction, + 
MoveTreeComponentSkeletonAction, + UpdateNodeSkeletonAction, + UpdateTracingSkeletonAction, + UpdateTreeEdgesVisibilitySkeletonAction, + UpdateTreeGroupVisibilitySkeletonAction, + UpdateTreeGroupsSkeletonAction, + UpdateTreeSkeletonAction, + UpdateTreeVisibilitySkeletonAction, + UpdateUserBoundingBoxVisibilitySkeletonAction, + UpdateUserBoundingBoxesSkeletonAction +} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + CompactVolumeUpdateAction, + CreateSegmentVolumeAction, + DeleteSegmentDataVolumeAction, + DeleteSegmentVolumeAction, + ImportVolumeDataVolumeAction, + RemoveFallbackLayerVolumeAction, + UpdateBucketVolumeAction, + UpdateMappingNameVolumeAction, + UpdateSegmentGroupsVolumeAction, + UpdateSegmentVolumeAction, + UpdateTracingVolumeAction, + UpdateUserBoundingBoxVisibilityVolumeAction, + UpdateUserBoundingBoxesVolumeAction +} +import play.api.libs.json.{Format, JsError, JsObject, JsPath, JsResult, JsValue, Json, OFormat, Reads} + +trait UpdateAction { + def actionTimestamp: Option[Long] + + def addTimestamp(timestamp: Long): UpdateAction + + def addInfo(info: Option[String]): UpdateAction + + def addAuthorId(authorId: Option[String]): UpdateAction + + def isViewOnlyChange: Boolean = false +} + +trait ApplyImmediatelyUpdateAction extends UpdateAction + +trait LayerUpdateAction extends UpdateAction { + def actionTracingId: String + def withActionTracingId(newTracingId: String): LayerUpdateAction +} + +object UpdateAction { + + implicit object updateActionFormat extends Format[UpdateAction] { + override def reads(json: JsValue): JsResult[UpdateAction] = { + val jsonValue = (json \ "value").as[JsObject] + (json \ "name").as[String] match { + // Skeleton + case "createTree" => deserialize[CreateTreeSkeletonAction](jsonValue) + case "deleteTree" => deserialize[DeleteTreeSkeletonAction](jsonValue) + case "updateTree" => deserialize[UpdateTreeSkeletonAction](jsonValue) + case "mergeTree" => deserialize[MergeTreeSkeletonAction](jsonValue) + case 
"moveTreeComponent" => deserialize[MoveTreeComponentSkeletonAction](jsonValue) + case "createNode" => deserialize[CreateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) + case "deleteNode" => deserialize[DeleteNodeSkeletonAction](jsonValue) + case "updateNode" => deserialize[UpdateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) + case "createEdge" => deserialize[CreateEdgeSkeletonAction](jsonValue) + case "deleteEdge" => deserialize[DeleteEdgeSkeletonAction](jsonValue) + case "updateTreeGroups" => deserialize[UpdateTreeGroupsSkeletonAction](jsonValue) + case "updateSkeletonTracing" => deserialize[UpdateTracingSkeletonAction](jsonValue) + case "updateTreeVisibility" => deserialize[UpdateTreeVisibilitySkeletonAction](jsonValue) + case "updateTreeGroupVisibility" => deserialize[UpdateTreeGroupVisibilitySkeletonAction](jsonValue) + case "updateTreeEdgesVisibility" => deserialize[UpdateTreeEdgesVisibilitySkeletonAction](jsonValue) + case "updateUserBoundingBoxesInSkeletonTracing" => deserialize[UpdateUserBoundingBoxesSkeletonAction](jsonValue) + case "updateUserBoundingBoxVisibilityInSkeletonTracing" => + deserialize[UpdateUserBoundingBoxVisibilitySkeletonAction](jsonValue) + + // Volume + case "updateBucket" => deserialize[UpdateBucketVolumeAction](jsonValue) + case "updateVolumeTracing" => deserialize[UpdateTracingVolumeAction](jsonValue) + case "updateUserBoundingBoxesInVolumeTracing" => + deserialize[UpdateUserBoundingBoxesVolumeAction](jsonValue) + case "updateUserBoundingBoxVisibilityInVolumeTracing" => + deserialize[UpdateUserBoundingBoxVisibilityVolumeAction](jsonValue) + case "removeFallbackLayer" => deserialize[RemoveFallbackLayerVolumeAction](jsonValue) + case "importVolumeTracing" => deserialize[ImportVolumeDataVolumeAction](jsonValue) + case "createSegment" => deserialize[CreateSegmentVolumeAction](jsonValue) + case "updateSegment" => deserialize[UpdateSegmentVolumeAction](jsonValue) + case "updateSegmentGroups" => 
deserialize[UpdateSegmentGroupsVolumeAction](jsonValue) + case "deleteSegment" => deserialize[DeleteSegmentVolumeAction](jsonValue) + case "deleteSegmentData" => deserialize[DeleteSegmentDataVolumeAction](jsonValue) + case "updateMappingName" => deserialize[UpdateMappingNameVolumeAction](jsonValue) + + // Editable Mapping + case "mergeAgglomerate" => deserialize[MergeAgglomerateUpdateAction](jsonValue) + case "splitAgglomerate" => deserialize[SplitAgglomerateUpdateAction](jsonValue) + + // Annotation + case "addLayerToAnnotation" => deserialize[AddLayerAnnotationAction](jsonValue) + case "deleteLayerFromAnnotation" => deserialize[DeleteLayerAnnotationAction](jsonValue) + case "updateLayerMetadata" => deserialize[UpdateLayerMetadataAnnotationAction](jsonValue) + case "updateMetadataOfAnnotation" => deserialize[UpdateMetadataAnnotationAction](jsonValue) + case "revertToVersion" => deserialize[RevertToVersionAnnotationAction](jsonValue) + case "resetToBase" => deserialize[ResetToBaseAnnotationAction](jsonValue) + case "updateTdCamera" => deserialize[UpdateTdCameraAnnotationAction](jsonValue) + + case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") + } + } + + private def deserialize[T](json: JsValue, shouldTransformPositions: Boolean = false)( + implicit tjs: Reads[T]): JsResult[T] = + if (shouldTransformPositions) + json.transform(positionTransform).get.validate[T] + else + json.validate[T] + + private val positionTransform = + (JsPath \ "position").json.update(JsPath.read[List[Float]].map(position => Json.toJson(position.map(_.toInt)))) + + override def writes(a: UpdateAction): JsValue = a match { + // Skeleton + case s: CreateTreeSkeletonAction => + Json.obj("name" -> "createTree", "value" -> Json.toJson(s)(CreateTreeSkeletonAction.jsonFormat)) + case s: DeleteTreeSkeletonAction => + Json.obj("name" -> "deleteTree", "value" -> Json.toJson(s)(DeleteTreeSkeletonAction.jsonFormat)) + case s: UpdateTreeSkeletonAction => + Json.obj("name" -> 
"updateTree", "value" -> Json.toJson(s)(UpdateTreeSkeletonAction.jsonFormat)) + case s: MergeTreeSkeletonAction => + Json.obj("name" -> "mergeTree", "value" -> Json.toJson(s)(MergeTreeSkeletonAction.jsonFormat)) + case s: MoveTreeComponentSkeletonAction => + Json.obj("name" -> "moveTreeComponent", "value" -> Json.toJson(s)(MoveTreeComponentSkeletonAction.jsonFormat)) + case s: CreateNodeSkeletonAction => + Json.obj("name" -> "createNode", "value" -> Json.toJson(s)(CreateNodeSkeletonAction.jsonFormat)) + case s: DeleteNodeSkeletonAction => + Json.obj("name" -> "deleteNode", "value" -> Json.toJson(s)(DeleteNodeSkeletonAction.jsonFormat)) + case s: UpdateNodeSkeletonAction => + Json.obj("name" -> "updateNode", "value" -> Json.toJson(s)(UpdateNodeSkeletonAction.jsonFormat)) + case s: CreateEdgeSkeletonAction => + Json.obj("name" -> "createEdge", "value" -> Json.toJson(s)(CreateEdgeSkeletonAction.jsonFormat)) + case s: DeleteEdgeSkeletonAction => + Json.obj("name" -> "deleteEdge", "value" -> Json.toJson(s)(DeleteEdgeSkeletonAction.jsonFormat)) + case s: UpdateTreeGroupsSkeletonAction => + Json.obj("name" -> "updateTreeGroups", "value" -> Json.toJson(s)(UpdateTreeGroupsSkeletonAction.jsonFormat)) + case s: UpdateTracingSkeletonAction => + Json.obj("name" -> "updateSkeletonTracing", "value" -> Json.toJson(s)(UpdateTracingSkeletonAction.jsonFormat)) + case s: UpdateTreeVisibilitySkeletonAction => + Json.obj("name" -> "updateTreeVisibility", + "value" -> Json.toJson(s)(UpdateTreeVisibilitySkeletonAction.jsonFormat)) + case s: UpdateTreeGroupVisibilitySkeletonAction => + Json.obj("name" -> "updateTreeGroupVisibility", + "value" -> Json.toJson(s)(UpdateTreeGroupVisibilitySkeletonAction.jsonFormat)) + case s: UpdateTreeEdgesVisibilitySkeletonAction => + Json.obj("name" -> "updateTreeEdgesVisibility", + "value" -> Json.toJson(s)(UpdateTreeEdgesVisibilitySkeletonAction.jsonFormat)) + case s: UpdateUserBoundingBoxesSkeletonAction => + Json.obj("name" -> 
"updateUserBoundingBoxesInSkeletonTracing", + "value" -> Json.toJson(s)(UpdateUserBoundingBoxesSkeletonAction.jsonFormat)) + case s: UpdateUserBoundingBoxVisibilitySkeletonAction => + Json.obj("name" -> "updateUserBoundingBoxVisibilityInSkeletonTracing", + "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibilitySkeletonAction.jsonFormat)) + + // Volume + case s: UpdateBucketVolumeAction => + Json.obj("name" -> "updateBucket", "value" -> Json.toJson(s)(UpdateBucketVolumeAction.jsonFormat)) + case s: UpdateTracingVolumeAction => + Json.obj("name" -> "updateVolumeTracing", "value" -> Json.toJson(s)(UpdateTracingVolumeAction.jsonFormat)) + case s: UpdateUserBoundingBoxesVolumeAction => + Json.obj("name" -> "updateUserBoundingBoxesInVolumeTracing", + "value" -> Json.toJson(s)(UpdateUserBoundingBoxesVolumeAction.jsonFormat)) + case s: UpdateUserBoundingBoxVisibilityVolumeAction => + Json.obj("name" -> "updateUserBoundingBoxVisibilityInVolumeTracing", + "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibilityVolumeAction.jsonFormat)) + case s: RemoveFallbackLayerVolumeAction => + Json.obj("name" -> "removeFallbackLayer", "value" -> Json.toJson(s)(RemoveFallbackLayerVolumeAction.jsonFormat)) + case s: ImportVolumeDataVolumeAction => + Json.obj("name" -> "importVolumeTracing", "value" -> Json.toJson(s)(ImportVolumeDataVolumeAction.jsonFormat)) + case s: CreateSegmentVolumeAction => + Json.obj("name" -> "createSegment", "value" -> Json.toJson(s)(CreateSegmentVolumeAction.jsonFormat)) + case s: UpdateSegmentVolumeAction => + Json.obj("name" -> "updateSegment", "value" -> Json.toJson(s)(UpdateSegmentVolumeAction.jsonFormat)) + case s: DeleteSegmentVolumeAction => + Json.obj("name" -> "deleteSegment", "value" -> Json.toJson(s)(DeleteSegmentVolumeAction.jsonFormat)) + case s: UpdateSegmentGroupsVolumeAction => + Json.obj("name" -> "updateSegmentGroups", "value" -> Json.toJson(s)(UpdateSegmentGroupsVolumeAction.jsonFormat)) + case s: CompactVolumeUpdateAction => 
Json.toJson(s)(CompactVolumeUpdateAction.compactVolumeUpdateActionFormat) + case s: UpdateMappingNameVolumeAction => + Json.obj("name" -> "updateMappingName", "value" -> Json.toJson(s)(UpdateMappingNameVolumeAction.jsonFormat)) + + // Editable Mapping + case s: SplitAgglomerateUpdateAction => + Json.obj("name" -> "splitAgglomerate", "value" -> Json.toJson(s)(SplitAgglomerateUpdateAction.jsonFormat)) + case s: MergeAgglomerateUpdateAction => + Json.obj("name" -> "mergeAgglomerate", "value" -> Json.toJson(s)(MergeAgglomerateUpdateAction.jsonFormat)) + + // Annotation + case s: AddLayerAnnotationAction => + Json.obj("name" -> "addLayerToAnnotation", "value" -> Json.toJson(s)(AddLayerAnnotationAction.jsonFormat)) + case s: DeleteLayerAnnotationAction => + Json.obj("name" -> "deleteLayerFromAnnotation", + "value" -> Json.toJson(s)(DeleteLayerAnnotationAction.jsonFormat)) + case s: UpdateLayerMetadataAnnotationAction => + Json.obj("name" -> "updateLayerMetadata", + "value" -> Json.toJson(s)(UpdateLayerMetadataAnnotationAction.jsonFormat)) + case s: UpdateMetadataAnnotationAction => + Json.obj("name" -> "updateMetadataOfAnnotation", + "value" -> Json.toJson(s)(UpdateMetadataAnnotationAction.jsonFormat)) + case s: RevertToVersionAnnotationAction => + Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionAnnotationAction.jsonFormat)) + case s: ResetToBaseAnnotationAction => + Json.obj("name" -> "resetToBase", "value" -> Json.toJson(s)(ResetToBaseAnnotationAction.jsonFormat)) + case s: UpdateTdCameraAnnotationAction => + Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCameraAnnotationAction.jsonFormat)) + } + } +} + +case class UpdateActionGroup(version: Long, + timestamp: Long, + authorId: Option[String], + actions: List[UpdateAction], + stats: Option[JsObject], + info: Option[String], + transactionId: String, + transactionGroupCount: Int, + transactionGroupIndex: Int) { + + def significantChangesCount: Int = 
actions.count(!_.isViewOnlyChange) + def viewChangesCount: Int = actions.count(_.isViewOnlyChange) +} + +object UpdateActionGroup { + implicit val jsonFormat: OFormat[UpdateActionGroup] = Json.format[UpdateActionGroup] +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala new file mode 100644 index 00000000000..de77458a9e3 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala @@ -0,0 +1,32 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import collections.SequenceUtils + +trait UpdateGroupHandling { + + def regroupByIsolationSensitiveActions( + updateActionGroupsWithVersions: List[(Long, List[UpdateAction])]): List[(Long, List[UpdateAction])] = { + val splitGroupLists: List[List[(Long, List[UpdateAction])]] = + SequenceUtils.splitAndIsolate(updateActionGroupsWithVersions.reverse)(actionGroup => + actionGroup._2.exists(updateAction => isIsolationSensitiveAction(updateAction))) + // TODO assert that the *groups* that contain revert actions contain nothing else + // TODO test this + + splitGroupLists.flatMap { groupsToConcatenate: List[(Long, List[UpdateAction])] => + concatenateUpdateActionGroups(groupsToConcatenate) + } + } + + private def concatenateUpdateActionGroups( + groups: List[(Long, List[UpdateAction])]): Option[(Long, List[UpdateAction])] = { + val updates = groups.flatMap(_._2) + val targetVersionOpt: Option[Long] = groups.map(_._1).lastOption + targetVersionOpt.map(targetVersion => (targetVersion, updates)) + } + + private def isIsolationSensitiveAction(a: UpdateAction): Boolean = a match { + case _: RevertToVersionAnnotationAction => true + case _: AddLayerAnnotationAction => true + case _ => false + } +} diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala new file mode 100644 index 00000000000..a1e6fc9a3b7 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -0,0 +1,211 @@ +package com.scalableminds.webknossos.tracingstore.controllers + +import com.google.inject.Inject +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph +import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.controllers.Controller +import com.scalableminds.webknossos.datastore.services.{EditableMappingSegmentListResult, UserAccessRequest} +import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} +import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + EditableMappingService, + MinCutParameters, + NeighborsParameters +} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{UpdateMappingNameVolumeAction, VolumeTracingService} +import net.liftweb.common.{Box, Empty, Failure, Full} +import play.api.libs.json.Json +import play.api.mvc.{Action, AnyContent, PlayBodyParsers} + +import scala.concurrent.ExecutionContext + +class EditableMappingController @Inject()(volumeTracingService: VolumeTracingService, + annotationService: TSAnnotationService, + remoteWebknossosClient: TSRemoteWebknossosClient, + accessTokenService: TracingStoreAccessTokenService, + editableMappingService: EditableMappingService, + annotationTransactionService: 
AnnotationTransactionService)( + implicit ec: ExecutionContext, + bodyParsers: PlayBodyParsers) + extends Controller { + + def makeMappingEditable(tracingId: String): Action[AnyContent] = + Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + tracingMappingName <- tracing.mappingName ?~> "annotation.noMappingSet" + _ <- assertMappingIsNotLocked(tracing) + _ <- bool2Fox(volumeTracingService.volumeBucketsAreEmpty(tracingId)) ?~> "annotation.volumeBucketsNotEmpty" + editableMappingInfo <- editableMappingService.create(tracingId, baseMappingName = tracingMappingName) + volumeUpdate = UpdateMappingNameVolumeAction(Some(tracingId), + isEditable = Some(true), + isLocked = Some(true), + actionTracingId = tracingId, + actionTimestamp = Some(System.currentTimeMillis())) + _ <- annotationTransactionService + .handleSingleUpdateAction( // TODO replace this route by the update action only? 
+ annotationId, + tracing.version, + volumeUpdate) + infoJson = editableMappingService.infoJson(tracingId = tracingId, editableMappingInfo = editableMappingInfo) + } yield Ok(infoJson) + } + } + } + + private def assertMappingIsNotLocked(volumeTracing: VolumeTracing): Fox[Unit] = + bool2Fox(!volumeTracing.mappingIsLocked.getOrElse(false)) ?~> "annotation.mappingIsLocked" + + /*// TODO integrate all of this into annotation update + + def updateEditableMapping( + annotationId: String, + tracingId: String): Action[List[UpdateActionGroup]] = + Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => + accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId)) { + for { + tracing <- annotationService.findVolume(annotationId, tracingId) + mappingName <- tracing.mappingName.toFox + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + currentVersion <- editableMappingService.getClosestMaterializableVersionOrZero(mappingName, None) + _ <- bool2Fox(request.body.length == 1) ?~> "Editable mapping update request must contain exactly one update group" + updateGroup <- request.body.headOption.toFox + _ <- bool2Fox(updateGroup.version == currentVersion + 1) ?~> "version mismatch" + report = TracingUpdatesReport( + annotationId, // TODO integrate all of this into annotation update + timestamps = List(Instant(updateGroup.timestamp)), + statistics = None, + significantChangesCount = updateGroup.actions.length, + viewChangesCount = 0, + tokenContextForRequest.userTokenOpt + ) + _ <- remoteWebknossosClient.reportTracingUpdates(report) + remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + _ <- editableMappingService.update(mappingName, updateGroup, updateGroup.version, remoteFallbackLayer) + } yield Ok + } + } + */ + + def editableMappingInfo(tracingId: String, version: Option[Long]): Action[AnyContent] = + Action.async { implicit request => + log() { + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId, version) + infoJson = editableMappingService.infoJson(tracingId = tracingId, editableMappingInfo = editableMappingInfo) + } yield Ok(infoJson) + } + } + } + + def segmentIdsForAgglomerate(tracingId: String, agglomerateId: Long): Action[AnyContent] = + Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + agglomerateGraphBox: Box[AgglomerateGraph] <- editableMappingService + .getAgglomerateGraphForId(tracingId, tracing.version, agglomerateId) + .futureBox + segmentIds <- agglomerateGraphBox match { + case Full(agglomerateGraph) => Fox.successful(agglomerateGraph.segments) + case Empty => Fox.successful(List.empty) + case f: Failure => f.toFox + } + agglomerateIdIsPresent = agglomerateGraphBox.isDefined + } yield Ok(Json.toJson(EditableMappingSegmentListResult(segmentIds.toList, agglomerateIdIsPresent))) + } + } + } + + def agglomerateIdsForSegments(tracingId: String): Action[ListOfLong] = + Action.async(validateProto[ListOfLong]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + _ <- 
editableMappingService.assertTracingHasEditableMapping(tracing) + remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId, version = None) + relevantMapping: Map[Long, Long] <- editableMappingService.generateCombinedMappingForSegmentIds( + request.body.items.toSet, + editableMappingInfo, + tracing.version, + tracingId, + remoteFallbackLayer) + agglomerateIdsSorted = relevantMapping.toSeq.sortBy(_._1).map(_._2) + } yield Ok(ListOfLong(agglomerateIdsSorted).toByteArray) + } + } + } + + def agglomerateGraphMinCut(tracingId: String): Action[MinCutParameters] = + Action.async(validateJson[MinCutParameters]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId) + edges <- editableMappingService.agglomerateGraphMinCut(tracingId, + tracing.version, + editableMappingInfo, + request.body, + remoteFallbackLayer) + } yield Ok(Json.toJson(edges)) + } + } + } + + def agglomerateGraphNeighbors(tracingId: String): Action[NeighborsParameters] = + Action.async(validateJson[NeighborsParameters]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + remoteFallbackLayer <- 
volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId) + (segmentId, edges) <- editableMappingService.agglomerateGraphNeighbors(tracingId, + editableMappingInfo, + tracing.version, + request.body, + remoteFallbackLayer) + } yield Ok(Json.obj("segmentId" -> segmentId, "neighbors" -> Json.toJson(edges))) + } + } + } + + def agglomerateSkeleton(tracingId: String, agglomerateId: Long): Action[AnyContent] = + Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Cannot query agglomerate skeleton for volume annotation" + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId) + remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + agglomerateSkeletonBytes <- editableMappingService.getAgglomerateSkeletonWithFallback(tracingId, + tracing.version, + editableMappingInfo, + remoteFallbackLayer, + agglomerateId) + } yield Ok(agglomerateSkeletonBytes) + } + } +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index f56b6d0d26e..431bc0ead7e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -6,23 +6,26 @@ import com.scalableminds.util.tools.Fox import 
com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} import com.scalableminds.webknossos.datastore.services.UserAccessRequest import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService +import com.scalableminds.webknossos.tracingstore.tracings.{TracingId, TracingSelector} import com.scalableminds.webknossos.tracingstore.tracings.skeleton._ -import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} -import net.liftweb.common.Empty import play.api.i18n.Messages import play.api.libs.json.Json +import com.scalableminds.webknossos.datastore.controllers.Controller import play.api.mvc.{Action, AnyContent, PlayBodyParsers} +import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import scala.concurrent.ExecutionContext -class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingService, - val remoteWebknossosClient: TSRemoteWebknossosClient, - val accessTokenService: TracingStoreAccessTokenService, - val slackNotificationService: TSSlackNotificationService)( +class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracingService, + remoteWebknossosClient: TSRemoteWebknossosClient, + annotationService: TSAnnotationService, + accessTokenService: TracingStoreAccessTokenService, + slackNotificationService: TSSlackNotificationService)( implicit val ec: ExecutionContext, val bodyParsers: PlayBodyParsers) - extends TracingController[SkeletonTracing, SkeletonTracings] { + extends Controller { implicit val tracingsCompanion: SkeletonTracings.type = SkeletonTracings @@ -35,72 +38,107 @@ class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingSer implicit def unpackMultiple(tracings: SkeletonTracings): 
List[Option[SkeletonTracing]] = tracings.tracings.toList.map(_.tracing) - def mergedFromContents(token: Option[String], persist: Boolean): Action[SkeletonTracings] = - Action.async(validateProto[SkeletonTracings]) { implicit request => + def save(): Action[SkeletonTracing] = Action.async(validateProto[SkeletonTracing]) { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + val tracing = request.body + skeletonTracingService.save(tracing, None, 0).map { newId => + Ok(Json.toJson(newId)) + } + } + } + } + } + + def saveMultiple(): Action[SkeletonTracings] = Action.async(validateProto[SkeletonTracings]) { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + val savedIds = Fox.sequence(request.body.map { tracingOpt: Option[SkeletonTracing] => + tracingOpt match { + case Some(tracing) => skeletonTracingService.save(tracing, None, 0).map(Some(_)) + case _ => Fox.successful(None) + } + }) + savedIds.map(id => Ok(Json.toJson(id))) + } + } + } + } + + def get(tracingId: String, version: Option[Long]): Action[AnyContent] = + Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { - val tracings: List[Option[SkeletonTracing]] = request.body + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - mergedTracing <- Fox.box2Fox(tracingService.merge(tracings.flatten, MergedVolumeStats.empty(), Empty)) - processedTracing = tracingService.remapTooLargeTreeIds(mergedTracing) - newId <- tracingService.save(processedTracing, None, processedTracing.version, toCache = !persist) - } yield Ok(Json.toJson(newId)) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- 
annotationService.findSkeleton(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( + "tracing.notFound") + } yield Ok(tracing.toByteArray).as(protobufMimeType) } } } - def duplicate(token: Option[String], - tracingId: String, - version: Option[Long], - fromTask: Option[Boolean], - editPosition: Option[String], - editRotation: Option[String], - boundingBox: Option[String]): Action[AnyContent] = - Action.async { implicit request => + def getMultiple: Action[List[Option[TracingSelector]]] = + Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { - tracing <- tracingService.find(tracingId, version, applyUpdates = true) ?~> Messages("tracing.notFound") - editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) - editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) - boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) - newId <- tracingService.duplicate(tracing, - fromTask.getOrElse(false), - editPositionParsed, - editRotationParsed, - boundingBoxParsed) + tracings <- annotationService.findMultipleSkeletons(request.body, applyUpdates = true) } yield { - Ok(Json.toJson(newId)) + Ok(tracings.toByteArray).as(protobufMimeType) } } } } - def updateActionLog(token: Option[String], - tracingId: String, - newestVersion: Option[Long], - oldestVersion: Option[Long]): Action[AnyContent] = Action.async { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - updateLog <- tracingService.updateActionLog(tracingId, newestVersion, oldestVersion) - } yield { - Ok(updateLog) + def mergedFromContents(persist: Boolean): Action[SkeletonTracings] = + 
Action.async(validateProto[SkeletonTracings]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + val tracings: List[Option[SkeletonTracing]] = request.body + for { + mergedTracing <- Fox.box2Fox(skeletonTracingService.merge(tracings.flatten)) + processedTracing = skeletonTracingService.remapTooLargeTreeIds(mergedTracing) + newId <- skeletonTracingService.save(processedTracing, + None, + processedTracing.version, + toTemporaryStore = !persist) + } yield Ok(Json.toJson(newId)) } } } - } - def updateActionStatistics(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { - implicit request => + // Used in task creation. History is dropped. Caller is responsible to create and save a matching AnnotationProto object + def duplicate(tracingId: String, + editPosition: Option[String], + editRotation: Option[String], + boundingBox: Option[String]): Action[AnyContent] = + Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - statistics <- tracingService.updateActionStatistics(tracingId) - } yield { - Ok(statistics) + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) + editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) + boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) + newestSourceVersion <- annotationService.currentMaterializableVersion(annotationId) + newTracingId <- annotationService.duplicateSkeletonTracing( + annotationId, + sourceTracingId = tracingId, + sourceVersion = newestSourceVersion, + newTracingId = TracingId.generate, + newVersion = 0, + 
editPosition = editPositionParsed, + editRotation = editRotationParsed, + boundingBox = boundingBoxParsed, + isFromTask = false + ) + } yield Ok(Json.toJson(newTracingId)) } } } - } + } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala new file mode 100644 index 00000000000..0a8fb5081fe --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -0,0 +1,229 @@ +package com.scalableminds.webknossos.tracingstore.controllers + +import collections.SequenceUtils +import com.google.inject.Inject +import com.scalableminds.util.geometry.BoundingBox +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.Annotation.{ + AnnotationLayerProto, + AnnotationLayerTypeProto, + AnnotationProto +} +import com.scalableminds.webknossos.datastore.controllers.Controller +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer +import com.scalableminds.webknossos.datastore.services.UserAccessRequest +import com.scalableminds.webknossos.tracingstore.tracings.{ + KeyValueStoreImplicits, + TracingDataStore, + TracingId, + TracingSelector +} +import com.scalableminds.webknossos.tracingstore.TracingStoreAccessTokenService +import com.scalableminds.webknossos.tracingstore.annotation.{ + AnnotationTransactionService, + ResetToBaseAnnotationAction, + TSAnnotationService, + UpdateActionGroup +} +import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService +import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService +import net.liftweb.common.{Empty, Failure, Full} +import play.api.i18n.Messages +import play.api.libs.json.Json +import 
play.api.mvc.{Action, AnyContent, PlayBodyParsers} + +import scala.concurrent.ExecutionContext + +class TSAnnotationController @Inject()( + accessTokenService: TracingStoreAccessTokenService, + slackNotificationService: TSSlackNotificationService, + annotationService: TSAnnotationService, + annotationTransactionService: AnnotationTransactionService, + skeletonTracingService: SkeletonTracingService, + volumeTracingService: VolumeTracingService, + tracingDataStore: TracingDataStore)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) + extends Controller + with KeyValueStoreImplicits { + + def save(annotationId: String): Action[AnnotationProto] = + Action.async(validateProto[AnnotationProto]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + for { + _ <- tracingDataStore.annotations.put(annotationId, 0L, request.body) + } yield Ok + } + } + } + + def update(annotationId: String): Action[List[UpdateActionGroup]] = + Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeAnnotation(annotationId)) { + for { + _ <- annotationTransactionService.handleUpdateGroups(annotationId, request.body) + } yield Ok + } + } + } + } + + def updateActionLog(annotationId: String, + newestVersion: Option[Long] = None, + oldestVersion: Option[Long] = None): Action[AnyContent] = Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { + for { + newestMaterializableVersion <- annotationService.currentMaterializableVersion(annotationId) + updateLog <- annotationService.updateActionLog(annotationId, + newestVersion.getOrElse(newestMaterializableVersion), + oldestVersion.getOrElse(0)) + } yield Ok(updateLog) + } + } + } + + def newestVersion(annotationId: String): 
Action[AnyContent] = Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { + for { + newestVersion <- annotationService.currentMaterializableVersion(annotationId) + } yield JsonOk(Json.obj("version" -> newestVersion)) + } + } + } + + def updateActionStatistics(tracingId: String): Action[AnyContent] = Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + statistics <- annotationService.updateActionStatistics(tracingId) + } yield Ok(statistics) + } + } + } + + def get(annotationId: String, version: Option[Long]): Action[AnyContent] = + Action.async { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { + for { + annotationProto <- annotationService.get(annotationId, version) + } yield Ok(annotationProto.toByteArray).as(protobufMimeType) + } + } + } + } + + def duplicate(annotationId: String, + newAnnotationId: String, + version: Option[Long], + isFromTask: Boolean, + datasetBoundingBox: Option[String]): Action[AnyContent] = + Action.async { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { + for { + datasetBoundingBoxParsed <- Fox.runOptional(datasetBoundingBox)(BoundingBox.fromLiteral) + annotationProto <- annotationService.duplicate(annotationId, + newAnnotationId, + version, + isFromTask, + datasetBoundingBoxParsed) + } yield Ok(annotationProto.toByteArray).as(protobufMimeType) + } + } + } + } + + def resetToBase(annotationId: String): Action[AnyContent] = + Action.async { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + for { + currentVersion <- annotationService.currentMaterializableVersion(annotationId) + _ <- annotationTransactionService.handleSingleUpdateAction(annotationId, + currentVersion, + ResetToBaseAnnotationAction()) + } yield Ok + } + } + } + } + + def mergedFromIds(persist: Boolean, newAnnotationId: String): Action[List[String]] = + Action.async(validateJson[List[String]]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + for { + annotations: Seq[AnnotationProto] <- annotationService.getMultiple(request.body) ?~> Messages( + "annotation.notFound") + skeletonLayers = annotations.flatMap( + _.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Skeleton)) + volumeLayers = annotations.flatMap(_.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Volume)) + newSkeletonId = TracingId.generate + newVolumeId = TracingId.generate + mergedSkeletonName = SequenceUtils + .findUniqueElement(skeletonLayers.map(_.name)) + .getOrElse(AnnotationLayer.defaultSkeletonLayerName) + mergedVolumeName = SequenceUtils + .findUniqueElement(volumeLayers.map(_.name)) + .getOrElse(AnnotationLayer.defaultVolumeLayerName) + // TODO Merge updates? if so, iron out reverts? 
+ // TODO Merge editable mappings + volumeTracings <- annotationService + .findMultipleVolumes(volumeLayers.map { l => + Some(TracingSelector(l.tracingId)) + }, applyUpdates = true) + .map(_.flatten) + mergedVolumeStats <- volumeTracingService.mergeVolumeData(volumeLayers.map(_.tracingId), + volumeTracings, + newVolumeId, + newVersion = 0L, + persist = persist) + mergeEditableMappingsResultBox <- volumeTracingService + .mergeEditableMappings(newVolumeId, volumeTracings.zip(volumeLayers.map(_.tracingId)), persist) + .futureBox + newEditableMappingIdOpt <- mergeEditableMappingsResultBox match { + case Full(()) => Fox.successful(Some(newVolumeId)) + case Empty => Fox.successful(None) + case f: Failure => f.toFox + } + mergedVolumeOpt <- Fox.runIf(volumeTracings.nonEmpty)( + volumeTracingService.merge(volumeTracings, mergedVolumeStats, newEditableMappingIdOpt)) + _ <- Fox.runOptional(mergedVolumeOpt)( + volumeTracingService.save(_, Some(newVolumeId), version = 0, toTemporaryStore = !persist)) + skeletonTracings <- annotationService + .findMultipleSkeletons(skeletonLayers.map { l => + Some(TracingSelector(l.tracingId)) + }, applyUpdates = true) + .map(_.flatten) + mergedSkeletonOpt <- Fox.runIf(skeletonTracings.nonEmpty)( + skeletonTracingService.merge(skeletonTracings).toFox) + mergedSkeletonLayerOpt = mergedSkeletonOpt.map( + _ => + AnnotationLayerProto(name = mergedSkeletonName, + tracingId = newSkeletonId, + `type` = AnnotationLayerTypeProto.Skeleton)) + mergedVolumeLayerOpt = mergedVolumeOpt.map( + _ => + AnnotationLayerProto(name = mergedVolumeName, + tracingId = newVolumeId, + `type` = AnnotationLayerTypeProto.Volume)) + mergedLayers = Seq(mergedSkeletonLayerOpt, mergedVolumeLayerOpt).flatten + firstAnnotation <- annotations.headOption.toFox + mergedAnnotation = firstAnnotation.withAnnotationLayers(mergedLayers) + _ <- Fox.runOptional(mergedSkeletonOpt)( + skeletonTracingService.save(_, Some(newSkeletonId), version = 0L, toTemporaryStore = !persist)) + _ <- 
tracingDataStore.annotations.put(newAnnotationId, 0L, mergedAnnotation) + } yield Ok(mergedAnnotation.toByteArray).as(protobufMimeType) + } + } + } + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala deleted file mode 100644 index 4644d58e6a1..00000000000 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ /dev/null @@ -1,295 +0,0 @@ -package com.scalableminds.webknossos.tracingstore.controllers - -import com.scalableminds.util.time.Instant -import com.scalableminds.util.tools.Fox -import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} -import com.scalableminds.webknossos.datastore.controllers.Controller -import com.scalableminds.webknossos.datastore.services.UserAccessRequest -import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService -import com.scalableminds.webknossos.tracingstore.tracings.{ - TracingSelector, - TracingService, - UpdateAction, - UpdateActionGroup -} -import com.scalableminds.webknossos.tracingstore.{ - TSRemoteWebknossosClient, - TracingStoreAccessTokenService, - TracingUpdatesReport -} -import net.liftweb.common.{Empty, Failure, Full} -import play.api.i18n.Messages -import play.api.libs.json.{Format, Json} -import play.api.mvc.{Action, AnyContent, PlayBodyParsers} -import scalapb.{GeneratedMessage, GeneratedMessageCompanion} - -import scala.concurrent.ExecutionContext -import scala.concurrent.duration._ - -trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends Controller { - - def tracingService: TracingService[T] - - def remoteWebknossosClient: TSRemoteWebknossosClient - - def accessTokenService: TracingStoreAccessTokenService - - def slackNotificationService: TSSlackNotificationService - - implicit val tracingCompanion: 
GeneratedMessageCompanion[T] = tracingService.tracingCompanion - - implicit val tracingsCompanion: GeneratedMessageCompanion[Ts] - - implicit def unpackMultiple(tracings: Ts): List[Option[T]] - - implicit def packMultiple(tracings: List[T]): Ts - - implicit def packMultipleOpt(tracings: List[Option[T]]): Ts - - implicit val updateActionJsonFormat: Format[UpdateAction[T]] = tracingService.updateActionJsonFormat - - implicit val ec: ExecutionContext - - implicit val bodyParsers: PlayBodyParsers - - override def allowRemoteOrigin: Boolean = true - - def save(token: Option[String]): Action[T] = Action.async(validateProto[T]) { implicit request => - log() { - logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { - val tracing = request.body - tracingService.save(tracing, None, 0).map { newId => - Ok(Json.toJson(newId)) - } - } - } - } - } - - def saveMultiple(token: Option[String]): Action[Ts] = Action.async(validateProto[Ts]) { implicit request => - log() { - logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { - val savedIds = Fox.sequence(request.body.map { tracingOpt: Option[T] => - tracingOpt match { - case Some(tracing) => tracingService.save(tracing, None, 0).map(Some(_)) - case _ => Fox.successful(None) - } - }) - savedIds.map(id => Ok(Json.toJson(id))) - } - } - } - } - - def get(token: Option[String], tracingId: String, version: Option[Long]): Action[AnyContent] = Action.async { - implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId, version, applyUpdates = true) ?~> Messages("tracing.notFound") - } yield { - Ok(tracing.toByteArray).as(protobufMimeType) - } - } - } - } - - def getMultiple(token: Option[String]): 
Action[List[Option[TracingSelector]]] = - Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { - for { - tracings <- tracingService.findMultiple(request.body, applyUpdates = true) - } yield { - Ok(tracings.toByteArray).as(protobufMimeType) - } - } - } - } - - def newestVersion(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), token) { - for { - newestVersion <- tracingService.currentVersion(tracingId) ?~> "annotation.getNewestVersion.failed" - } yield { - JsonOk(Json.obj("version" -> newestVersion)) - } - } - } - } - - def update(token: Option[String], tracingId: String): Action[List[UpdateActionGroup[T]]] = - Action.async(validateJson[List[UpdateActionGroup[T]]]) { implicit request => - log() { - logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId), urlOrHeaderToken(token, request)) { - val updateGroups = request.body - if (updateGroups.forall(_.transactionGroupCount == 1)) { - commitUpdates(tracingId, updateGroups, urlOrHeaderToken(token, request)).map(_ => Ok) - } else { - updateGroups - .foldLeft(tracingService.currentVersion(tracingId)) { (currentCommittedVersionFox, updateGroup) => - handleUpdateGroupForTransaction(tracingId, - currentCommittedVersionFox, - updateGroup, - urlOrHeaderToken(token, request)) - } - .map(_ => Ok) - } - } - } - } - } - - private val transactionGroupExpiry: FiniteDuration = 24 hours - - private def handleUpdateGroupForTransaction(tracingId: String, - previousVersionFox: Fox[Long], - updateGroup: UpdateActionGroup[T], - userToken: Option[String]): Fox[Long] = - for { - previousCommittedVersion: Long <- previousVersionFox - result <- if (previousCommittedVersion + 1 == 
updateGroup.version) { - if (updateGroup.transactionGroupCount == updateGroup.transactionGroupIndex + 1) { - // Received the last group of this transaction - commitWithPending(tracingId, updateGroup, userToken) - } else { - tracingService - .saveUncommitted(tracingId, - updateGroup.transactionId, - updateGroup.transactionGroupIndex, - updateGroup.version, - updateGroup, - transactionGroupExpiry) - .flatMap( - _ => - tracingService.saveToHandledGroupIdStore(tracingId, - updateGroup.transactionId, - updateGroup.version, - updateGroup.transactionGroupIndex)) - .map(_ => previousCommittedVersion) // no updates have been committed, do not yield version increase - } - } else { - failUnlessAlreadyHandled(updateGroup, tracingId, previousCommittedVersion) - } - } yield result - - // For an update group (that is the last of a transaction), fetch all previous uncommitted for the same transaction - // and commit them all. - private def commitWithPending(tracingId: String, - updateGroup: UpdateActionGroup[T], - userToken: Option[String]): Fox[Long] = - for { - previousActionGroupsToCommit <- tracingService.getAllUncommittedFor(tracingId, updateGroup.transactionId) - _ <- bool2Fox( - previousActionGroupsToCommit - .exists(_.transactionGroupIndex == 0) || updateGroup.transactionGroupCount == 1) ?~> s"Trying to commit a transaction without a group that has transactionGroupIndex 0." 
- concatenatedGroup = concatenateUpdateGroupsOfTransaction(previousActionGroupsToCommit, updateGroup) - commitResult <- commitUpdates(tracingId, List(concatenatedGroup), userToken) - _ <- tracingService.removeAllUncommittedFor(tracingId, updateGroup.transactionId) - } yield commitResult - - private def concatenateUpdateGroupsOfTransaction(previousActionGroups: List[UpdateActionGroup[T]], - lastActionGroup: UpdateActionGroup[T]): UpdateActionGroup[T] = - if (previousActionGroups.isEmpty) lastActionGroup - else { - val allActionGroups = previousActionGroups :+ lastActionGroup - UpdateActionGroup[T]( - version = lastActionGroup.version, - timestamp = lastActionGroup.timestamp, - authorId = lastActionGroup.authorId, - actions = allActionGroups.flatMap(_.actions), - stats = lastActionGroup.stats, // the latest stats do count - info = lastActionGroup.info, // frontend sets this identically for all groups of transaction - transactionId = f"${lastActionGroup.transactionId}-concatenated", - transactionGroupCount = 1, - transactionGroupIndex = 0, - ) - } - - // Perform version check and commit the passed updates - private def commitUpdates(tracingId: String, - updateGroups: List[UpdateActionGroup[T]], - userToken: Option[String]): Fox[Long] = { - val currentCommittedVersion: Fox[Long] = tracingService.currentVersion(tracingId) - val report = TracingUpdatesReport( - tracingId, - timestamps = updateGroups.map(g => Instant(g.timestamp)), - statistics = updateGroups.flatMap(_.stats).lastOption, - significantChangesCount = updateGroups.map(_.significantChangesCount).sum, - viewChangesCount = updateGroups.map(_.viewChangesCount).sum, - userToken - ) - remoteWebknossosClient.reportTracingUpdates(report).flatMap { _ => - updateGroups.foldLeft(currentCommittedVersion) { (previousVersion, updateGroup) => - previousVersion.flatMap { prevVersion: Long => - if (prevVersion + 1 == updateGroup.version) { - tracingService - .handleUpdateGroup(tracingId, updateGroup, prevVersion, userToken) 
- .flatMap( - _ => - tracingService.saveToHandledGroupIdStore(tracingId, - updateGroup.transactionId, - updateGroup.version, - updateGroup.transactionGroupIndex)) - .map(_ => updateGroup.version) - } else failUnlessAlreadyHandled(updateGroup, tracingId, prevVersion) - } - } - } - } - - /* If this update group has already been “handled” (successfully saved as either committed or uncommitted), - * ignore it silently. This is in case the frontend sends a retry if it believes a save to be unsuccessful - * despite the backend receiving it just fine. - */ - private def failUnlessAlreadyHandled(updateGroup: UpdateActionGroup[T], - tracingId: String, - previousVersion: Long): Fox[Long] = { - val errorMessage = s"Incorrect version. Expected: ${previousVersion + 1}; Got: ${updateGroup.version}" - for { - _ <- Fox.assertTrue( - tracingService.handledGroupIdStoreContains(tracingId, - updateGroup.transactionId, - updateGroup.version, - updateGroup.transactionGroupIndex)) ?~> errorMessage ~> CONFLICT - } yield updateGroup.version - } - - def mergedFromIds(token: Option[String], persist: Boolean): Action[List[Option[TracingSelector]]] = - Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { - for { - tracingOpts <- tracingService.findMultiple(request.body, applyUpdates = true) ?~> Messages( - "tracing.notFound") - tracingsWithIds = tracingOpts.zip(request.body).flatMap { - case (Some(tracing), Some(selector)) => Some((tracing, selector.tracingId)) - case _ => None - } - newId = tracingService.generateTracingId - mergedVolumeStats <- tracingService.mergeVolumeData(request.body.flatten, - tracingsWithIds.map(_._1), - newId, - newVersion = 0L, - toCache = !persist, - token) - newEditableMappingIdBox <- tracingService - .mergeEditableMappings(tracingsWithIds, urlOrHeaderToken(token, request)) - .futureBox - newEditableMappingIdOpt <- 
newEditableMappingIdBox match { - case Full(newEditableMappingId) => Fox.successful(Some(newEditableMappingId)) - case Empty => Fox.successful(None) - case f: Failure => f.toFox - } - mergedTracing <- Fox.box2Fox( - tracingService.merge(tracingsWithIds.map(_._1), mergedVolumeStats, newEditableMappingIdOpt)) - _ <- tracingService.save(mergedTracing, Some(newId), version = 0, toCache = !persist) - } yield Ok(Json.toJson(newId)) - } - } - } -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index cc9090c79ed..b3683464bff 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -2,57 +2,24 @@ package com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} -import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.ExtendedTypes.ExtendedString import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph -import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} +import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto -import com.scalableminds.webknossos.datastore.helpers.{ - GetSegmentIndexParameters, - ProtoGeometryImplicits, - SegmentStatisticsParameters -} +import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} +import com.scalableminds.webknossos.datastore.helpers.{GetSegmentIndexParameters, 
ProtoGeometryImplicits, SegmentStatisticsParameters} import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer} -import com.scalableminds.webknossos.datastore.models.{ - LengthUnit, - VoxelSize, - WebknossosAdHocMeshRequest, - WebknossosDataRequest -} +import com.scalableminds.webknossos.datastore.models.{LengthUnit, VoxelSize, WebknossosAdHocMeshRequest, WebknossosDataRequest} import com.scalableminds.webknossos.datastore.rpc.RPC -import com.scalableminds.webknossos.datastore.services.{ - EditableMappingSegmentListResult, - FullMeshRequest, - UserAccessRequest -} +import com.scalableminds.webknossos.datastore.services.{FullMeshRequest, UserAccessRequest} +import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService -import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ - EditableMappingService, - EditableMappingUpdateActionGroup, - MinCutParameters, - NeighborsParameters -} -import com.scalableminds.webknossos.tracingstore.tracings.volume.{ - MergedVolumeStats, - MagRestrictions, - TSFullMeshService, - UpdateMappingNameAction, - VolumeDataZipFormat, - VolumeSegmentIndexService, - VolumeSegmentStatisticsService, - VolumeTracingService -} -import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, UpdateActionGroup} -import com.scalableminds.webknossos.tracingstore.{ - TSRemoteDatastoreClient, - TSRemoteWebknossosClient, - TracingStoreAccessTokenService, - TracingStoreConfig, - TracingUpdatesReport -} -import net.liftweb.common.{Box, Empty, Failure, Full} +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ImportVolumeDataVolumeAction, MagRestrictions, MergedVolumeStats, TSFullMeshService, VolumeDataZipFormat, 
VolumeSegmentIndexService, VolumeSegmentStatisticsService, VolumeTracingService} +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingId, TracingSelector} +import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient, TracingStoreAccessTokenService, TracingStoreConfig} +import net.liftweb.common.Empty import play.api.i18n.Messages import play.api.libs.Files.TemporaryFile import play.api.libs.json.Json @@ -63,18 +30,20 @@ import java.nio.{ByteBuffer, ByteOrder} import scala.concurrent.ExecutionContext class VolumeTracingController @Inject()( - val tracingService: VolumeTracingService, + val volumeTracingService: VolumeTracingService, val config: TracingStoreConfig, val remoteDataStoreClient: TSRemoteDatastoreClient, val accessTokenService: TracingStoreAccessTokenService, + annotationService: TSAnnotationService, editableMappingService: EditableMappingService, val slackNotificationService: TSSlackNotificationService, val remoteWebknossosClient: TSRemoteWebknossosClient, + annotationTransactionService: AnnotationTransactionService, volumeSegmentStatisticsService: VolumeSegmentStatisticsService, volumeSegmentIndexService: VolumeSegmentIndexService, fullMeshService: TSFullMeshService, val rpc: RPC)(implicit val ec: ExecutionContext, val bodyParsers: PlayBodyParsers) - extends TracingController[VolumeTracing, VolumeTracings] + extends Controller with ProtoGeometryImplicits with KeyValueStoreImplicits { @@ -89,78 +58,134 @@ class VolumeTracingController @Inject()( implicit def unpackMultiple(tracings: VolumeTracings): List[Option[VolumeTracing]] = tracings.tracings.toList.map(_.tracing) - def initialData(token: Option[String], - tracingId: String, - minMag: Option[Int], - maxMag: Option[Int]): Action[AnyContent] = + def save(): Action[VolumeTracing] = Action.async(validateProto[VolumeTracing]) { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + val tracing = request.body + volumeTracingService.save(tracing, None, 0).map { newId => + Ok(Json.toJson(newId)) + } + } + } + } + } + + def saveMultiple(): Action[VolumeTracings] = Action.async(validateProto[VolumeTracings]) { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + val savedIds = Fox.sequence(request.body.map { tracingOpt: Option[VolumeTracing] => + tracingOpt match { + case Some(tracing) => volumeTracingService.save(tracing, None, 0).map(Some(_)) + case _ => Fox.successful(None) + } + }) + savedIds.map(id => Ok(Json.toJson(id))) + } + } + } + } + + def get(tracingId: String, version: Option[Long]): Action[AnyContent] = + Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( + "tracing.notFound") + } yield Ok(tracing.toByteArray).as(protobufMimeType) + } + } + } + + def getMultiple: Action[List[Option[TracingSelector]]] = + Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + for { + tracings <- annotationService.findMultipleVolumes(request.body, applyUpdates = true) + } yield { + Ok(tracings.toByteArray).as(protobufMimeType) + } + } + } + } + + def initialData(tracingId: String, minMag: Option[Int], maxMag: Option[Int]): Action[AnyContent] = Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") magRestrictions = MagRestrictions(minMag, maxMag) - mags <- tracingService.initializeWithData(tracingId, tracing, initialData, magRestrictions, token).toFox - _ <- tracingService.updateMagList(tracingId, tracing, mags) + mags <- volumeTracingService.initializeWithData(tracingId, tracing, initialData, magRestrictions).toFox + _ <- volumeTracingService.updateMagList(tracingId, tracing, mags) } yield Ok(Json.toJson(tracingId)) } } } } - def mergedFromContents(token: Option[String], persist: Boolean): Action[VolumeTracings] = + def mergedFromContents(persist: Boolean): Action[VolumeTracings] = Action.async(validateProto[VolumeTracings]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { _ <- Fox.successful(()) tracings = request.body shouldCreateSegmentIndex = volumeSegmentIndexService.shouldCreateSegmentIndexForMerged(tracings.flatten) - mt <- tracingService.merge(tracings.flatten, MergedVolumeStats.empty(shouldCreateSegmentIndex), Empty).toFox + mt <- volumeTracingService + .merge(tracings.flatten, MergedVolumeStats.empty(shouldCreateSegmentIndex), Empty) + .toFox // segment lists for multi-volume uploads are not supported yet, compare https://github.com/scalableminds/webknossos/issues/6887 mergedTracing = mt.copy(segments = List.empty) - newId <- tracingService.save(mergedTracing, None, mergedTracing.version, toCache = !persist) + newId <- volumeTracingService.save(mergedTracing, 
None, mergedTracing.version, toTemporaryStore = !persist) } yield Ok(Json.toJson(newId)) } } } - def initialDataMultiple(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { - implicit request => + def initialDataMultiple(tracingId: String): Action[AnyContent] = + Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - mags <- tracingService.initializeWithDataMultiple(tracingId, tracing, initialData, token).toFox - _ <- tracingService.updateMagList(tracingId, tracing, mags) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") + mags <- volumeTracingService.initializeWithDataMultiple(tracingId, tracing, initialData).toFox + _ <- volumeTracingService.updateMagList(tracingId, tracing, mags) } yield Ok(Json.toJson(tracingId)) } } } - } + } - def allDataZip(token: Option[String], - tracingId: String, + def allDataZip(tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSizeFactor: Option[String], voxelSizeUnit: Option[String]): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId, version) ?~> Messages("tracing.notFound") + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- 
annotationService.findVolume(annotationId, tracingId, version) ?~> Messages("tracing.notFound") volumeDataZipFormatParsed <- VolumeDataZipFormat.fromString(volumeDataZipFormat).toFox voxelSizeFactorParsedOpt <- Fox.runOptional(voxelSizeFactor)(Vec3Double.fromUriLiteral) voxelSizeUnitParsedOpt <- Fox.runOptional(voxelSizeUnit)(LengthUnit.fromString) voxelSize = voxelSizeFactorParsedOpt.map(voxelSizeParsed => VoxelSize.fromFactorAndUnitWithDefault(voxelSizeParsed, voxelSizeUnitParsedOpt)) - data <- tracingService.allDataZip( + data <- volumeTracingService.allDataZip( tracingId, tracing, volumeDataZipFormatParsed, @@ -171,15 +196,17 @@ class VolumeTracingController @Inject()( } } - def data(token: Option[String], tracingId: String): Action[List[WebknossosDataRequest]] = + def data(tracingId: String): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - (data, indices) <- if (tracing.getHasEditableMapping) - editableMappingService.volumeData(tracing, tracingId, request.body, urlOrHeaderToken(token, request)) - else tracingService.data(tracingId, tracing, request.body) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") + (data, indices) <- if (tracing.getHasEditableMapping) { + val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) + editableMappingService.volumeData(mappingLayer, request.body) + } else volumeTracingService.data(tracingId, tracing, request.body) } yield Ok(data).withHeaders(getMissingBucketsHeaders(indices): _*) } } @@ -191,120 
+218,38 @@ class VolumeTracingController @Inject()( private def formatMissingBucketList(indices: List[Int]): String = "[" + indices.mkString(", ") + "]" - def duplicate(token: Option[String], - tracingId: String, - fromTask: Option[Boolean], - minMag: Option[Int], - maxMag: Option[Int], - downsample: Option[Boolean], - editPosition: Option[String], - editRotation: Option[String], - boundingBox: Option[String]): Action[AnyContent] = Action.async { implicit request => - log() { - logTime(slackNotificationService.noticeSlowRequest) { - val userToken = urlOrHeaderToken(token, request) - accessTokenService.validateAccess(UserAccessRequest.webknossos, userToken) { - for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - _ = logger.info(s"Duplicating volume tracing $tracingId...") - datasetBoundingBox = request.body.asJson.flatMap(_.validateOpt[BoundingBox].asOpt.flatten) - magRestrictions = MagRestrictions(minMag, maxMag) - editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) - editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) - boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) - remoteFallbackLayerOpt <- Fox.runIf(tracing.getHasEditableMapping)( - tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId)) - newEditableMappingId <- Fox.runIf(tracing.getHasEditableMapping)( - editableMappingService.duplicate(tracing.mappingName, version = None, remoteFallbackLayerOpt, userToken)) - (newId, newTracing) <- tracingService.duplicate( - tracingId, - tracing, - fromTask.getOrElse(false), - datasetBoundingBox, - magRestrictions, - editPositionParsed, - editRotationParsed, - boundingBoxParsed, - newEditableMappingId, - userToken - ) - _ <- Fox.runIfOptionTrue(downsample)(tracingService.downsample(newId, tracingId, newTracing, userToken)) - } yield Ok(Json.toJson(newId)) - } - } - } - } - - def importVolumeData(token: Option[String], tracingId: String): 
Action[MultipartFormData[TemporaryFile]] = + def importVolumeData(tracingId: String): Action[MultipartFormData[TemporaryFile]] = Action.async(parse.multipartFormData) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") currentVersion <- request.body.dataParts("currentVersion").headOption.flatMap(_.toIntOpt).toFox zipFile <- request.body.files.headOption.map(f => new File(f.ref.path.toString)).toFox - largestSegmentId <- tracingService.importVolumeData(tracingId, - tracing, - zipFile, - currentVersion, - urlOrHeaderToken(token, request)) + largestSegmentId <- volumeTracingService.importVolumeData(tracingId, tracing, zipFile, currentVersion) + _ <- annotationTransactionService.handleSingleUpdateAction( + annotationId, + tracing.version, + ImportVolumeDataVolumeAction(tracingId, Some(largestSegmentId))) } yield Ok(Json.toJson(largestSegmentId)) } } } - def addSegmentIndex(token: Option[String], tracingId: String, dryRun: Boolean): Action[AnyContent] = - Action.async { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) ?~> "tracing.notFound" - currentVersion <- tracingService.currentVersion(tracingId) - before = Instant.now - canAddSegmentIndex <- tracingService.checkIfSegmentIndexMayBeAdded(tracingId, tracing, token) - processedBucketCountOpt <- Fox.runIf(canAddSegmentIndex)( - tracingService.addSegmentIndex(tracingId, - tracing, - currentVersion, - urlOrHeaderToken(token, request), - dryRun)) ?~> "addSegmentIndex.failed" - 
currentVersionNew <- tracingService.currentVersion(tracingId) - _ <- Fox.runIf(!dryRun)(bool2Fox( - processedBucketCountOpt.isEmpty || currentVersionNew == currentVersion + 1L) ?~> "Version increment failed. Looks like someone edited the annotation layer in the meantime.") - duration = Instant.since(before) - _ = processedBucketCountOpt.foreach { processedBucketCount => - logger.info( - s"Added segment index (dryRun=$dryRun) for tracing $tracingId. Took $duration for $processedBucketCount buckets") - } - } yield Ok - } - } - } - - def updateActionLog(token: Option[String], - tracingId: String, - newestVersion: Option[Long] = None, - oldestVersion: Option[Long] = None): Action[AnyContent] = Action.async { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - updateLog <- tracingService.updateActionLog(tracingId, newestVersion, oldestVersion) - } yield Ok(updateLog) - } - } - } - - def requestAdHocMesh(token: Option[String], tracingId: String): Action[WebknossosAdHocMeshRequest] = + def requestAdHocMesh(tracingId: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { // The client expects the ad-hoc mesh as a flat float-array. Three consecutive floats form a 3D point, three // consecutive 3D points (i.e., nine floats) form a triangle. // There are no shared vertices between triangles. 
- tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - (vertices, neighbors) <- if (tracing.getHasEditableMapping) - editableMappingService.createAdHocMesh(tracing, tracingId, request.body, urlOrHeaderToken(token, request)) - else tracingService.createAdHocMesh(tracingId, request.body, urlOrHeaderToken(token, request)) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") + (vertices: Array[Float], neighbors: List[Int]) <- if (tracing.getHasEditableMapping) { + val editableMappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) + editableMappingService.createAdHocMesh(editableMappingLayer, request.body) + } else volumeTracingService.createAdHocMesh(tracingId, tracing, request.body) } yield { // We need four bytes for each float val responseBuffer = ByteBuffer.allocate(vertices.length * 4).order(ByteOrder.LITTLE_ENDIAN) @@ -314,11 +259,12 @@ class VolumeTracingController @Inject()( } } - def loadFullMeshStl(token: Option[String], tracingId: String): Action[FullMeshRequest] = + def loadFullMeshStl(tracingId: String): Action[FullMeshRequest] = Action.async(validateJson[FullMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - data: Array[Byte] <- fullMeshService.loadFor(token: Option[String], tracingId, request.body) ?~> "mesh.file.loadChunk.failed" + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + data: Array[Byte] <- fullMeshService.loadFor(annotationId, tracingId, request.body) ?~> "mesh.file.loadChunk.failed" } yield Ok(data) } } @@ -329,265 +275,64 @@ class VolumeTracingController @Inject()( private def formatNeighborList(neighbors: List[Int]): String = "[" + 
neighbors.mkString(", ") + "]" - def findData(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + def findData(tracingId: String): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - positionOpt <- tracingService.findData(tracingId) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + positionOpt <- volumeTracingService.findData(tracingId, tracing) } yield { Ok(Json.obj("position" -> positionOpt, "mag" -> positionOpt.map(_ => Vec3Int.ones))) } } } - def agglomerateSkeleton(token: Option[String], tracingId: String, agglomerateId: Long): Action[AnyContent] = - Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Cannot query agglomerate skeleton for volume annotation" - mappingName <- tracing.mappingName ?~> "annotation.agglomerateSkeleton.noMappingSet" - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - agglomerateSkeletonBytes <- editableMappingService.getAgglomerateSkeletonWithFallback( - mappingName, - remoteFallbackLayer, - agglomerateId, - urlOrHeaderToken(token, request)) - } yield Ok(agglomerateSkeletonBytes) - } - } - - def makeMappingEditable(token: Option[String], tracingId: String): Action[AnyContent] = - Action.async { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - tracingMappingName <- tracing.mappingName 
?~> "annotation.noMappingSet" - _ <- assertMappingIsNotLocked(tracing) - _ <- bool2Fox(tracingService.volumeBucketsAreEmpty(tracingId)) ?~> "annotation.volumeBucketsNotEmpty" - (editableMappingId, editableMappingInfo) <- editableMappingService.create( - baseMappingName = tracingMappingName) - volumeUpdate = UpdateMappingNameAction(Some(editableMappingId), - isEditable = Some(true), - isLocked = Some(true), - actionTimestamp = Some(System.currentTimeMillis())) - _ <- tracingService.handleUpdateGroup( - tracingId, - UpdateActionGroup[VolumeTracing](tracing.version + 1, - System.currentTimeMillis(), - None, - List(volumeUpdate), - None, - None, - "dummyTransactionId", - 1, - 0), - tracing.version, - urlOrHeaderToken(token, request) - ) - infoJson <- editableMappingService.infoJson(tracingId = tracingId, - editableMappingId = editableMappingId, - editableMappingInfo = editableMappingInfo, - version = Some(0L)) - } yield Ok(infoJson) - } - } - } - - private def assertMappingIsNotLocked(volumeTracing: VolumeTracing): Fox[Unit] = - bool2Fox(!volumeTracing.mappingIsLocked.getOrElse(false)) ?~> "annotation.mappingIsLocked" - - def agglomerateGraphMinCut(token: Option[String], tracingId: String): Action[MinCutParameters] = - Action.async(validateJson[MinCutParameters]) { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - edges <- editableMappingService.agglomerateGraphMinCut(request.body, remoteFallbackLayer, token) - } yield Ok(Json.toJson(edges)) - } - } - } - - def agglomerateGraphNeighbors(token: Option[String], tracingId: String): Action[NeighborsParameters] = - Action.async(validateJson[NeighborsParameters]) { implicit request => - log() { - 
accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - (segmentId, edges) <- editableMappingService.agglomerateGraphNeighbors(request.body, - remoteFallbackLayer, - token) - } yield Ok(Json.obj("segmentId" -> segmentId, "neighbors" -> Json.toJson(edges))) - } - } - } - - def updateEditableMapping(token: Option[String], tracingId: String): Action[List[EditableMappingUpdateActionGroup]] = - Action.async(validateJson[List[EditableMappingUpdateActionGroup]]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - mappingName <- tracing.mappingName.toFox - _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" - currentVersion <- editableMappingService.getClosestMaterializableVersionOrZero(mappingName, None) - _ <- bool2Fox(request.body.length == 1) ?~> "Editable mapping update request must contain exactly one update group" - updateGroup <- request.body.headOption.toFox - _ <- bool2Fox(updateGroup.version == currentVersion + 1) ?~> "version mismatch" - report = TracingUpdatesReport( - tracingId, - timestamps = List(Instant(updateGroup.timestamp)), - statistics = None, - significantChangesCount = updateGroup.actions.length, - viewChangesCount = 0, - urlOrHeaderToken(token, request) - ) - _ <- remoteWebknossosClient.reportTracingUpdates(report) - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - _ <- editableMappingService.update(mappingName, - updateGroup, - updateGroup.version, - remoteFallbackLayer, - urlOrHeaderToken(token, request)) - } yield Ok - } - } - - def 
editableMappingUpdateActionLog(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { - implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - mappingName <- tracing.mappingName.toFox - _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" - updateLog <- editableMappingService.updateActionLog(mappingName) - } yield Ok(updateLog) - } - } - } - - def editableMappingInfo(token: Option[String], tracingId: String, version: Option[Long]): Action[AnyContent] = - Action.async { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - mappingName <- tracing.mappingName.toFox - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - editableMappingInfo <- editableMappingService.getInfo(mappingName, - version, - remoteFallbackLayer, - urlOrHeaderToken(token, request)) - infoJson <- editableMappingService.infoJson(tracingId = tracingId, - editableMappingId = mappingName, - editableMappingInfo = editableMappingInfo, - version = version) - } yield Ok(infoJson) - } - } - } - - def editableMappingAgglomerateIdsForSegments(token: Option[String], tracingId: String): Action[ListOfLong] = - Action.async(validateProto[ListOfLong]) { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - editableMappingId <- tracing.mappingName.toFox - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - (editableMappingInfo, editableMappingVersion) <- editableMappingService.getInfoAndActualVersion( - editableMappingId, - requestedVersion = None, - 
remoteFallbackLayer = remoteFallbackLayer, - userToken = urlOrHeaderToken(token, request)) - relevantMapping: Map[Long, Long] <- editableMappingService.generateCombinedMappingForSegmentIds( - request.body.items.toSet, - editableMappingInfo, - editableMappingVersion, - editableMappingId, - remoteFallbackLayer, - urlOrHeaderToken(token, request)) - agglomerateIdsSorted = relevantMapping.toSeq.sortBy(_._1).map(_._2) - } yield Ok(ListOfLong(agglomerateIdsSorted).toByteArray) - } - } - } - - def editableMappingSegmentIdsForAgglomerate(token: Option[String], - tracingId: String, - agglomerateId: Long): Action[AnyContent] = Action.async { - implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - mappingName <- tracing.mappingName.toFox - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - agglomerateGraphBox: Box[AgglomerateGraph] <- editableMappingService - .getAgglomerateGraphForId(mappingName, - agglomerateId, - remoteFallbackLayer, - urlOrHeaderToken(token, request)) - .futureBox - segmentIds <- agglomerateGraphBox match { - case Full(agglomerateGraph) => Fox.successful(agglomerateGraph.segments) - case Empty => Fox.successful(List.empty) - case f: Failure => f.toFox - } - agglomerateIdIsPresent = agglomerateGraphBox.isDefined - } yield Ok(Json.toJson(EditableMappingSegmentListResult(segmentIds.toList, agglomerateIdIsPresent))) - } - } - } - - def getSegmentVolume(token: Option[String], tracingId: String): Action[SegmentStatisticsParameters] = + def getSegmentVolume(tracingId: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) - mappingName <- tracingService.baseMappingName(tracing) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) segmentVolumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => - volumeSegmentStatisticsService.getSegmentVolume(tracingId, + volumeSegmentStatisticsService.getSegmentVolume(annotationId, + tracingId, segmentId, request.body.mag, mappingName, - request.body.additionalCoordinates, - urlOrHeaderToken(token, request)) + request.body.additionalCoordinates) } } yield Ok(Json.toJson(segmentVolumes)) } } - def getSegmentBoundingBox(token: Option[String], tracingId: String): Action[SegmentStatisticsParameters] = + def getSegmentBoundingBox(tracingId: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) - mappingName <- tracingService.baseMappingName(tracing) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) segmentBoundingBoxes: List[BoundingBox] <- Fox.serialCombined(request.body.segmentIds) { segmentId => - volumeSegmentStatisticsService.getSegmentBoundingBox(tracingId, + volumeSegmentStatisticsService.getSegmentBoundingBox(annotationId, + tracingId, segmentId, request.body.mag, mappingName, - request.body.additionalCoordinates, - 
urlOrHeaderToken(token, request)) + request.body.additionalCoordinates) } } yield Ok(Json.toJson(segmentBoundingBoxes)) } } - def getSegmentIndex(token: Option[String], tracingId: String, segmentId: Long): Action[GetSegmentIndexParameters] = + def getSegmentIndex(tracingId: String, segmentId: Long): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - fallbackLayer <- tracingService.getFallbackLayer(tracingId) - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - mappingName <- tracingService.baseMappingName(tracing) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId, tracing) + mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) _ <- bool2Fox(DataLayer.bucketSize <= request.body.cubeSize) ?~> "cubeSize must be at least one bucket (32³)" bucketPositionsRaw: ListOfVec3IntProto <- volumeSegmentIndexService .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( @@ -598,8 +343,7 @@ class VolumeTracingController @Inject()( additionalCoordinates = request.body.additionalCoordinates, additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), mappingName = mappingName, - editableMappingTracingId = tracingService.editableMappingTracingId(tracing, tracingId), - userToken = urlOrHeaderToken(token, request) + editableMappingTracingId = volumeTracingService.editableMappingTracingId(tracing, tracingId) ) bucketPositionsForCubeSize = bucketPositionsRaw.values .map(vec3IntFromProto) @@ -611,4 +355,40 @@ class VolumeTracingController @Inject()( } } + // Used in task creation. 
History is dropped. Caller is responsible to create and save a matching AnnotationProto object + def duplicate(tracingId: String, + minMag: Option[Int], + maxMag: Option[Int], + editPosition: Option[String], + editRotation: Option[String], + boundingBox: Option[String]): Action[AnyContent] = + Action.async { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) + editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) + boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) + magRestrictions = MagRestrictions(minMag, maxMag) + newestSourceVersion <- annotationService.currentMaterializableVersion(annotationId) + newTracingId <- annotationService.duplicateVolumeTracing( + annotationId, + sourceTracingId = tracingId, + sourceVersion = newestSourceVersion, + newTracingId = TracingId.generate, + newVersion = 0, + editPosition = editPositionParsed, + editRotation = editRotationParsed, + boundingBox = boundingBoxParsed, + datasetBoundingBox = None, + isFromTask = false, + magRestrictions = magRestrictions + ) + } yield Ok(Json.toJson(newTracingId)) + } + } + } + } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala index 64094d38910..00359960781 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala @@ -1,6 +1,7 @@ package 
com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.mvc.ExtendedController import com.scalableminds.util.tools.{Fox, FoxImplicits} @@ -30,6 +31,7 @@ import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, WebknossosDataRequest} import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataFormat, DataLayer, ElementClass} import com.scalableminds.webknossos.datastore.services.UserAccessRequest +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService import com.scalableminds.webknossos.tracingstore.{ @@ -47,6 +49,7 @@ class VolumeTracingZarrStreamingController @Inject()( tracingService: VolumeTracingService, accessTokenService: TracingStoreAccessTokenService, editableMappingService: EditableMappingService, + annotationService: TSAnnotationService, remoteDataStoreClient: TSRemoteDatastoreClient, remoteWebknossosClient: TSRemoteWebknossosClient)(implicit ec: ExecutionContext) extends ExtendedController @@ -56,11 +59,12 @@ class VolumeTracingZarrStreamingController @Inject()( override def defaultErrorCode: Int = NOT_FOUND - def volumeTracingFolderContent(token: Option[String], tracingId: String, zarrVersion: Int): Action[AnyContent] = + def volumeTracingFolderContent(tracingId: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- 
tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.mags.map(vec3IntFromProto) additionalFiles = if (zarrVersion == 2) List(NgffMetadata.FILENAME_DOT_ZATTRS, NgffGroupHeader.FILENAME_DOT_ZGROUP) @@ -75,11 +79,12 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def volumeTracingFolderContentJson(token: Option[String], tracingId: String, zarrVersion: Int): Action[AnyContent] = + def volumeTracingFolderContentJson(tracingId: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.mags.map(vec3IntFromProto(_).toMagLiteral(allowScalar = true)) additionalFiles = if (zarrVersion == 2) List(NgffMetadata.FILENAME_DOT_ZATTRS, NgffGroupHeader.FILENAME_DOT_ZGROUP) @@ -88,15 +93,12 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def volumeTracingMagFolderContent(token: Option[String], - tracingId: String, - mag: String, - zarrVersion: Int): Action[AnyContent] = + def volumeTracingMagFolderContent(tracingId: String, mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.mags.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND @@ -111,15 +113,12 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def volumeTracingMagFolderContentJson(token: Option[String], - tracingId: String, - mag: String, - zarrVersion: Int): Action[AnyContent] = + def volumeTracingMagFolderContentJson(tracingId: String, mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.mags.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND @@ -128,15 +127,14 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def zArray(token: Option[String], tracingId: String, mag: String): Action[AnyContent] = Action.async { - implicit 
request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + def zArray(tracingId: String, mag: String): Action[AnyContent] = + Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.mags.map(vec3IntFromProto) - magParsed <- Vec3Int - .fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND + magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND cubeLength = DataLayer.bucketLength @@ -162,17 +160,17 @@ class VolumeTracingZarrStreamingController @Inject()( order = ArrayOrder.F) } yield Ok(Json.toJson(zarrHeader)) } - } + } - def zarrJsonForMag(token: Option[String], tracingId: String, mag: String): Action[AnyContent] = Action.async { - implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + def zarrJsonForMag(tracingId: String, mag: String): Action[AnyContent] = + Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.mags.map(vec3IntFromProto) - magParsed 
<- Vec3Int - .fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND + magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND additionalAxes = AdditionalAxis.fromProtos(tracing.additionalAxes) @@ -209,10 +207,10 @@ class VolumeTracingZarrStreamingController @Inject()( ) } yield Ok(Json.toJson(zarrHeader)) } - } + } - def zGroup(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + def zGroup(tracingId: String): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { Future(Ok(Json.toJson(NgffGroupHeader(zarr_format = 2)))) } } @@ -223,13 +221,12 @@ class VolumeTracingZarrStreamingController @Inject()( * Used by zarr-streaming. 
*/ def zAttrs( - token: Option[String], tracingId: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.mags.map(vec3IntFromProto) dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) ~> NOT_FOUND omeNgffHeader = NgffMetadata.fromNameVoxelSizeAndMags(tracingId, @@ -240,13 +237,12 @@ class VolumeTracingZarrStreamingController @Inject()( } def zarrJson( - token: Option[String], tracingId: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND sortedExistingMags = tracing.mags.map(vec3IntFromProto).toList.sortBy(_.maxDim) dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) ~> NOT_FOUND omeNgffHeader = NgffMetadataV0_5.fromNameVoxelSizeAndMags(tracingId, @@ -258,15 +254,12 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def zarrSource(token: Option[String], - tracingId: String, - tracingName: Option[String], - zarrVersion: Int): Action[AnyContent] = + def zarrSource(tracingId: String, 
tracingName: Option[String], zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND zarrLayer = ZarrSegmentationLayer( name = tracingName.getOrElse(tracingId), largestSegmentId = tracing.largestSegmentId, @@ -281,12 +274,13 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def rawZarrCube(token: Option[String], tracingId: String, mag: String, coordinates: String): Action[AnyContent] = + def rawZarrCube(tracingId: String, mag: String, coordinates: String): Action[AnyContent] = Action.async { implicit request => { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.mags.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND @@ -306,9 +300,10 @@ class VolumeTracingZarrStreamingController @Inject()( version = None, additionalCoordinates = additionalCoordinates ) - (data, missingBucketIndices) <- if (tracing.getHasEditableMapping) - editableMappingService.volumeData(tracing, tracingId, List(wkRequest), 
urlOrHeaderToken(token, request)) - else tracingService.data(tracingId, tracing, List(wkRequest)) + (data, missingBucketIndices) <- if (tracing.getHasEditableMapping) { + val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) + editableMappingService.volumeData(mappingLayer, List(wkRequest)) + } else tracingService.data(tracingId, tracing, List(wkRequest)) dataWithFallback <- getFallbackLayerDataIfEmpty(tracing, tracingId, data, @@ -316,22 +311,21 @@ class VolumeTracingZarrStreamingController @Inject()( magParsed, Vec3Int(x, y, z), cubeSize, - additionalCoordinates, - urlOrHeaderToken(token, request)) ~> NOT_FOUND + additionalCoordinates) ~> NOT_FOUND } yield Ok(dataWithFallback) } } } - private def getFallbackLayerDataIfEmpty(tracing: VolumeTracing, - tracingId: String, - data: Array[Byte], - missingBucketIndices: List[Int], - mag: Vec3Int, - position: Vec3Int, - cubeSize: Int, - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - urlToken: Option[String]): Fox[Array[Byte]] = + private def getFallbackLayerDataIfEmpty( + tracing: VolumeTracing, + tracingId: String, + data: Array[Byte], + missingBucketIndices: List[Int], + mag: Vec3Int, + position: Vec3Int, + cubeSize: Int, + additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit tc: TokenContext): Fox[Array[Byte]] = if (missingBucketIndices.nonEmpty) { for { remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) ?~> "No data at coordinates, no fallback layer defined" @@ -345,8 +339,7 @@ class VolumeTracingZarrStreamingController @Inject()( additionalCoordinates = additionalCoordinates ) (fallbackData, fallbackMissingBucketIndices) <- remoteDataStoreClient.getData(remoteFallbackLayer, - List(request), - urlToken) + List(request)) _ <- bool2Fox(fallbackMissingBucketIndices.isEmpty) ?~> "No data at coordinations in fallback layer" } yield fallbackData } else Fox.successful(data) diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala index e95880ae974..5d347c7a4b0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.option2Fox @@ -36,10 +37,9 @@ trait FallbackDataHelper { datasetId <- remoteWebknossosClient.getDataSourceIdForTracing(tracingId) } yield RemoteFallbackLayer(datasetId.team, datasetId.name, layerName, tracing.elementClass) - def getFallbackDataFromDatastore( - remoteFallbackLayer: RemoteFallbackLayer, - dataRequests: List[WebknossosDataRequest], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[(Array[Byte], List[Int])] = - fallbackDataCache.getOrLoad(FallbackDataKey(remoteFallbackLayer, dataRequests, userToken), - k => remoteDatastoreClient.getData(k.remoteFallbackLayer, k.dataRequests, k.userToken)) + def getFallbackDataFromDatastore(remoteFallbackLayer: RemoteFallbackLayer, dataRequests: List[WebknossosDataRequest])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[(Array[Byte], List[Int])] = + fallbackDataCache.getOrLoad(FallbackDataKey(remoteFallbackLayer, dataRequests, tc.userTokenOpt), + k => remoteDatastoreClient.getData(k.remoteFallbackLayer, k.dataRequests)) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala index 1e96f6c03bb..76be451e007 100644 
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala @@ -22,16 +22,12 @@ class TracingDataStore @Inject()(config: TracingStoreConfig, lazy val skeletons = new FossilDBClient("skeletons", config, slackNotificationService) - lazy val skeletonUpdates = new FossilDBClient("skeletonUpdates", config, slackNotificationService) - lazy val volumes = new FossilDBClient("volumes", config, slackNotificationService) lazy val volumeData = new FossilDBClient("volumeData", config, slackNotificationService) lazy val volumeSegmentIndex = new FossilDBClient("volumeSegmentIndex", config, slackNotificationService) - lazy val volumeUpdates = new FossilDBClient("volumeUpdates", config, slackNotificationService) - lazy val editableMappingsInfo = new FossilDBClient("editableMappingsInfo", config, slackNotificationService) lazy val editableMappingsAgglomerateToGraph = @@ -40,19 +36,20 @@ class TracingDataStore @Inject()(config: TracingStoreConfig, lazy val editableMappingsSegmentToAgglomerate = new FossilDBClient("editableMappingsSegmentToAgglomerate", config, slackNotificationService) - lazy val editableMappingUpdates = new FossilDBClient("editableMappingUpdates", config, slackNotificationService) + lazy val annotations = new FossilDBClient("annotations", config, slackNotificationService) + + lazy val annotationUpdates = new FossilDBClient("annotationUpdates", config, slackNotificationService) private def shutdown(): Unit = { healthClient.shutdown() skeletons.shutdown() - skeletonUpdates.shutdown() + annotationUpdates.shutdown() + annotations.shutdown() volumes.shutdown() volumeData.shutdown() - volumeUpdates.shutdown() editableMappingsInfo.shutdown() editableMappingsAgglomerateToGraph.shutdown() editableMappingsSegmentToAgglomerate.shutdown() - editableMappingUpdates.shutdown() volumeSegmentIndex.shutdown() () } diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingId.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingId.scala new file mode 100644 index 00000000000..9c6a1af49eb --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingId.scala @@ -0,0 +1,11 @@ +package com.scalableminds.webknossos.tracingstore.tracings + +import java.util.UUID + +object TracingId { + + def generate: String = UUID.randomUUID.toString + + lazy val dummy: String = "dummyTracingId" + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index 40bc6e1a123..c8ac3837bd4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -1,24 +1,14 @@ package com.scalableminds.webknossos.tracingstore.tracings -import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} -import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore +import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreRedisStore} import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType -import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats import com.typesafe.scalalogging.LazyLogging -import play.api.http.Status.CONFLICT -import net.liftweb.common.Box -import play.api.i18n.MessagesProvider -import play.api.libs.json._ import scalapb.{GeneratedMessage, GeneratedMessageCompanion} -import java.util.UUID import scala.concurrent.ExecutionContext import scala.concurrent.duration._ -object TracingIds { - val dummyTracingId: String = 
"dummyTracingId" -} - trait TracingService[T <: GeneratedMessage] extends KeyValueStoreImplicits with FoxImplicits @@ -36,18 +26,12 @@ trait TracingService[T <: GeneratedMessage] def temporaryTracingIdStore: TracingStoreRedisStore - def tracingMigrationService: TracingMigrationService[T] - - def dummyTracing: T + def remoteWebknossosClient: TSRemoteWebknossosClient - val handledGroupIdStore: TracingStoreRedisStore - - val uncommittedUpdatesStore: TracingStoreRedisStore + def tracingMigrationService: TracingMigrationService[T] implicit def tracingCompanion: GeneratedMessageCompanion[T] - implicit val updateActionJsonFormat: Format[UpdateAction[T]] - // this should be longer than maxCacheTime in webknossos/AnnotationStore // so that the references saved there remain valid throughout their life private val temporaryStoreTimeout = 70 minutes @@ -56,50 +40,10 @@ trait TracingService[T <: GeneratedMessage] // to provide useful error messages to the user if the temporary tracing is no longer present private val temporaryIdStoreTimeout = 10 days - private val handledGroupCacheExpiry: FiniteDuration = 24 hours - - def currentVersion(tracingId: String): Fox[Long] - - def currentVersion(tracing: T): Long - - private def transactionGroupKey(tracingId: String, transactionId: String, transactionGroupIndex: Int, version: Long) = - s"transactionGroup___${tracingId}___${transactionId}___${transactionGroupIndex}___$version" - protected def temporaryIdKey(tracingId: String) = s"temporaryTracingId___$tracingId" - private def patternFor(tracingId: String, transactionId: String) = - s"transactionGroup___${tracingId}___${transactionId}___*" - - def saveUncommitted(tracingId: String, - transactionId: String, - transactionGroupIndex: Int, - version: Long, - updateGroup: UpdateActionGroup[T], - expiry: FiniteDuration): Fox[Unit] = - for { - _ <- Fox.runIf(transactionGroupIndex > 0)( - Fox.assertTrue( - uncommittedUpdatesStore.contains(transactionGroupKey( - tracingId, - transactionId, - 
transactionGroupIndex - 1, - version))) ?~> s"Incorrect transaction index. Got: $transactionGroupIndex but ${transactionGroupIndex - 1} does not exist" ~> CONFLICT) - _ <- uncommittedUpdatesStore.insert(transactionGroupKey(tracingId, transactionId, transactionGroupIndex, version), - Json.toJson(updateGroup).toString(), - Some(expiry)) - } yield () - - def getAllUncommittedFor(tracingId: String, transactionId: String): Fox[List[UpdateActionGroup[T]]] = - for { - raw: Seq[String] <- uncommittedUpdatesStore.findAllConditional(patternFor(tracingId, transactionId)) - parsed: Seq[UpdateActionGroup[T]] = raw.flatMap(itemAsString => - JsonHelper.jsResultToOpt(Json.parse(itemAsString).validate[UpdateActionGroup[T]])) - } yield parsed.toList.sortBy(_.transactionGroupIndex) - - def removeAllUncommittedFor(tracingId: String, transactionId: String): Fox[Unit] = - uncommittedUpdatesStore.removeAllConditional(patternFor(tracingId, transactionId)) - + /* // TODO ? add this to migration? private def migrateTracing(tracingFox: Fox[T], tracingId: String): Fox[T] = tracingMigrationService.migrateTracing(tracingFox).flatMap { case (tracing, hasChanged) => @@ -108,52 +52,11 @@ trait TracingService[T <: GeneratedMessage] else Fox.successful(tracing) } + */ - def handleUpdateGroup(tracingId: String, - updateGroup: UpdateActionGroup[T], - previousVersion: Long, - userToken: Option[String]): Fox[_] - - def applyPendingUpdates(tracing: T, tracingId: String, targetVersion: Option[Long]): Fox[T] = Fox.successful(tracing) - - def find(tracingId: String, - version: Option[Long] = None, - useCache: Boolean = true, - applyUpdates: Boolean = false): Fox[T] = - if (tracingId == TracingIds.dummyTracingId) - Fox.successful(dummyTracing) - else { - val tracingFox = tracingStore.get(tracingId, version)(fromProtoBytes[T]).map(_.value) - tracingFox.flatMap { tracing => - val updatedTracing = if (applyUpdates) { - applyPendingUpdates(tracing, tracingId, version) - } else { - Fox.successful(tracing) - } - 
migrateTracing(updatedTracing, tracingId) - }.orElse { - if (useCache) - temporaryTracingStore.find(tracingId) - else - tracingFox - } - } - - def findMultiple(selectors: List[Option[TracingSelector]], - useCache: Boolean = true, - applyUpdates: Boolean = false): Fox[List[Option[T]]] = - Fox.combined { - selectors.map { - case Some(selector) => find(selector.tracingId, selector.version, useCache, applyUpdates).map(Some(_)) - case None => Fox.successful(None) - } - } - - def generateTracingId: String = UUID.randomUUID.toString - - def save(tracing: T, tracingId: Option[String], version: Long, toCache: Boolean = false): Fox[String] = { - val id = tracingId.getOrElse(generateTracingId) - if (toCache) { + def save(tracing: T, tracingId: Option[String], version: Long, toTemporaryStore: Boolean = false): Fox[String] = { + val id = tracingId.getOrElse(TracingId.generate) + if (toTemporaryStore) { temporaryTracingStore.insert(id, tracing, Some(temporaryStoreTimeout)) temporaryTracingIdStore.insert(temporaryIdKey(id), "", Some(temporaryIdStoreTimeout)) Fox.successful(id) @@ -162,33 +65,4 @@ trait TracingService[T <: GeneratedMessage] } } - private def handledGroupKey(tracingId: String, transactionId: String, version: Long, transactionGroupIndex: Int) = - s"handledGroup___${tracingId}___${transactionId}___${version}___$transactionGroupIndex" - - def saveToHandledGroupIdStore(tracingId: String, - transactionId: String, - version: Long, - transactionGroupIndex: Int): Fox[Unit] = { - val key = handledGroupKey(tracingId, transactionId, version, transactionGroupIndex) - handledGroupIdStore.insert(key, "()", Some(handledGroupCacheExpiry)) - } - - def handledGroupIdStoreContains(tracingId: String, - transactionId: String, - version: Long, - transactionGroupIndex: Int): Fox[Boolean] = - handledGroupIdStore.contains(handledGroupKey(tracingId, transactionId, version, transactionGroupIndex)) - - def merge(tracings: Seq[T], mergedVolumeStats: MergedVolumeStats, newEditableMappingIdOpt: 
Option[String]): Box[T] - - def remapTooLargeTreeIds(tracing: T): T = tracing - - def mergeVolumeData(tracingSelectors: Seq[TracingSelector], - tracings: Seq[T], - newId: String, - newVersion: Long, - toCache: Boolean, - userToken: Option[String])(implicit mp: MessagesProvider): Fox[MergedVolumeStats] - - def mergeEditableMappings(tracingsWithIds: List[(T, String)], userToken: Option[String]): Fox[String] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/UpdateActions.scala deleted file mode 100644 index aebd371ae76..00000000000 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/UpdateActions.scala +++ /dev/null @@ -1,91 +0,0 @@ -package com.scalableminds.webknossos.tracingstore.tracings - -import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing -import play.api.libs.json._ -import scalapb.GeneratedMessage - -trait UpdateAction[T <: GeneratedMessage] { - - def actionTimestamp: Option[Long] - - def actionAuthorId: Option[String] - - def applyOn(tracing: T): T = tracing - - def addTimestamp(timestamp: Long): UpdateAction[T] = this - - def addInfo(info: Option[String]): UpdateAction[T] = this - - def addAuthorId(authorId: Option[String]): UpdateAction[T] = this - - def transformToCompact: UpdateAction[T] = this - - // For analytics we wan to know how many changes are view only (e.g. 
move camera, toggle tree visibility) - // Overridden in subclasses - def isViewOnlyChange: Boolean = false -} - -object UpdateAction { - type SkeletonUpdateAction = UpdateAction[SkeletonTracing] - type VolumeUpdateAction = UpdateAction[VolumeTracing] -} - -case class UpdateActionGroup[T <: GeneratedMessage]( - version: Long, - timestamp: Long, - authorId: Option[String], - actions: List[UpdateAction[T]], - stats: Option[JsObject], - info: Option[String], - transactionId: String, - transactionGroupCount: Int, - transactionGroupIndex: Int -) { - def significantChangesCount: Int = actions.count(!_.isViewOnlyChange) - def viewChangesCount: Int = actions.count(_.isViewOnlyChange) -} - -object UpdateActionGroup { - - implicit def updateActionGroupReads[T <: GeneratedMessage]( - implicit fmt: Reads[UpdateAction[T]]): Reads[UpdateActionGroup[T]] = - (json: JsValue) => - for { - version <- json.validate((JsPath \ "version").read[Long]) - timestamp <- json.validate((JsPath \ "timestamp").read[Long]) - authorId <- json.validate((JsPath \ "authorId").readNullable[String]) - actions <- json.validate((JsPath \ "actions").read[List[UpdateAction[T]]]) - stats <- json.validate((JsPath \ "stats").readNullable[JsObject]) - info <- json.validate((JsPath \ "info").readNullable[String]) - transactionId <- json.validate((JsPath \ "transactionId").read[String]) - transactionGroupCount <- json.validate((JsPath \ "transactionGroupCount").read[Int]) - transactionGroupIndex <- json.validate((JsPath \ "transactionGroupIndex").read[Int]) - } yield { - UpdateActionGroup[T](version, - timestamp, - authorId, - actions, - stats, - info, - transactionId, - transactionGroupCount, - transactionGroupIndex) - } - - implicit def updateActionGroupWrites[T <: GeneratedMessage]( - implicit fmt: Writes[UpdateAction[T]]): Writes[UpdateActionGroup[T]] = - (value: UpdateActionGroup[T]) => - Json.obj( - "version" -> value.version, - "timestamp" -> value.timestamp, - "authorId" -> value.authorId, - "actions" -> 
Json.toJson(value.actions), - "stats" -> value.stats, - "info" -> value.info, - "transactionId" -> value.transactionId, - "transactionGroupCount" -> value.transactionGroupCount, - "transactionGroupIndex" -> value.transactionGroupIndex - ) - -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingElementKeys.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingElementKeys.scala new file mode 100644 index 00000000000..af9f7a2a287 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingElementKeys.scala @@ -0,0 +1,18 @@ +package com.scalableminds.webknossos.tracingstore.tracings.editablemapping + +import net.liftweb.common.Box +import net.liftweb.common.Box.tryo + +trait EditableMappingElementKeys { + + protected def agglomerateGraphKey(mappingId: String, agglomerateId: Long): String = + s"$mappingId/$agglomerateId" + + protected def segmentToAgglomerateKey(mappingId: String, chunkId: Long): String = + s"$mappingId/$chunkId" + + protected def chunkIdFromSegmentToAgglomerateKey(key: String): Box[Long] = tryo(key.split("/")(1).toLong) + + protected def agglomerateIdFromAgglomerateGraphKey(key: String): Box[Long] = tryo(key.split("/")(1).toLong) + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala index 96d44509897..1f320a38d59 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala @@ -1,5 +1,6 @@ package 
com.scalableminds.webknossos.tracingstore.tracings.editablemapping +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.tools.Fox @@ -21,6 +22,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ import ucar.ma2.{Array => MultiArray} import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import scala.concurrent.ExecutionContext @@ -29,16 +31,14 @@ class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketP override def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[Array[Byte]] = { val bucket: BucketPosition = readInstruction.bucket for { - editableMappingId <- Fox.successful(layer.name) + tracingId <- Fox.successful(layer.name) _ <- bool2Fox(layer.doesContainBucket(bucket)) remoteFallbackLayer <- layer.editableMappingService .remoteFallbackLayerFromVolumeTracing(layer.tracing, layer.tracingId) // called here to ensure updates are applied - (editableMappingInfo, editableMappingVersion) <- layer.editableMappingService.getInfoAndActualVersion( - editableMappingId, - requestedVersion = None, - remoteFallbackLayer = remoteFallbackLayer, - userToken = layer.token) + editableMappingInfo <- layer.annotationService.findEditableMappingInfo(layer.annotationId, + tracingId, + Some(layer.version))(ec, layer.tokenContext) dataRequest: WebknossosDataRequest = WebknossosDataRequest( position = Vec3Int(bucket.topLeft.mag1X, bucket.topLeft.mag1Y, bucket.topLeft.mag1Z), mag = bucket.mag, @@ -48,18 +48,17 @@ class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketP version = None, additionalCoordinates = readInstruction.bucket.additionalCoordinates ) - 
(unmappedData, indices) <- layer.editableMappingService.getFallbackDataFromDatastore(remoteFallbackLayer, - List(dataRequest), - layer.token) + (unmappedData, indices) <- layer.editableMappingService + .getFallbackDataFromDatastore(remoteFallbackLayer, List(dataRequest))(ec, layer.tokenContext) _ <- bool2Fox(indices.isEmpty) unmappedDataTyped <- layer.editableMappingService.bytesToUnsignedInt(unmappedData, layer.tracing.elementClass) segmentIds = layer.editableMappingService.collectSegmentIds(unmappedDataTyped) - relevantMapping <- layer.editableMappingService.generateCombinedMappingForSegmentIds(segmentIds, - editableMappingInfo, - editableMappingVersion, - editableMappingId, - remoteFallbackLayer, - layer.token) + relevantMapping <- layer.editableMappingService.generateCombinedMappingForSegmentIds( + segmentIds, + editableMappingInfo, + layer.version, + tracingId, + remoteFallbackLayer)(layer.tokenContext) mappedData: Array[Byte] <- layer.editableMappingService.mapData(unmappedDataTyped, relevantMapping, layer.elementClass) @@ -72,9 +71,11 @@ case class EditableMappingLayer(name: String, resolutions: List[Vec3Int], largestSegmentId: Option[Long], elementClass: ElementClass.Value, - token: Option[String], + tokenContext: TokenContext, tracing: VolumeTracing, + annotationId: String, tracingId: String, + annotationService: TSAnnotationService, editableMappingService: EditableMappingService) extends SegmentationLayer { override val mags: List[MagLocator] = List.empty // MagLocators do not apply for annotation layers @@ -90,7 +91,7 @@ case class EditableMappingLayer(name: String, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]): BucketProvider = new EditableMappingBucketProvider(layer = this) - override def bucketProviderCacheKey: String = s"$name-token=$token" + override def bucketProviderCacheKey: String = s"$name-token=${tokenContext.userTokenOpt}" override def mappings: Option[Set[String]] = None @@ -99,4 +100,6 @@ case class 
EditableMappingLayer(name: String, override def adminViewConfiguration: Option[LayerViewConfiguration] = None override def additionalAxes: Option[Seq[AdditionalAxis]] = None + + def version: Long = tracing.version } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 0f9f857ca61..53552cc74b1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -1,13 +1,14 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo -import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.SegmentToAgglomerateProto +import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.SegmentToAgglomerateChunkProto import com.scalableminds.webknossos.datastore.SkeletonTracing.{Edge, Tree, TreeTypeProto} import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto @@ -21,6 +22,7 @@ import com.scalableminds.webknossos.datastore.services.{ AdHocMeshServiceHolder, BinaryDataService } +import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import 
com.scalableminds.webknossos.tracingstore.tracings.{ FallbackDataHelper, KeyValueStoreImplicits, @@ -34,11 +36,10 @@ import net.liftweb.common.{Box, Empty, Failure, Full} import net.liftweb.common.Box.tryo import org.jgrapht.alg.flow.PushRelabelMFImpl import org.jgrapht.graph.{DefaultWeightedEdge, SimpleWeightedGraph} -import play.api.libs.json.{JsObject, JsValue, Json, OFormat} +import play.api.libs.json.{JsObject, Json, OFormat} import java.nio.file.Paths import java.util -import java.util.UUID import scala.concurrent.ExecutionContext import scala.concurrent.duration._ import scala.jdk.CollectionConverters.CollectionHasAsScala @@ -53,15 +54,14 @@ case class MinCutParameters( segmentId1: Long, segmentId2: Long, mag: Vec3Int, - agglomerateId: Long, - editableMappingId: String + agglomerateId: Long ) object MinCutParameters { implicit val jsonFormat: OFormat[MinCutParameters] = Json.format[MinCutParameters] } -case class NeighborsParameters(segmentId: Long, mag: Vec3Int, agglomerateId: Long, editableMappingId: String) +case class NeighborsParameters(segmentId: Long, mag: Vec3Int, agglomerateId: Long) object NeighborsParameters { implicit val jsonFormat: OFormat[NeighborsParameters] = Json.format[NeighborsParameters] @@ -96,18 +96,19 @@ class EditableMappingService @Inject()( extends KeyValueStoreImplicits with FallbackDataHelper with FoxImplicits + with ReversionHelper + with EditableMappingElementKeys with LazyLogging with ProtoGeometryImplicits { val defaultSegmentToAgglomerateChunkSize: Int = 64 * 1024 // max. 
1 MiB chunks (two 8-byte numbers per element) - private def generateId: String = UUID.randomUUID.toString - val binaryDataService = new BinaryDataService(Paths.get(""), None, None, None, None, None) adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) private val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService - private lazy val materializedInfoCache: AlfuCache[(String, Long), EditableMappingInfo] = AlfuCache(maxCapacity = 100) + // TODO cache materialized stuff again, for e.g. faster bucket loading + // private lazy val materializedInfoCache: AlfuCache[(String, Long), EditableMappingInfo] = AlfuCache(maxCapacity = 100) private lazy val segmentToAgglomerateChunkCache: AlfuCache[(String, Long, Long), Seq[(Long, Long)]] = AlfuCache() @@ -115,65 +116,35 @@ class EditableMappingService @Inject()( private lazy val agglomerateToGraphCache: AlfuCache[(String, Long, Long), AgglomerateGraph] = AlfuCache(maxCapacity = 50) - def infoJson(tracingId: String, - editableMappingInfo: EditableMappingInfo, - editableMappingId: String, - version: Option[Long]): Fox[JsObject] = - for { - version <- getClosestMaterializableVersionOrZero(editableMappingId, version) - } yield - Json.obj( - "mappingName" -> editableMappingId, - "version" -> version, - "tracingId" -> tracingId, - "baseMappingName" -> editableMappingInfo.baseMappingName, - "largestAgglomerateId" -> editableMappingInfo.largestAgglomerateId, - "createdTimestamp" -> editableMappingInfo.createdTimestamp - ) + def infoJson(tracingId: String, editableMappingInfo: EditableMappingInfo): JsObject = + Json.obj( + "tracingId" -> tracingId, + "baseMappingName" -> editableMappingInfo.baseMappingName, + "largestAgglomerateId" -> editableMappingInfo.largestAgglomerateId, + "createdTimestamp" -> editableMappingInfo.createdTimestamp + ) - def create(baseMappingName: String): Fox[(String, EditableMappingInfo)] = { - val newId = generateId + def create(tracingId: 
String, baseMappingName: String): Fox[EditableMappingInfo] = { val newEditableMappingInfo = EditableMappingInfo( baseMappingName = baseMappingName, createdTimestamp = Instant.now.epochMillis, largestAgglomerateId = 0L ) for { - _ <- tracingDataStore.editableMappingsInfo.put(newId, 0L, toProtoBytes(newEditableMappingInfo)) - } yield (newId, newEditableMappingInfo) + _ <- tracingDataStore.editableMappingsInfo.put(tracingId, 0L, toProtoBytes(newEditableMappingInfo)) + } yield newEditableMappingInfo } - def duplicate(editableMappingIdOpt: Option[String], - version: Option[Long], - remoteFallbackLayerBox: Box[RemoteFallbackLayer], - userToken: Option[String]): Fox[String] = - for { - editableMappingId <- editableMappingIdOpt ?~> "duplicate on editable mapping without id" - remoteFallbackLayer <- remoteFallbackLayerBox ?~> "duplicate on editable mapping without remote fallback layer" - editableMappingInfoAndVersion <- getInfoAndActualVersion(editableMappingId, - version, - remoteFallbackLayer, - userToken) - newIdAndInfoV0 <- create(editableMappingInfoAndVersion._1.baseMappingName) - newId = newIdAndInfoV0._1 - newVersion = editableMappingInfoAndVersion._2 - _ <- tracingDataStore.editableMappingsInfo.put(newId, newVersion, toProtoBytes(editableMappingInfoAndVersion._1)) - _ <- duplicateSegmentToAgglomerate(editableMappingId, newId, newVersion) - _ <- duplicateAgglomerateToGraph(editableMappingId, newId, newVersion) - updateActionsWithVersions <- getUpdateActionsWithVersions(editableMappingId, editableMappingInfoAndVersion._2, 0L) - _ <- Fox.serialCombined(updateActionsWithVersions) { - updateActionsWithVersion: (Long, List[EditableMappingUpdateAction]) => - tracingDataStore.editableMappingUpdates.put(newId, updateActionsWithVersion._1, updateActionsWithVersion._2) - } - } yield newId - - private def duplicateSegmentToAgglomerate(editableMappingId: String, newId: String, newVersion: Long): Fox[Unit] = { - val iterator = - new VersionedFossilDbIterator(editableMappingId, + 
def duplicateSegmentToAgglomerate(sourceTracingId: String, + newId: String, + sourceVersion: Long, + newVersion: Long): Fox[Unit] = { + val sourceIterator = + new VersionedFossilDbIterator(sourceTracingId, tracingDataStore.editableMappingsSegmentToAgglomerate, - Some(newVersion)) + Some(sourceVersion)) for { - _ <- Fox.combined(iterator.map { keyValuePair => + _ <- Fox.combined(sourceIterator.map { keyValuePair => for { chunkId <- chunkIdFromSegmentToAgglomerateKey(keyValuePair.key).toFox newKey = segmentToAgglomerateKey(newId, chunkId) @@ -185,13 +156,16 @@ class EditableMappingService @Inject()( } yield () } - private def duplicateAgglomerateToGraph(editableMappingId: String, newId: String, newVersion: Long): Fox[Unit] = { - val iterator = - new VersionedFossilDbIterator(editableMappingId, + def duplicateAgglomerateToGraph(sourceTracingId: String, + newId: String, + sourceVersion: Long, + newVersion: Long): Fox[Unit] = { + val sourceIterator = + new VersionedFossilDbIterator(sourceTracingId, tracingDataStore.editableMappingsAgglomerateToGraph, - Some(newVersion)) + Some(sourceVersion)) for { - _ <- Fox.combined(iterator.map { keyValuePair => + _ <- Fox.combined(sourceIterator.map { keyValuePair => for { agglomerateId <- agglomerateIdFromAgglomerateGraphKey(keyValuePair.key).toFox newKey = agglomerateGraphKey(newId, agglomerateId) @@ -201,220 +175,67 @@ class EditableMappingService @Inject()( } yield () } - def updateActionLog(editableMappingId: String): Fox[JsValue] = { - def versionedTupleToJson(tuple: (Long, List[EditableMappingUpdateAction])): JsObject = - Json.obj( - "version" -> tuple._1, - "value" -> Json.toJson(tuple._2) - ) - - for { - updates <- tracingDataStore.editableMappingUpdates.getMultipleVersionsAsVersionValueTuple(editableMappingId)( - fromJsonBytes[List[EditableMappingUpdateAction]]) - updateActionGroupsJs = updates.map(versionedTupleToJson) - } yield Json.toJson(updateActionGroupsJs) - } - - def getInfo(editableMappingId: String, - version: 
Option[Long] = None, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[EditableMappingInfo] = - for { - (info, _) <- getInfoAndActualVersion(editableMappingId, version, remoteFallbackLayer, userToken) - } yield info - - def getBaseMappingName(editableMappingId: String): Fox[Option[String]] = - for { - desiredVersion <- getClosestMaterializableVersionOrZero(editableMappingId, None) - infoBox <- getClosestMaterialized(editableMappingId, desiredVersion).futureBox - } yield - infoBox match { - case Full(info) => Some(info.value.baseMappingName) - case _ => None - } - - def getInfoAndActualVersion(editableMappingId: String, - requestedVersion: Option[Long] = None, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[(EditableMappingInfo, Long)] = - for { - desiredVersion <- getClosestMaterializableVersionOrZero(editableMappingId, requestedVersion) - materializedInfo <- materializedInfoCache.getOrLoad( - (editableMappingId, desiredVersion), - _ => applyPendingUpdates(editableMappingId, desiredVersion, remoteFallbackLayer, userToken)) - } yield (materializedInfo, desiredVersion) - - def update(editableMappingId: String, - updateActionGroup: EditableMappingUpdateActionGroup, - newVersion: Long, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[Unit] = - for { - actionsWithTimestamp <- Fox.successful(updateActionGroup.actions.map(_.addTimestamp(updateActionGroup.timestamp))) - _ <- dryApplyUpdates(editableMappingId, newVersion, actionsWithTimestamp, remoteFallbackLayer, userToken) ?~> "editableMapping.dryUpdate.failed" - _ <- tracingDataStore.editableMappingUpdates.put(editableMappingId, newVersion, actionsWithTimestamp) - } yield () - - private def dryApplyUpdates(editableMappingId: String, - newVersion: Long, - updates: List[EditableMappingUpdateAction], - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[Unit] = - for { - (previousInfo, previousVersion) <- 
getInfoAndActualVersion(editableMappingId, - None, - remoteFallbackLayer, - userToken) - updater = new EditableMappingUpdater( - editableMappingId, - previousInfo.baseMappingName, - previousVersion, - newVersion, - remoteFallbackLayer, - userToken, - remoteDatastoreClient, - this, - tracingDataStore, - relyOnAgglomerateIds = updates.length <= 1 - ) - updated <- updater.applyUpdatesAndSave(previousInfo, updates, dry = true) ?~> "editableMapping.update.failed" - } yield () - - def applyPendingUpdates(editableMappingId: String, - desiredVersion: Long, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[EditableMappingInfo] = - for { - closestMaterializedWithVersion <- getClosestMaterialized(editableMappingId, desiredVersion) - updatedEditableMappingInfo: EditableMappingInfo <- if (desiredVersion == closestMaterializedWithVersion.version) - Fox.successful(closestMaterializedWithVersion.value) - else - for { - pendingUpdates <- getPendingUpdates(editableMappingId, closestMaterializedWithVersion.version, desiredVersion) - updater = new EditableMappingUpdater( - editableMappingId, - closestMaterializedWithVersion.value.baseMappingName, - closestMaterializedWithVersion.version, - desiredVersion, - remoteFallbackLayer, - userToken, - remoteDatastoreClient, - this, - tracingDataStore, - relyOnAgglomerateIds = pendingUpdates.length <= 1 - ) - - updated <- updater.applyUpdatesAndSave(closestMaterializedWithVersion.value, pendingUpdates) - } yield updated - } yield updatedEditableMappingInfo - - private def getClosestMaterialized(editableMappingId: String, - desiredVersion: Long): Fox[VersionedKeyValuePair[EditableMappingInfo]] = - tracingDataStore.editableMappingsInfo.get(editableMappingId, version = Some(desiredVersion))( - fromProtoBytes[EditableMappingInfo]) - - def getClosestMaterializableVersionOrZero(editableMappingId: String, desiredVersion: Option[Long]): Fox[Long] = - tracingDataStore.editableMappingUpdates.getVersion(editableMappingId, - 
version = desiredVersion, - mayBeEmpty = Some(true), - emptyFallback = Some(0L)) - - private def getPendingUpdates(editableMappingId: String, - closestMaterializedVersion: Long, - closestMaterializableVersion: Long): Fox[List[EditableMappingUpdateAction]] = - if (closestMaterializableVersion == closestMaterializedVersion) { - Fox.successful(List.empty) - } else { - for { - updates <- getUpdateActionsWithVersions(editableMappingId, - newestVersion = closestMaterializableVersion, - oldestVersion = closestMaterializedVersion + 1L) - } yield updates.map(_._2).reverse.flatten - } - - private def getUpdateActionsWithVersions( - editableMappingId: String, - newestVersion: Long, - oldestVersion: Long): Fox[List[(Long, List[EditableMappingUpdateAction])]] = { - val batchRanges = batchRangeInclusive(oldestVersion, newestVersion, batchSize = 100) - for { - updateActionBatches <- Fox.serialCombined(batchRanges.toList) { batchRange => - val batchFrom = batchRange._1 - val batchTo = batchRange._2 - for { - res <- tracingDataStore.editableMappingUpdates - .getMultipleVersionsAsVersionValueTuple[List[EditableMappingUpdateAction]]( - editableMappingId, - Some(batchTo), - Some(batchFrom) - )(fromJsonBytes[List[EditableMappingUpdateAction]]) - } yield res - } - flat = updateActionBatches.flatten - } yield flat - } + def assertTracingHasEditableMapping(tracing: VolumeTracing)(implicit ec: ExecutionContext): Fox[Unit] = + bool2Fox(tracing.getHasEditableMapping) ?~> "annotation.volume.noEditableMapping" def findSegmentIdAtPositionIfNeeded(remoteFallbackLayer: RemoteFallbackLayer, positionOpt: Option[Vec3Int], segmentIdOpt: Option[Long], - mag: Vec3Int, - userToken: Option[String]): Fox[Long] = + mag: Vec3Int)(implicit tc: TokenContext): Fox[Long] = segmentIdOpt match { case Some(segmentId) => Fox.successful(segmentId) - case None => findSegmentIdAtPosition(remoteFallbackLayer, positionOpt, mag, userToken) + case None => findSegmentIdAtPosition(remoteFallbackLayer, positionOpt, mag) } 
private def findSegmentIdAtPosition(remoteFallbackLayer: RemoteFallbackLayer, positionOpt: Option[Vec3Int], - mag: Vec3Int, - userToken: Option[String]): Fox[Long] = + mag: Vec3Int)(implicit tc: TokenContext): Fox[Long] = for { pos <- positionOpt.toFox ?~> "segment id or position is required in editable mapping action" - voxelAsBytes: Array[Byte] <- remoteDatastoreClient.getVoxelAtPosition(userToken, remoteFallbackLayer, pos, mag) + voxelAsBytes: Array[Byte] <- remoteDatastoreClient.getVoxelAtPosition(remoteFallbackLayer, pos, mag) voxelAsLongArray: Array[Long] <- bytesToLongs(voxelAsBytes, remoteFallbackLayer.elementClass) _ <- Fox.bool2Fox(voxelAsLongArray.length == 1) ?~> s"Expected one, got ${voxelAsLongArray.length} segment id values for voxel." voxelAsLong <- voxelAsLongArray.headOption } yield voxelAsLong - def volumeData(tracing: VolumeTracing, - tracingId: String, - dataRequests: DataRequestCollection, - userToken: Option[String]): Fox[(Array[Byte], List[Int])] = - for { - editableMappingId <- tracing.mappingName.toFox - dataLayer = editableMappingLayer(editableMappingId, tracing, tracingId, userToken) - requests = dataRequests.map(r => - DataServiceDataRequest(null, dataLayer, r.cuboid(dataLayer), r.settings.copy(appliedAgglomerate = None))) - data <- binaryDataService.handleDataRequests(requests) - } yield data + def volumeData(editableMappingLayer: EditableMappingLayer, + dataRequests: DataRequestCollection): Fox[(Array[Byte], List[Int])] = { + val requests = dataRequests.map( + r => + DataServiceDataRequest(null, + editableMappingLayer, + r.cuboid(editableMappingLayer), + r.settings.copy(appliedAgglomerate = None))) + binaryDataService.handleDataRequests(requests) + } private def getSegmentToAgglomerateForSegmentIds(segmentIds: Set[Long], - editableMappingId: String, + tracingId: String, version: Long): Fox[Map[Long, Long]] = { val chunkIds = segmentIds.map(_ / defaultSegmentToAgglomerateChunkSize) for { maps: List[Seq[(Long, Long)]] <- 
Fox.serialCombined(chunkIds.toList)(chunkId => - getSegmentToAgglomerateChunkFiltered(editableMappingId, chunkId, version, segmentIds)) + getSegmentToAgglomerateChunkFiltered(tracingId, chunkId, version, segmentIds)) } yield maps.flatten.toMap } - private def getSegmentToAgglomerateChunkFiltered(editableMappingId: String, + private def getSegmentToAgglomerateChunkFiltered(tracingId: String, chunkId: Long, version: Long, segmentIds: Set[Long]): Fox[Seq[(Long, Long)]] = for { - segmentToAgglomerateChunk <- getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId, version) + segmentToAgglomerateChunk <- getSegmentToAgglomerateChunkWithEmptyFallback(tracingId, chunkId, version) filtered = segmentToAgglomerateChunk.filter(pair => segmentIds.contains(pair._1)) } yield filtered - def getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId: String, + def getSegmentToAgglomerateChunkWithEmptyFallback(tracingId: String, chunkId: Long, version: Long): Fox[Seq[(Long, Long)]] = segmentToAgglomerateChunkCache.getOrLoad( - (editableMappingId, chunkId, version), + (tracingId, chunkId, version), _ => for { - chunkBox: Box[Seq[(Long, Long)]] <- getSegmentToAgglomerateChunk(editableMappingId, chunkId, Some(version)).futureBox + chunkBox: Box[Seq[(Long, Long)]] <- getSegmentToAgglomerateChunk(tracingId, chunkId, Some(version)).futureBox segmentToAgglomerate <- chunkBox match { case Full(chunk) => Fox.successful(chunk) case Empty => Fox.successful(Seq.empty[(Long, Long)]) @@ -423,57 +244,58 @@ class EditableMappingService @Inject()( } yield segmentToAgglomerate ) - private def getSegmentToAgglomerateChunk(editableMappingId: String, + private def getSegmentToAgglomerateChunk(tracingId: String, chunkId: Long, - version: Option[Long]): Fox[Seq[(Long, Long)]] = + version: Option[Long]): Fox[Seq[(Long, Long)]] = { + val chunkKey = segmentToAgglomerateKey(tracingId, chunkId) + getSegmentToAgglomerateChunk(chunkKey, version) + } + + def 
getSegmentToAgglomerateChunk(chunkKey: String, version: Option[Long]): Fox[Seq[(Long, Long)]] = for { - keyValuePair: VersionedKeyValuePair[SegmentToAgglomerateProto] <- tracingDataStore.editableMappingsSegmentToAgglomerate - .get(segmentToAgglomerateKey(editableMappingId, chunkId), version, mayBeEmpty = Some(true))( - fromProtoBytes[SegmentToAgglomerateProto]) - valueProto = keyValuePair.value + keyValuePairBytes: VersionedKeyValuePair[Array[Byte]] <- tracingDataStore.editableMappingsSegmentToAgglomerate + .get(chunkKey, version, mayBeEmpty = Some(true)) + valueProto <- if (isRevertedElement(keyValuePairBytes.value)) Fox.empty + else fromProtoBytes[SegmentToAgglomerateChunkProto](keyValuePairBytes.value).toFox asSequence = valueProto.segmentToAgglomerate.map(pair => pair.segmentId -> pair.agglomerateId) } yield asSequence - def generateCombinedMappingForSegmentIds(segmentIds: Set[Long], - editableMapping: EditableMappingInfo, - editableMappingVersion: Long, - editableMappingId: String, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[Map[Long, Long]] = + def generateCombinedMappingForSegmentIds( + segmentIds: Set[Long], + editableMapping: EditableMappingInfo, + editableMappingVersion: Long, + tracingId: String, + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Map[Long, Long]] = for { editableMappingForSegmentIds <- getSegmentToAgglomerateForSegmentIds(segmentIds, - editableMappingId, + tracingId, editableMappingVersion) segmentIdsInEditableMapping: Set[Long] = editableMappingForSegmentIds.keySet segmentIdsInBaseMapping: Set[Long] = segmentIds.diff(segmentIdsInEditableMapping) baseMappingSubset <- getBaseSegmentToAgglomerate(editableMapping.baseMappingName, segmentIdsInBaseMapping, - remoteFallbackLayer, - userToken) + remoteFallbackLayer) } yield editableMappingForSegmentIds ++ baseMappingSubset - def getAgglomerateSkeletonWithFallback(editableMappingId: String, + def 
getAgglomerateSkeletonWithFallback(tracingId: String, + version: Long, + editableMappingInfo: EditableMappingInfo, remoteFallbackLayer: RemoteFallbackLayer, - agglomerateId: Long, - userToken: Option[String]): Fox[Array[Byte]] = + agglomerateId: Long)(implicit tc: TokenContext): Fox[Array[Byte]] = for { - // called here to ensure updates are applied - editableMappingInfo <- getInfo(editableMappingId, version = None, remoteFallbackLayer, userToken) - agglomerateGraphBox <- getAgglomerateGraphForId(editableMappingId, agglomerateId, remoteFallbackLayer, userToken).futureBox + agglomerateGraphBox <- getAgglomerateGraphForId(tracingId, version, agglomerateId).futureBox skeletonBytes <- agglomerateGraphBox match { case Full(agglomerateGraph) => - Fox.successful( - agglomerateGraphToSkeleton(editableMappingId, agglomerateGraph, remoteFallbackLayer, agglomerateId)) + Fox.successful(agglomerateGraphToSkeleton(tracingId, agglomerateGraph, remoteFallbackLayer, agglomerateId)) case Empty => - remoteDatastoreClient.getAgglomerateSkeleton(userToken, - remoteFallbackLayer, + remoteDatastoreClient.getAgglomerateSkeleton(remoteFallbackLayer, editableMappingInfo.baseMappingName, agglomerateId) case f: Failure => f.toFox } } yield skeletonBytes - private def agglomerateGraphToSkeleton(editableMappingId: String, + private def agglomerateGraphToSkeleton(tracingId: String, graph: AgglomerateGraph, remoteFallbackLayer: RemoteFallbackLayer, agglomerateId: Long): Array[Byte] = { @@ -497,7 +319,7 @@ class EditableMappingService @Inject()( createdTimestamp = System.currentTimeMillis(), nodes = nodes, edges = skeletonEdges, - name = s"agglomerate $agglomerateId ($editableMappingId)", + name = s"agglomerate $agglomerateId ($tracingId)", `type` = Some(TreeTypeProto.AGGLOMERATE) )) @@ -508,16 +330,15 @@ class EditableMappingService @Inject()( skeleton.toByteArray } - def getBaseSegmentToAgglomerate(mappingName: String, - segmentIds: Set[Long], - remoteFallbackLayer: RemoteFallbackLayer, - 
userToken: Option[String]): Fox[Map[Long, Long]] = { + def getBaseSegmentToAgglomerate( + baseMappingName: String, + segmentIds: Set[Long], + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Map[Long, Long]] = { val segmentIdsOrdered = segmentIds.toList for { agglomerateIdsOrdered <- remoteDatastoreClient.getAgglomerateIdsForSegmentIds(remoteFallbackLayer, - mappingName, - segmentIdsOrdered, - userToken) + baseMappingName, + segmentIdsOrdered) } yield segmentIdsOrdered.zip(agglomerateIdsOrdered).toMap } @@ -554,105 +375,64 @@ class EditableMappingService @Inject()( bytes = UnsignedIntegerArray.toByteArray(unsignedIntArray, elementClass) } yield bytes - private def editableMappingLayer(mappingName: String, - tracing: VolumeTracing, - tracingId: String, - userToken: Option[String]): EditableMappingLayer = - EditableMappingLayer( - mappingName, - tracing.boundingBox, - resolutions = tracing.mags.map(vec3IntFromProto).toList, - largestSegmentId = Some(0L), - elementClass = tracing.elementClass, - userToken, - tracing = tracing, - tracingId = tracingId, - editableMappingService = this + def createAdHocMesh(editableMappingLayer: EditableMappingLayer, + request: WebknossosAdHocMeshRequest): Fox[(Array[Float], List[Int])] = { + val adHocMeshRequest = AdHocMeshRequest( + dataSource = None, + dataLayer = editableMappingLayer, + cuboid = request.cuboid(editableMappingLayer), + segmentId = request.segmentId, + voxelSizeFactor = request.voxelSizeFactorInUnit, + mapping = None, + mappingType = None, + findNeighbors = request.findNeighbors ) + adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) + } - def createAdHocMesh(tracing: VolumeTracing, - tracingId: String, - request: WebknossosAdHocMeshRequest, - userToken: Option[String]): Fox[(Array[Float], List[Int])] = - for { - mappingName <- tracing.mappingName.toFox - segmentationLayer = editableMappingLayer(mappingName, tracing, tracingId, userToken) - adHocMeshRequest = AdHocMeshRequest( - 
dataSource = None, - dataLayer = segmentationLayer, - cuboid = request.cuboid(segmentationLayer), - segmentId = request.segmentId, - voxelSizeFactor = request.voxelSizeFactorInUnit, - mapping = None, - mappingType = None, - findNeighbors = request.findNeighbors - ) - result <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) - } yield result - - def agglomerateGraphKey(mappingId: String, agglomerateId: Long): String = - s"$mappingId/$agglomerateId" - - def segmentToAgglomerateKey(mappingId: String, chunkId: Long): String = - s"$mappingId/$chunkId" - - private def chunkIdFromSegmentToAgglomerateKey(key: String): Box[Long] = tryo(key.split("/")(1).toLong) - - private def agglomerateIdFromAgglomerateGraphKey(key: String): Box[Long] = tryo(key.split("/")(1).toLong) - - def getAgglomerateGraphForId(mappingId: String, - agglomerateId: Long, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String], - requestedVersion: Option[Long] = None): Fox[AgglomerateGraph] = + def getAgglomerateGraphForId(tracingId: String, version: Long, agglomerateId: Long): Fox[AgglomerateGraph] = for { - // called here to ensure updates are applied - (_, version) <- getInfoAndActualVersion(mappingId, requestedVersion, remoteFallbackLayer, userToken) agglomerateGraph <- agglomerateToGraphCache.getOrLoad( - (mappingId, agglomerateId, version), + (tracingId, agglomerateId, version), _ => - tracingDataStore.editableMappingsAgglomerateToGraph - .get(agglomerateGraphKey(mappingId, agglomerateId), Some(version), mayBeEmpty = Some(true))( - fromProtoBytes[AgglomerateGraph]) - .map(_.value) + for { + graphBytes: VersionedKeyValuePair[Array[Byte]] <- tracingDataStore.editableMappingsAgglomerateToGraph + .get(agglomerateGraphKey(tracingId, agglomerateId), Some(version), mayBeEmpty = Some(true)) + graphParsed <- if (isRevertedElement(graphBytes.value)) Fox.empty + else fromProtoBytes[AgglomerateGraph](graphBytes.value).toFox + } yield graphParsed ) } yield agglomerateGraph - def 
getAgglomerateGraphForIdWithFallback(mapping: EditableMappingInfo, - editableMappingId: String, - version: Option[Long], - agglomerateId: Long, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[AgglomerateGraph] = + def getAgglomerateGraphForIdWithFallback( + mapping: EditableMappingInfo, + tracingId: String, + version: Long, + agglomerateId: Long, + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[AgglomerateGraph] = for { - agglomerateGraphBox <- getAgglomerateGraphForId(editableMappingId, - agglomerateId, - remoteFallbackLayer, - userToken, - version).futureBox + agglomerateGraphBox <- getAgglomerateGraphForId(tracingId, version, agglomerateId).futureBox agglomerateGraph <- agglomerateGraphBox match { case Full(agglomerateGraph) => Fox.successful(agglomerateGraph) case Empty => - remoteDatastoreClient.getAgglomerateGraph(remoteFallbackLayer, - mapping.baseMappingName, - agglomerateId, - userToken) + remoteDatastoreClient.getAgglomerateGraph(remoteFallbackLayer, mapping.baseMappingName, agglomerateId) case f: Failure => f.toFox } } yield agglomerateGraph - def agglomerateGraphMinCut(parameters: MinCutParameters, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[List[EdgeWithPositions]] = + def agglomerateGraphMinCut( + tracingId: String, + version: Long, + editableMappingInfo: EditableMappingInfo, + parameters: MinCutParameters, + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[List[EdgeWithPositions]] = for { // called here to ensure updates are applied - mapping <- getInfo(parameters.editableMappingId, version = None, remoteFallbackLayer, userToken) - agglomerateGraph <- getAgglomerateGraphForIdWithFallback(mapping, - parameters.editableMappingId, - None, + agglomerateGraph <- getAgglomerateGraphForIdWithFallback(editableMappingInfo, + tracingId, + version, parameters.agglomerateId, - remoteFallbackLayer, - userToken) + remoteFallbackLayer) ?~> 
"getAgglomerateGraph.failed" edgesToCut <- minCut(agglomerateGraph, parameters.segmentId1, parameters.segmentId2) ?~> "Could not calculate min-cut on agglomerate graph." edgesWithPositions = annotateEdgesWithPositions(edgesToCut, agglomerateGraph) } yield edgesWithPositions @@ -709,18 +489,18 @@ class EditableMappingService @Inject()( ) } - def agglomerateGraphNeighbors(parameters: NeighborsParameters, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[(Long, Seq[NodeWithPosition])] = + def agglomerateGraphNeighbors( + tracingId: String, + editableMappingInfo: EditableMappingInfo, + version: Long, + parameters: NeighborsParameters, + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[(Long, Seq[NodeWithPosition])] = for { - // called here to ensure updates are applied - mapping <- getInfo(parameters.editableMappingId, version = None, remoteFallbackLayer, userToken) - agglomerateGraph <- getAgglomerateGraphForIdWithFallback(mapping, - parameters.editableMappingId, - None, + agglomerateGraph <- getAgglomerateGraphForIdWithFallback(editableMappingInfo, + tracingId, + version, parameters.agglomerateId, - remoteFallbackLayer, - userToken) + remoteFallbackLayer) neighborNodes = neighbors(agglomerateGraph, parameters.segmentId) nodesWithPositions = annotateNodesWithPositions(neighborNodes, agglomerateGraph) } yield (parameters.segmentId, nodesWithPositions) @@ -735,39 +515,33 @@ class EditableMappingService @Inject()( neighborNodes } - def merge(editableMappingIds: List[String], - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[String] = + /* + def merge(newTracingId: String, tracingIds: List[String], remoteFallbackLayer: RemoteFallbackLayer)( + implicit tc: TokenContext): Fox[Unit] = for { - firstMappingId <- editableMappingIds.headOption.toFox + firstTracingId <- tracingIds.headOption.toFox before = Instant.now - newMappingId <- duplicate(Some(firstMappingId), version = None, 
Some(remoteFallbackLayer), userToken) - _ <- Fox.serialCombined(editableMappingIds.tail)(editableMappingId => - mergeInto(newMappingId, editableMappingId, remoteFallbackLayer, userToken)) - _ = logger.info(s"Merging ${editableMappingIds.length} editable mappings took ${Instant.since(before)}") - } yield newMappingId + _ <- duplicate(firstTracingId, newTracingId, version = None, Some(remoteFallbackLayer)) + _ <- Fox.serialCombined(tracingIds.tail)(tracingId => mergeInto(newTracingId, tracingId, remoteFallbackLayer)) + _ = logger.info(s"Merging ${tracingIds.length} editable mappings took ${Instant.since(before)}") + } yield () // read as: merge source into target (mutate target) - private def mergeInto(targetEditableMappingId: String, - sourceEditableMappingId: String, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[Unit] = + private def mergeInto(targetTracingId: String, sourceTracingId: String, remoteFallbackLayer: RemoteFallbackLayer)( + implicit tc: TokenContext): Fox[Unit] = for { - targetNewestVersion <- getClosestMaterializableVersionOrZero(targetEditableMappingId, None) - sourceNewestMaterializedWithVersion <- getInfoAndActualVersion(sourceEditableMappingId, - None, - remoteFallbackLayer, - userToken) + targetNewestVersion <- getClosestMaterializableVersionOrZero(targetTracingId, None) + sourceNewestMaterializedWithVersion <- getInfoAndActualVersion(sourceTracingId, None, remoteFallbackLayer) sourceNewestVersion = sourceNewestMaterializedWithVersion._2 - updateActionsWithVersions <- getUpdateActionsWithVersions(sourceEditableMappingId, sourceNewestVersion, 0L) + updateActionsWithVersions <- getUpdateActionsWithVersions(sourceTracingId, sourceNewestVersion, 0L) updateActionsToApply = updateActionsWithVersions.map(_._2).reverse.flatten updater = new EditableMappingUpdater( - targetEditableMappingId, + targetTracingId, sourceNewestMaterializedWithVersion._1.baseMappingName, targetNewestVersion, targetNewestVersion + 
sourceNewestVersion, remoteFallbackLayer, - userToken, + tc, remoteDatastoreClient, this, tracingDataStore, @@ -775,16 +549,11 @@ class EditableMappingService @Inject()( ) _ <- updater.applyUpdatesAndSave(sourceNewestMaterializedWithVersion._1, updateActionsToApply) _ <- Fox.serialCombined(updateActionsWithVersions) { updateActionsWithVersion => - tracingDataStore.editableMappingUpdates.put(targetEditableMappingId, + tracingDataStore.editableMappingUpdates.put(targetTracingId, updateActionsWithVersion._1 + targetNewestVersion, updateActionsWithVersion._2) } } yield () - private def batchRangeInclusive(from: Long, to: Long, batchSize: Long): Seq[(Long, Long)] = - (0L to ((to - from) / batchSize)).map { batchIndex => - val batchFrom = batchIndex * batchSize + from - val batchTo = Math.min(to, (batchIndex + 1) * batchSize + from - 1) - (batchFrom, batchTo) - } + */ } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala new file mode 100644 index 00000000000..e9c7422ecd3 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala @@ -0,0 +1,123 @@ +package com.scalableminds.webknossos.tracingstore.tracings.editablemapping + +import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph +import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.SegmentToAgglomerateChunkProto +import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper +import com.scalableminds.webknossos.tracingstore.tracings.{ + FossilDBClient, + KeyValueStoreImplicits, + VersionedKeyValuePair +} +import net.liftweb.common.Full + +import scala.annotation.tailrec + +class VersionedAgglomerateToGraphIterator(prefix: String, + segmentToAgglomerateDataStore: 
FossilDBClient, + version: Option[Long] = None) + extends Iterator[(String, AgglomerateGraph, Long)] + with ReversionHelper + with KeyValueStoreImplicits { + private val batchSize = 64 + + private var currentStartAfterKey: Option[String] = None + private var currentBatchIterator: Iterator[VersionedKeyValuePair[Array[Byte]]] = fetchNext + private var nextGraph: Option[VersionedKeyValuePair[AgglomerateGraph]] = None + + private def fetchNext: Iterator[VersionedKeyValuePair[Array[Byte]]] = + segmentToAgglomerateDataStore.getMultipleKeys(currentStartAfterKey, Some(prefix), version, Some(batchSize)).iterator + + private def fetchNextAndSave = { + currentBatchIterator = fetchNext + currentBatchIterator + } + + @tailrec + private def getNextNonRevertedGraph: Option[VersionedKeyValuePair[AgglomerateGraph]] = + if (currentBatchIterator.hasNext) { + val chunk = currentBatchIterator.next() + currentStartAfterKey = Some(chunk.key) + val graphParsedBox = fromProtoBytes[AgglomerateGraph](chunk.value) + graphParsedBox match { + case _ if isRevertedElement(chunk.value) => getNextNonRevertedGraph + case Full(graphParsed) => Some(VersionedKeyValuePair(versionedKey = chunk.versionedKey, value = graphParsed)) + case _ => getNextNonRevertedGraph + } + } else { + if (!fetchNextAndSave.hasNext) None + else getNextNonRevertedGraph + } + + override def hasNext: Boolean = + if (nextGraph.isDefined) true + else { + nextGraph = getNextNonRevertedGraph + nextGraph.isDefined + } + + override def next(): (String, AgglomerateGraph, Long) = { + val nextRes = nextGraph match { + case Some(bucket) => bucket + case None => getNextNonRevertedGraph.get + } + nextGraph = None + // TODO parse graph key? 
(=agglomerate id) + (nextRes.key, nextRes.value, nextRes.version) + } + +} + +class VersionedSegmentToAgglomerateChunkIterator(prefix: String, + segmentToAgglomerateDataStore: FossilDBClient, + version: Option[Long] = None) + extends Iterator[(String, SegmentToAgglomerateChunkProto, Long)] + with ReversionHelper + with KeyValueStoreImplicits { + private val batchSize = 64 + + private var currentStartAfterKey: Option[String] = None + private var currentBatchIterator: Iterator[VersionedKeyValuePair[Array[Byte]]] = fetchNext + private var nextChunk: Option[VersionedKeyValuePair[SegmentToAgglomerateChunkProto]] = None + + private def fetchNext: Iterator[VersionedKeyValuePair[Array[Byte]]] = + segmentToAgglomerateDataStore.getMultipleKeys(currentStartAfterKey, Some(prefix), version, Some(batchSize)).iterator + + private def fetchNextAndSave = { + currentBatchIterator = fetchNext + currentBatchIterator + } + + @tailrec + private def getNextNonRevertedChunk: Option[VersionedKeyValuePair[SegmentToAgglomerateChunkProto]] = + if (currentBatchIterator.hasNext) { + val chunk = currentBatchIterator.next() + currentStartAfterKey = Some(chunk.key) + val chunkParsedBox = fromProtoBytes[SegmentToAgglomerateChunkProto](chunk.value) + chunkParsedBox match { + case _ if isRevertedElement(chunk.value) => getNextNonRevertedChunk + case Full(chunkParsed) => Some(VersionedKeyValuePair(versionedKey = chunk.versionedKey, value = chunkParsed)) + case _ => getNextNonRevertedChunk + } + } else { + if (!fetchNextAndSave.hasNext) None + else getNextNonRevertedChunk + } + + override def hasNext: Boolean = + if (nextChunk.isDefined) true + else { + nextChunk = getNextNonRevertedChunk + nextChunk.isDefined + } + + override def next(): (String, SegmentToAgglomerateChunkProto, Long) = { + val nextRes = nextChunk match { + case Some(bucket) => bucket + case None => getNextNonRevertedChunk.get + } + nextChunk = None + // TODO parse chunk key? 
+ (nextRes.key, nextRes.value, nextRes.version) + } + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala index 652f13c96d9..9cd4ae51d32 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala @@ -1,12 +1,11 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.webknossos.tracingstore.annotation.{LayerUpdateAction, UpdateAction} import play.api.libs.json.Format.GenericFormat import play.api.libs.json._ -trait EditableMappingUpdateAction { - def addTimestamp(timestamp: Long): EditableMappingUpdateAction -} +trait EditableMappingUpdateAction extends LayerUpdateAction // we switched from positions to segment ids in https://github.com/scalableminds/webknossos/pull/7742. // Both are now optional to support applying old update actions stored in the db. 
@@ -16,9 +15,17 @@ case class SplitAgglomerateUpdateAction(agglomerateId: Long, segmentId1: Option[Long], segmentId2: Option[Long], mag: Vec3Int, - actionTimestamp: Option[Long] = None) + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends EditableMappingUpdateAction { override def addTimestamp(timestamp: Long): EditableMappingUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } object SplitAgglomerateUpdateAction { @@ -34,41 +41,19 @@ case class MergeAgglomerateUpdateAction(agglomerateId1: Long, segmentId1: Option[Long], segmentId2: Option[Long], mag: Vec3Int, - actionTimestamp: Option[Long] = None) + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends EditableMappingUpdateAction { override def addTimestamp(timestamp: Long): EditableMappingUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } object MergeAgglomerateUpdateAction { implicit val jsonFormat: OFormat[MergeAgglomerateUpdateAction] = Json.format[MergeAgglomerateUpdateAction] } - -object EditableMappingUpdateAction { - - implicit object editableMappingUpdateActionFormat extends Format[EditableMappingUpdateAction] { - override def reads(json: JsValue): JsResult[EditableMappingUpdateAction] = 
- (json \ "name").validate[String].flatMap { - case "mergeAgglomerate" => (json \ "value").validate[MergeAgglomerateUpdateAction] - case "splitAgglomerate" => (json \ "value").validate[SplitAgglomerateUpdateAction] - case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") - } - - override def writes(o: EditableMappingUpdateAction): JsValue = o match { - case s: SplitAgglomerateUpdateAction => - Json.obj("name" -> "splitAgglomerate", "value" -> Json.toJson(s)(SplitAgglomerateUpdateAction.jsonFormat)) - case s: MergeAgglomerateUpdateAction => - Json.obj("name" -> "mergeAgglomerate", "value" -> Json.toJson(s)(MergeAgglomerateUpdateAction.jsonFormat)) - } - } - -} - -case class EditableMappingUpdateActionGroup( - version: Long, - timestamp: Long, - actions: List[EditableMappingUpdateAction] -) - -object EditableMappingUpdateActionGroup { - implicit val jsonFormat: OFormat[EditableMappingUpdateActionGroup] = Json.format[EditableMappingUpdateActionGroup] -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 2440e17a667..fc545c9e8d7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -1,14 +1,17 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.AgglomerateGraph.{AgglomerateEdge, AgglomerateGraph} import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo import 
com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.{ SegmentAgglomeratePair, - SegmentToAgglomerateProto + SegmentToAgglomerateChunkProto } import com.scalableminds.webknossos.tracingstore.TSRemoteDatastoreClient +import com.scalableminds.webknossos.tracingstore.annotation.{TSAnnotationService, UpdateAction} +import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import com.scalableminds.webknossos.tracingstore.tracings.{ KeyValueStoreImplicits, RemoteFallbackLayer, @@ -29,54 +32,71 @@ import scala.jdk.CollectionConverters.CollectionHasAsScala // this results in only one version increment in the db per update group class EditableMappingUpdater( - editableMappingId: String, + annotationId: String, + tracingId: String, baseMappingName: String, oldVersion: Long, newVersion: Long, remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String], + tokenContext: TokenContext, remoteDatastoreClient: TSRemoteDatastoreClient, editableMappingService: EditableMappingService, + annotationService: TSAnnotationService, tracingDataStore: TracingDataStore, relyOnAgglomerateIds: Boolean // False during merge and in case of multiple actions. 
Then, look up all agglomerate ids at positions ) extends KeyValueStoreImplicits + with ReversionHelper with FoxImplicits + with EditableMappingElementKeys with LazyLogging { - private val segmentToAgglomerateBuffer: mutable.Map[String, Map[Long, Long]] = - new mutable.HashMap[String, Map[Long, Long]]() - private val agglomerateToGraphBuffer: mutable.Map[String, AgglomerateGraph] = - new mutable.HashMap[String, AgglomerateGraph]() + // chunkKey → (Map[segmentId → agglomerateId], isToBeReverted) + private val segmentToAgglomerateBuffer: mutable.Map[String, (Map[Long, Long], Boolean)] = + new mutable.HashMap[String, (Map[Long, Long], Boolean)]() + // agglomerateKey → (agglomerateGraph, isToBeReverted) + private val agglomerateToGraphBuffer: mutable.Map[String, (AgglomerateGraph, Boolean)] = + new mutable.HashMap[String, (AgglomerateGraph, Boolean)]() def applyUpdatesAndSave(existingEditabeMappingInfo: EditableMappingInfo, - updates: List[EditableMappingUpdateAction], + updates: List[UpdateAction], dry: Boolean = false)(implicit ec: ExecutionContext): Fox[EditableMappingInfo] = for { updatedEditableMappingInfo: EditableMappingInfo <- updateIter(Some(existingEditabeMappingInfo), updates) - _ <- Fox.runIf(!dry)(flushToFossil(updatedEditableMappingInfo)) + _ <- Fox.runIf(!dry)(flushBuffersToFossil()) + _ <- Fox.runIf(!dry)(flushUpdatedInfoToFossil(updatedEditableMappingInfo)) } yield updatedEditableMappingInfo - private def flushToFossil(updatedEditableMappingInfo: EditableMappingInfo)(implicit ec: ExecutionContext): Fox[Unit] = + def flushBuffersToFossil()(implicit ec: ExecutionContext): Fox[Unit] = for { _ <- Fox.serialCombined(segmentToAgglomerateBuffer.keys.toList)(flushSegmentToAgglomerateChunk) _ <- Fox.serialCombined(agglomerateToGraphBuffer.keys.toList)(flushAgglomerateGraph) - _ <- tracingDataStore.editableMappingsInfo.put(editableMappingId, newVersion, updatedEditableMappingInfo) + } yield () + + private def flushUpdatedInfoToFossil(updatedEditableMappingInfo: 
EditableMappingInfo): Fox[Unit] = + for { + _ <- tracingDataStore.editableMappingsInfo.put(tracingId, newVersion, updatedEditableMappingInfo) } yield () private def flushSegmentToAgglomerateChunk(key: String): Fox[Unit] = { - val chunk = segmentToAgglomerateBuffer(key) - val proto = SegmentToAgglomerateProto(chunk.toVector.map { segmentAgglomerateTuple => - SegmentAgglomeratePair(segmentAgglomerateTuple._1, segmentAgglomerateTuple._2) - }) - tracingDataStore.editableMappingsSegmentToAgglomerate.put(key, newVersion, proto.toByteArray) + val (chunk, isToBeReverted) = segmentToAgglomerateBuffer(key) + val valueToFlush: Array[Byte] = + if (isToBeReverted) revertedValue + else { + val proto = SegmentToAgglomerateChunkProto(chunk.toVector.map { segmentAgglomerateTuple => + SegmentAgglomeratePair(segmentAgglomerateTuple._1, segmentAgglomerateTuple._2) + }) + proto.toByteArray + } + tracingDataStore.editableMappingsSegmentToAgglomerate.put(key, newVersion, valueToFlush) } private def flushAgglomerateGraph(key: String): Fox[Unit] = { - val graph = agglomerateToGraphBuffer(key) - tracingDataStore.editableMappingsAgglomerateToGraph.put(key, newVersion, graph) + val (graph, isToBeReverted) = agglomerateToGraphBuffer(key) + val valueToFlush: Array[Byte] = if (isToBeReverted) revertedValue else graph + tracingDataStore.editableMappingsAgglomerateToGraph.put(key, newVersion, valueToFlush) } - private def updateIter(mappingFox: Fox[EditableMappingInfo], remainingUpdates: List[EditableMappingUpdateAction])( + private def updateIter(mappingFox: Fox[EditableMappingInfo], remainingUpdates: List[UpdateAction])( implicit ec: ExecutionContext): Fox[EditableMappingInfo] = mappingFox.futureBox.flatMap { case Empty => @@ -97,13 +117,14 @@ class EditableMappingUpdater( mappingFox } - private def applyOneUpdate(mapping: EditableMappingInfo, update: EditableMappingUpdateAction)( + def applyOneUpdate(mapping: EditableMappingInfo, update: UpdateAction)( implicit ec: ExecutionContext): 
Fox[EditableMappingInfo] = update match { case splitAction: SplitAgglomerateUpdateAction => applySplitAction(mapping, splitAction) ?~> "Failed to apply split action" case mergeAction: MergeAgglomerateUpdateAction => applyMergeAction(mapping, mergeAction) ?~> "Failed to apply merge action" + case _ => Fox.failure("this is not an editable mapping update action!") } private def applySplitAction(editableMappingInfo: EditableMappingInfo, update: SplitAgglomerateUpdateAction)( @@ -112,21 +133,19 @@ class EditableMappingUpdater( segmentId1 <- editableMappingService.findSegmentIdAtPositionIfNeeded(remoteFallbackLayer, update.segmentPosition1, update.segmentId1, - update.mag, - userToken) + update.mag)(tokenContext) segmentId2 <- editableMappingService.findSegmentIdAtPositionIfNeeded(remoteFallbackLayer, update.segmentPosition2, update.segmentId2, - update.mag, - userToken) + update.mag)(tokenContext) agglomerateId <- agglomerateIdForSplitAction(update, segmentId1) agglomerateGraph <- agglomerateGraphForIdWithFallback(editableMappingInfo, agglomerateId) _ = if (segmentId1 == 0) logger.warn( - s"Split action for editable mapping $editableMappingId: Looking up segment id at position ${update.segmentPosition1} in mag ${update.mag} returned invalid value zero. Splitting outside of dataset?") + s"Split action for editable mapping $tracingId: Looking up segment id at position ${update.segmentPosition1} in mag ${update.mag} returned invalid value zero. Splitting outside of dataset?") _ = if (segmentId2 == 0) logger.warn( - s"Split action for editable mapping $editableMappingId: Looking up segment id at position ${update.segmentPosition2} in mag ${update.mag} returned invalid value zero. Splitting outside of dataset?") + s"Split action for editable mapping $tracingId: Looking up segment id at position ${update.segmentPosition2} in mag ${update.mag} returned invalid value zero. 
Splitting outside of dataset?") (graph1, graph2) <- tryo(splitGraph(agglomerateId, agglomerateGraph, update, segmentId1, segmentId2)) ?~> s"splitGraph failed while removing edge between segments $segmentId1 and $segmentId2" largestExistingAgglomerateId <- largestAgglomerateId(editableMappingInfo) agglomerateId2 = largestExistingAgglomerateId + 1L @@ -155,21 +174,33 @@ class EditableMappingUpdater( } yield (agglomerateId1, agglomerateId2) } + private def getFromSegmentToAgglomerateBuffer(chunkKey: String): Option[Map[Long, Long]] = + segmentToAgglomerateBuffer.get(chunkKey).flatMap { + case (chunkFromBuffer, isToBeReverted) => + if (isToBeReverted) None else Some(chunkFromBuffer) + } + + private def getFromAgglomerateToGraphBuffer(chunkKey: String): Option[AgglomerateGraph] = + agglomerateToGraphBuffer.get(chunkKey).flatMap { + case (graphFromBuffer, isToBeReverted) => + if (isToBeReverted) None else Some(graphFromBuffer) + } + private def agglomerateIdForSegmentId(segmentId: Long)(implicit ec: ExecutionContext): Fox[Long] = { val chunkId = segmentId / editableMappingService.defaultSegmentToAgglomerateChunkSize - val chunkKey = editableMappingService.segmentToAgglomerateKey(editableMappingId, chunkId) - val chunkFromBufferOpt = segmentToAgglomerateBuffer.get(chunkKey) + val chunkKey = segmentToAgglomerateKey(tracingId, chunkId) + val chunkFromBufferOpt = getFromSegmentToAgglomerateBuffer(chunkKey) for { chunk <- Fox.fillOption(chunkFromBufferOpt) { editableMappingService - .getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId, version = oldVersion) + .getSegmentToAgglomerateChunkWithEmptyFallback(tracingId, chunkId, version = oldVersion) .map(_.toMap) } agglomerateId <- chunk.get(segmentId) match { case Some(agglomerateId) => Fox.successful(agglomerateId) case None => editableMappingService - .getBaseSegmentToAgglomerate(baseMappingName, Set(segmentId), remoteFallbackLayer, userToken) + .getBaseSegmentToAgglomerate(baseMappingName, 
Set(segmentId), remoteFallbackLayer)(tokenContext) .flatMap(baseSegmentToAgglomerate => baseSegmentToAgglomerate.get(segmentId)) } } yield agglomerateId @@ -188,42 +219,42 @@ class EditableMappingUpdater( private def updateSegmentToAgglomerateChunk(agglomerateId: Long, chunkId: Long, segmentIdsToUpdate: Seq[Long])( implicit ec: ExecutionContext): Fox[Unit] = for { - existingChunk: Map[Long, Long] <- getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId) ?~> "failed to get old segment to agglomerate chunk for updating it" + existingChunk: Map[Long, Long] <- getSegmentToAgglomerateChunkWithEmptyFallback(tracingId, chunkId) ?~> "failed to get old segment to agglomerate chunk for updating it" mergedMap = existingChunk ++ segmentIdsToUpdate.map(_ -> agglomerateId).toMap - _ = segmentToAgglomerateBuffer.put(editableMappingService.segmentToAgglomerateKey(editableMappingId, chunkId), - mergedMap) + _ = segmentToAgglomerateBuffer.put(segmentToAgglomerateKey(tracingId, chunkId), (mergedMap, false)) } yield () - private def getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId: String, chunkId: Long)( + private def getSegmentToAgglomerateChunkWithEmptyFallback(tracingId: String, chunkId: Long)( implicit ec: ExecutionContext): Fox[Map[Long, Long]] = { - val key = editableMappingService.segmentToAgglomerateKey(editableMappingId, chunkId) - val fromBufferOpt = segmentToAgglomerateBuffer.get(key) + val key = segmentToAgglomerateKey(tracingId, chunkId) + val fromBufferOpt = getFromSegmentToAgglomerateBuffer(key) Fox.fillOption(fromBufferOpt) { editableMappingService - .getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId, version = oldVersion) + .getSegmentToAgglomerateChunkWithEmptyFallback(tracingId, chunkId, version = oldVersion) .map(_.toMap) } } private def agglomerateGraphForIdWithFallback(mapping: EditableMappingInfo, agglomerateId: Long)( implicit ec: ExecutionContext): Fox[AgglomerateGraph] = { - val key = 
editableMappingService.agglomerateGraphKey(editableMappingId, agglomerateId) - val fromBufferOpt = agglomerateToGraphBuffer.get(key) + val key = agglomerateGraphKey(tracingId, agglomerateId) + val fromBufferOpt = getFromAgglomerateToGraphBuffer(key) fromBufferOpt.map(Fox.successful(_)).getOrElse { editableMappingService.getAgglomerateGraphForIdWithFallback(mapping, - editableMappingId, - Some(oldVersion), + tracingId, + oldVersion, agglomerateId, - remoteFallbackLayer, - userToken) + remoteFallbackLayer)(tokenContext) } } private def updateAgglomerateGraph(agglomerateId: Long, graph: AgglomerateGraph): Unit = { - val key = editableMappingService.agglomerateGraphKey(editableMappingId, agglomerateId) - agglomerateToGraphBuffer.put(key, graph) + val key = agglomerateGraphKey(tracingId, agglomerateId) + agglomerateToGraphBuffer.put(key, (graph, false)) } + private def emptyAgglomerateGraph = AgglomerateGraph(Seq(), Seq(), Seq(), Seq()) + private def splitGraph(agglomerateId: Long, agglomerateGraph: AgglomerateGraph, update: SplitAgglomerateUpdateAction, @@ -237,9 +268,9 @@ class EditableMappingUpdater( if (edgesAndAffinitiesMinusOne.length == agglomerateGraph.edges.length) { if (relyOnAgglomerateIds) { logger.warn( - s"Split action for editable mapping $editableMappingId: Edge to remove ($segmentId1 at ${update.segmentPosition1} in mag ${update.mag} to $segmentId2 at ${update.segmentPosition2} in mag ${update.mag} in agglomerate $agglomerateId) already absent. This split becomes a no-op.") + s"Split action for editable mapping $tracingId: Edge to remove ($segmentId1 at ${update.segmentPosition1} in mag ${update.mag} to $segmentId2 at ${update.segmentPosition2} in mag ${update.mag} in agglomerate $agglomerateId) already absent. 
This split becomes a no-op.") } - (agglomerateGraph, AgglomerateGraph(Seq(), Seq(), Seq(), Seq())) + (agglomerateGraph, emptyAgglomerateGraph) } else { val graph1Nodes: Set[Long] = computeConnectedComponent(startNode = segmentId1, @@ -308,8 +339,7 @@ class EditableMappingUpdater( private def largestAgglomerateId(mapping: EditableMappingInfo): Fox[Long] = for { largestBaseAgglomerateId <- remoteDatastoreClient.getLargestAgglomerateId(remoteFallbackLayer, - mapping.baseMappingName, - userToken) + mapping.baseMappingName)(tokenContext) } yield math.max(mapping.largestAgglomerateId, largestBaseAgglomerateId) private def applyMergeAction(mapping: EditableMappingInfo, update: MergeAgglomerateUpdateAction)( @@ -318,19 +348,17 @@ class EditableMappingUpdater( segmentId1 <- editableMappingService.findSegmentIdAtPositionIfNeeded(remoteFallbackLayer, update.segmentPosition1, update.segmentId1, - update.mag, - userToken) + update.mag)(tokenContext) segmentId2 <- editableMappingService.findSegmentIdAtPositionIfNeeded(remoteFallbackLayer, update.segmentPosition2, update.segmentId2, - update.mag, - userToken) + update.mag)(tokenContext) _ = if (segmentId1 == 0) logger.warn( - s"Merge action for editable mapping $editableMappingId: Looking up segment id at position ${update.segmentPosition1} in mag ${update.mag} returned invalid value zero. Merging outside of dataset?") + s"Merge action for editable mapping $tracingId: Looking up segment id at position ${update.segmentPosition1} in mag ${update.mag} returned invalid value zero. Merging outside of dataset?") _ = if (segmentId2 == 0) logger.warn( - s"Merge action for editable mapping $editableMappingId: Looking up segment id at position ${update.segmentPosition2} in mag ${update.mag} returned invalid value zero. Merging outside of dataset?") + s"Merge action for editable mapping $tracingId: Looking up segment id at position ${update.segmentPosition2} in mag ${update.mag} returned invalid value zero. 
Merging outside of dataset?") (agglomerateId1, agglomerateId2) <- agglomerateIdsForMergeAction(update, segmentId1, segmentId2) ?~> "Failed to look up agglomerate ids for merge action segments" agglomerateGraph1 <- agglomerateGraphForIdWithFallback(mapping, agglomerateId1) ?~> s"Failed to get agglomerate graph for id $agglomerateId1" agglomerateGraph2 <- agglomerateGraphForIdWithFallback(mapping, agglomerateId2) ?~> s"Failed to get agglomerate graph for id $agglomerateId2" @@ -382,8 +410,64 @@ class EditableMappingUpdater( agglomerateId: Long): Unit = if (!isValid && relyOnAgglomerateIds) { logger.warn( - s"Merge action for editable mapping $editableMappingId: segment $segmentId as looked up at $position in mag $mag is not present in agglomerate $agglomerateId. This merge becomes a no-op" + s"Merge action for editable mapping $tracingId: segment $segmentId as looked up at $position in mag $mag is not present in agglomerate $agglomerateId. This merge becomes a no-op" ) } + def revertToVersion(sourceVersion: Long)(implicit ec: ExecutionContext): Fox[Unit] = + for { + _ <- bool2Fox(sourceVersion <= oldVersion) ?~> "trying to revert editable mapping to a version not yet present in the database" + _ = segmentToAgglomerateBuffer.clear() + _ = agglomerateToGraphBuffer.clear() + segmentToAgglomerateChunkNewestStream = new VersionedSegmentToAgglomerateChunkIterator( + tracingId, + tracingDataStore.editableMappingsSegmentToAgglomerate) + _ <- Fox.serialCombined(segmentToAgglomerateChunkNewestStream) { + case (chunkKey, _, version) => + if (version > sourceVersion) { + editableMappingService.getSegmentToAgglomerateChunk(chunkKey, Some(sourceVersion)).futureBox.map { + case Full(chunkData) => segmentToAgglomerateBuffer.put(chunkKey, (chunkData.toMap, false)) + case Empty => segmentToAgglomerateBuffer.put(chunkKey, (Map[Long, Long](), true)) + case Failure(msg, _, chain) => + Fox.failure(msg, Empty, chain) + } + } else Fox.successful(()) + } + agglomerateToGraphNewestStream = 
new VersionedAgglomerateToGraphIterator( + tracingId, + tracingDataStore.editableMappingsAgglomerateToGraph) + _ <- Fox.serialCombined(agglomerateToGraphNewestStream) { + case (graphKey, _, version) => + if (version > sourceVersion) { + for { + agglomerateId <- agglomerateIdFromAgglomerateGraphKey(graphKey) + _ <- editableMappingService + .getAgglomerateGraphForId(tracingId, sourceVersion, agglomerateId) + .futureBox + .map { + case Full(graphData) => agglomerateToGraphBuffer.put(graphKey, (graphData, false)) + case Empty => agglomerateToGraphBuffer.put(graphKey, (emptyAgglomerateGraph, true)) + case Failure(msg, _, chain) => + Fox.failure(msg, Empty, chain) + } + } yield () + } else Fox.successful(()) + } + } yield () + + def newWithTargetVersion(currentMaterializedVersion: Long, targetVersion: Long): EditableMappingUpdater = + new EditableMappingUpdater( + annotationId, + tracingId, + baseMappingName, + currentMaterializedVersion, + targetVersion, + remoteFallbackLayer, + tokenContext, + remoteDatastoreClient, + editableMappingService, + annotationService, + tracingDataStore, + relyOnAgglomerateIds = relyOnAgglomerateIds + ) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index 66154842b1d..ad6c21d13d7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -2,19 +2,14 @@ package com.scalableminds.webknossos.tracingstore.tracings.skeleton import com.google.inject.Inject import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} -import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.util.tools.FoxImplicits import 
com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.{ProtoGeometryImplicits, SkeletonTracingDefaults} import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis -import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore -import com.scalableminds.webknossos.tracingstore.tracings.UpdateAction.SkeletonUpdateAction +import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreRedisStore} import com.scalableminds.webknossos.tracingstore.tracings._ -import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating._ -import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats -import net.liftweb.common.{Box, Empty, Full} -import play.api.i18n.MessagesProvider -import play.api.libs.json.{JsObject, JsValue, Json} +import net.liftweb.common.{Box, Full} import scala.concurrent.ExecutionContext @@ -23,6 +18,7 @@ class SkeletonTracingService @Inject()( val temporaryTracingStore: TemporaryTracingStore[SkeletonTracing], val handledGroupIdStore: TracingStoreRedisStore, val temporaryTracingIdStore: TracingStoreRedisStore, + val remoteWebknossosClient: TSRemoteWebknossosClient, val uncommittedUpdatesStore: TracingStoreRedisStore, val tracingMigrationService: SkeletonTracingMigrationService)(implicit val ec: ExecutionContext) extends TracingService[SkeletonTracing] @@ -36,110 +32,12 @@ class SkeletonTracingService @Inject()( implicit val tracingCompanion: SkeletonTracing.type = SkeletonTracing - implicit val updateActionJsonFormat: SkeletonUpdateAction.skeletonUpdateActionFormat.type = - SkeletonUpdateAction.skeletonUpdateActionFormat - - def currentVersion(tracingId: String): Fox[Long] = - tracingDataStore.skeletonUpdates.getVersion(tracingId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) - - def currentVersion(tracing: SkeletonTracing): 
Long = tracing.version - - def handleUpdateGroup(tracingId: String, - updateActionGroup: UpdateActionGroup[SkeletonTracing], - previousVersion: Long, - userToken: Option[String]): Fox[_] = - tracingDataStore.skeletonUpdates.put( - tracingId, - updateActionGroup.version, - updateActionGroup.actions - .map(_.addTimestamp(updateActionGroup.timestamp).addAuthorId(updateActionGroup.authorId)) match { //to the first action in the group, attach the group's info - case Nil => Nil - case first :: rest => first.addInfo(updateActionGroup.info) :: rest - } - ) - - override def applyPendingUpdates(tracing: SkeletonTracing, - tracingId: String, - desiredVersion: Option[Long]): Fox[SkeletonTracing] = { - val existingVersion = tracing.version - findDesiredOrNewestPossibleVersion(tracing, tracingId, desiredVersion).flatMap { newVersion => - if (newVersion > existingVersion) { - for { - pendingUpdates <- findPendingUpdates(tracingId, existingVersion, newVersion) - updatedTracing <- update(tracing, tracingId, pendingUpdates, newVersion) - _ <- save(updatedTracing, Some(tracingId), newVersion) - } yield updatedTracing - } else { - Full(tracing) - } - } - } - - private def findDesiredOrNewestPossibleVersion(tracing: SkeletonTracing, - tracingId: String, - desiredVersion: Option[Long]): Fox[Long] = - /* - * Determines the newest saved version from the updates column. 
- * if there are no updates at all, assume tracing is brand new (possibly created from NML, - * hence the emptyFallbck tracing.version) - */ - for { - newestUpdateVersion <- tracingDataStore.skeletonUpdates.getVersion(tracingId, - mayBeEmpty = Some(true), - emptyFallback = Some(tracing.version)) - } yield { - desiredVersion match { - case None => newestUpdateVersion - case Some(desiredSome) => math.min(desiredSome, newestUpdateVersion) - } - } - - private def findPendingUpdates(tracingId: String, - existingVersion: Long, - desiredVersion: Long): Fox[List[SkeletonUpdateAction]] = - if (desiredVersion == existingVersion) Fox.successful(List()) - else { - for { - updateActionGroups <- tracingDataStore.skeletonUpdates.getMultipleVersions( - tracingId, - Some(desiredVersion), - Some(existingVersion + 1))(fromJsonBytes[List[SkeletonUpdateAction]]) - } yield updateActionGroups.reverse.flatten - } - - private def update(tracing: SkeletonTracing, - tracingId: String, - updates: List[SkeletonUpdateAction], - newVersion: Long): Fox[SkeletonTracing] = { - def updateIter(tracingFox: Fox[SkeletonTracing], - remainingUpdates: List[SkeletonUpdateAction]): Fox[SkeletonTracing] = - tracingFox.futureBox.flatMap { - case Empty => Fox.empty - case Full(tracing) => - remainingUpdates match { - case List() => Fox.successful(tracing) - case RevertToVersionAction(sourceVersion, _, _, _) :: tail => - val sourceTracing = find(tracingId, Some(sourceVersion), useCache = false, applyUpdates = true) - updateIter(sourceTracing, tail) - case update :: tail => updateIter(Full(update.applyOn(tracing)), tail) - } - case _ => tracingFox - } - - updates match { - case List() => Full(tracing) - case _ :: _ => - for { - updated <- updateIter(Some(tracing), updates) - } yield updated.withVersion(newVersion) - } - } - - def duplicate(tracing: SkeletonTracing, - fromTask: Boolean, - editPosition: Option[Vec3Int], - editRotation: Option[Vec3Double], - boundingBox: Option[BoundingBox]): Fox[String] = { + def 
adaptSkeletonForDuplicate(tracing: SkeletonTracing, + fromTask: Boolean, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double], + boundingBox: Option[BoundingBox], + newVersion: Long): SkeletonTracing = { val taskBoundingBox = if (fromTask) { tracing.boundingBox.map { bb => val newId = if (tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 @@ -154,16 +52,13 @@ class SkeletonTracingService @Inject()( editPosition = editPosition.map(vec3IntToProto).getOrElse(tracing.editPosition), editRotation = editRotation.map(vec3DoubleToProto).getOrElse(tracing.editRotation), boundingBox = boundingBoxOptToProto(boundingBox).orElse(tracing.boundingBox), - version = 0 + version = newVersion ) .addAllUserBoundingBoxes(taskBoundingBox) - val finalTracing = if (fromTask) newTracing.clearBoundingBox else newTracing - save(finalTracing, None, finalTracing.version) + if (fromTask) newTracing.clearBoundingBox else newTracing } - def merge(tracings: Seq[SkeletonTracing], - mergedVolumeStats: MergedVolumeStats, - newEditableMappingIdOpt: Option[String]): Box[SkeletonTracing] = + def merge(tracings: Seq[SkeletonTracing]): Box[SkeletonTracing] = for { tracing <- tracings.map(Full(_)).reduceLeft(mergeTwo) } yield @@ -198,59 +93,12 @@ class SkeletonTracingService @Inject()( ) // Can be removed again when https://github.com/scalableminds/webknossos/issues/5009 is fixed - override def remapTooLargeTreeIds(skeletonTracing: SkeletonTracing): SkeletonTracing = + def remapTooLargeTreeIds(skeletonTracing: SkeletonTracing): SkeletonTracing = if (skeletonTracing.trees.exists(_.treeId > 1048576)) { val newTrees = for ((tree, index) <- skeletonTracing.trees.zipWithIndex) yield tree.withTreeId(index + 1) skeletonTracing.withTrees(newTrees) } else skeletonTracing - def mergeVolumeData(tracingSelectors: Seq[TracingSelector], - tracings: Seq[SkeletonTracing], - newId: String, - newVersion: Long, - toCache: Boolean, - userToken: Option[String])(implicit mp: 
MessagesProvider): Fox[MergedVolumeStats] = - Fox.successful(MergedVolumeStats.empty()) - - def updateActionLog(tracingId: String, newestVersion: Option[Long], oldestVersion: Option[Long]): Fox[JsValue] = { - def versionedTupleToJson(tuple: (Long, List[SkeletonUpdateAction])): JsObject = - Json.obj( - "version" -> tuple._1, - "value" -> Json.toJson(tuple._2) - ) - for { - updateActionGroups <- tracingDataStore.skeletonUpdates.getMultipleVersionsAsVersionValueTuple( - tracingId, - newestVersion, - oldestVersion)(fromJsonBytes[List[SkeletonUpdateAction]]) - updateActionGroupsJs = updateActionGroups.map(versionedTupleToJson) - } yield Json.toJson(updateActionGroupsJs) - } - - def updateActionStatistics(tracingId: String): Fox[JsObject] = - for { - updateActionGroups <- tracingDataStore.skeletonUpdates.getMultipleVersions(tracingId)( - fromJsonBytes[List[SkeletonUpdateAction]]) - updateActions = updateActionGroups.flatten - } yield { - Json.obj( - "updateTracingActionCount" -> updateActions.count { - case _: UpdateTracingSkeletonAction => true - case _ => false - }, - "createNodeActionCount" -> updateActions.count { - case _: CreateNodeSkeletonAction => true - case _ => false - }, - "deleteNodeActionCount" -> updateActions.count { - case _: DeleteNodeSkeletonAction => true - case _ => false - } - ) - } - def dummyTracing: SkeletonTracing = SkeletonTracingDefaults.createInstance - def mergeEditableMappings(tracingsWithIds: List[(SkeletonTracing, String)], userToken: Option[String]): Fox[String] = - Fox.empty } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala index c34e5ae8c74..1ca66e9f5cd 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala @@ -1,13 +1,18 @@ package com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating -import com.scalableminds.webknossos.datastore.SkeletonTracing._ import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} +import com.scalableminds.webknossos.datastore.SkeletonTracing.{Edge, Node, SkeletonTracing, Tree, TreeGroup} import com.scalableminds.webknossos.datastore.helpers.{NodeDefaults, ProtoGeometryImplicits} import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate +import com.scalableminds.webknossos.tracingstore.annotation.{LayerUpdateAction, UpdateAction} import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.TreeType.TreeType import play.api.libs.json._ +trait SkeletonUpdateAction extends LayerUpdateAction { + def applyOn(tracing: SkeletonTracing): SkeletonTracing +} + case class CreateTreeSkeletonAction(id: Int, color: Option[com.scalableminds.util.image.Color], name: String, @@ -16,13 +21,14 @@ case class CreateTreeSkeletonAction(id: Int, comments: List[UpdateActionComment], groupId: Option[Int], isVisible: Option[Boolean], + `type`: Option[TreeType] = None, + edgesAreVisible: Option[Boolean], + metadata: Option[Seq[MetadataEntry]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - `type`: Option[TreeType] = None, - edgesAreVisible: Option[Boolean] = None, - metadata: Option[Seq[MetadataEntry]] = None) - extends UpdateAction.SkeletonUpdateAction + info: Option[String] = None) + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { val newTree = Tree( @@ -43,26 +49,31 @@ case class CreateTreeSkeletonAction(id: Int, tracing.withTrees(newTree +: tracing.trees) } 
- override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class DeleteTreeSkeletonAction(id: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction { + extends SkeletonUpdateAction { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing.withTrees(tracing.trees.filter(_.treeId != id)) - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class UpdateTreeSkeletonAction(id: Int, @@ -72,12 +83,13 @@ case class UpdateTreeSkeletonAction(id: Int, branchPoints: List[UpdateActionBranchPoint], comments: List[UpdateActionComment], groupId: Option[Int], + `type`: Option[TreeType] = None, + 
metadata: Option[Seq[MetadataEntry]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - `type`: Option[TreeType] = None, - metadata: Option[Seq[MetadataEntry]] = None) - extends UpdateAction.SkeletonUpdateAction + info: Option[String] = None) + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = @@ -95,20 +107,24 @@ case class UpdateTreeSkeletonAction(id: Int, tracing.withTrees(mapTrees(tracing, id, treeTransform)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class MergeTreeSkeletonAction(sourceId: Int, targetId: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { + // only nodes and edges are merged here, // other properties are managed explicitly // by the frontend with extra actions @@ -123,21 +139,25 @@ case class MergeTreeSkeletonAction(sourceId: Int, tracing.withTrees(mapTrees(tracing, targetId, treeTransform).filter(_.treeId != sourceId)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def 
addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class MoveTreeComponentSkeletonAction(nodeIds: List[Int], sourceId: Int, targetId: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { + // this should only move a whole component, // that is disjoint from the rest of the tree override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { @@ -161,51 +181,60 @@ case class MoveTreeComponentSkeletonAction(nodeIds: List[Int], tracing.withTrees(tracing.trees.map(selectTree)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class CreateEdgeSkeletonAction(source: Int, target: Int, treeId: Int, + 
actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.withEdges(Edge(source, target) +: tree.edges) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class DeleteEdgeSkeletonAction(source: Int, target: Int, treeId: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.copy(edges = tree.edges.filter(_ != Edge(source, target))) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def 
addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) + } case class CreateNodeSkeletonAction(id: Int, @@ -218,11 +247,12 @@ case class CreateNodeSkeletonAction(id: Int, interpolation: Option[Boolean], treeId: Int, timestamp: Long, + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) - extends UpdateAction.SkeletonUpdateAction + info: Option[String] = None) + extends SkeletonUpdateAction with SkeletonUpdateActionHelper with ProtoGeometryImplicits { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { @@ -245,11 +275,13 @@ case class CreateNodeSkeletonAction(id: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class UpdateNodeSkeletonAction(id: Int, @@ -262,11 +294,12 @@ case class UpdateNodeSkeletonAction(id: Int, 
interpolation: Option[Boolean], treeId: Int, timestamp: Long, + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) - extends UpdateAction.SkeletonUpdateAction + info: Option[String] = None) + extends SkeletonUpdateAction with SkeletonUpdateActionHelper with ProtoGeometryImplicits { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { @@ -291,20 +324,22 @@ case class UpdateNodeSkeletonAction(id: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class DeleteNodeSkeletonAction(nodeId: Int, treeId: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { @@ -314,27 +349,32 @@ case class DeleteNodeSkeletonAction(nodeId: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def 
addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class UpdateTreeGroupsSkeletonAction(treeGroups: List[UpdateActionTreeGroup], + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing.withTreeGroups(treeGroups.map(convertTreeGroup)) - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class UpdateTracingSkeletonAction(activeNode: Option[Int], @@ -342,11 +382,12 @@ case class UpdateTracingSkeletonAction(activeNode: Option[Int], editRotation: com.scalableminds.util.geometry.Vec3Double, zoomLevel: Double, userBoundingBox: 
Option[com.scalableminds.util.geometry.BoundingBox], + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None, editPositionAdditionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with ProtoGeometryImplicits { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing.copy( @@ -358,35 +399,24 @@ case class UpdateTracingSkeletonAction(activeNode: Option[Int], editPositionAdditionalCoordinates = AdditionalCoordinate.toProto(editPositionAdditionalCoordinates) ) - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - override def isViewOnlyChange: Boolean = true -} - -case class RevertToVersionAction(sourceVersion: Long, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = - throw new Exception("RevertToVersionAction applied on unversioned tracing") + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = - this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: 
Option[String]): UpdateAction[SkeletonTracing] = - this.copy(actionAuthorId = authorId) + override def isViewOnlyChange: Boolean = true } -case class UpdateTreeVisibility(treeId: Int, - isVisible: Boolean, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction +case class UpdateTreeVisibilitySkeletonAction(treeId: Int, + isVisible: Boolean, + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.copy(isVisible = Some(isVisible)) @@ -394,20 +424,24 @@ case class UpdateTreeVisibility(treeId: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) + override def isViewOnlyChange: Boolean = true } -case class UpdateTreeGroupVisibility(treeGroupId: Option[Int], - isVisible: Boolean, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction +case class UpdateTreeGroupVisibilitySkeletonAction(treeGroupId: Option[Int], + isVisible: Boolean, + 
actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def updateTreeGroups(treeGroups: Seq[TreeGroup]) = { @@ -431,56 +465,64 @@ case class UpdateTreeGroupVisibility(treeGroupId: Option[Int], } } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - override def isViewOnlyChange: Boolean = true + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } -case class UpdateTreeEdgesVisibility(treeId: Int, - edgesAreVisible: Boolean, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction +case class UpdateTreeEdgesVisibilitySkeletonAction(treeId: Int, + edgesAreVisible: Boolean, + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.copy(edgesAreVisible = Some(edgesAreVisible)) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def 
addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - override def isViewOnlyChange: Boolean = true + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } -case class UpdateUserBoundingBoxes(boundingBoxes: List[NamedBoundingBox], - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction { +case class UpdateUserBoundingBoxesSkeletonAction(boundingBoxes: List[NamedBoundingBox], + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends SkeletonUpdateAction { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto)) - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } -case class 
UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], - isVisible: Boolean, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction { +case class UpdateUserBoundingBoxVisibilitySkeletonAction(boundingBoxId: Option[Int], + isVisible: Boolean, + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends SkeletonUpdateAction { override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def updateUserBoundingBoxes() = tracing.userBoundingBoxes.map { boundingBox => @@ -493,26 +535,14 @@ case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], tracing.withUserBoundingBoxes(updateUserBoundingBoxes()) } - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - override def isViewOnlyChange: Boolean = true -} - -case class UpdateTdCamera(actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction { - - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = - this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): 
UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = - this.copy(actionAuthorId = authorId) override def isViewOnlyChange: Boolean = true } @@ -552,104 +582,22 @@ object UpdateTreeGroupsSkeletonAction { object UpdateTracingSkeletonAction { implicit val jsonFormat: OFormat[UpdateTracingSkeletonAction] = Json.format[UpdateTracingSkeletonAction] } -object RevertToVersionAction { - implicit val jsonFormat: OFormat[RevertToVersionAction] = Json.format[RevertToVersionAction] -} -object UpdateTreeVisibility { - implicit val jsonFormat: OFormat[UpdateTreeVisibility] = Json.format[UpdateTreeVisibility] -} -object UpdateTreeGroupVisibility { - implicit val jsonFormat: OFormat[UpdateTreeGroupVisibility] = Json.format[UpdateTreeGroupVisibility] -} -object UpdateTreeEdgesVisibility { - implicit val jsonFormat: OFormat[UpdateTreeEdgesVisibility] = Json.format[UpdateTreeEdgesVisibility] -} -object UpdateUserBoundingBoxes { - implicit val jsonFormat: OFormat[UpdateUserBoundingBoxes] = Json.format[UpdateUserBoundingBoxes] -} -object UpdateUserBoundingBoxVisibility { - implicit val jsonFormat: OFormat[UpdateUserBoundingBoxVisibility] = Json.format[UpdateUserBoundingBoxVisibility] -} -object UpdateTdCamera { implicit val jsonFormat: OFormat[UpdateTdCamera] = Json.format[UpdateTdCamera] } - -object SkeletonUpdateAction { - - implicit object skeletonUpdateActionFormat extends Format[UpdateAction[SkeletonTracing]] { - override def reads(json: JsValue): JsResult[UpdateAction.SkeletonUpdateAction] = { - val jsonValue = (json \ "value").as[JsObject] - (json \ "name").as[String] match { - case "createTree" => deserialize[CreateTreeSkeletonAction](jsonValue) - case "deleteTree" => deserialize[DeleteTreeSkeletonAction](jsonValue) - case "updateTree" => deserialize[UpdateTreeSkeletonAction](jsonValue) - case "mergeTree" => deserialize[MergeTreeSkeletonAction](jsonValue) - case "moveTreeComponent" => 
deserialize[MoveTreeComponentSkeletonAction](jsonValue) - case "createNode" => deserialize[CreateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) - case "deleteNode" => deserialize[DeleteNodeSkeletonAction](jsonValue) - case "updateNode" => deserialize[UpdateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) - case "createEdge" => deserialize[CreateEdgeSkeletonAction](jsonValue) - case "deleteEdge" => deserialize[DeleteEdgeSkeletonAction](jsonValue) - case "updateTreeGroups" => deserialize[UpdateTreeGroupsSkeletonAction](jsonValue) - case "updateTracing" => deserialize[UpdateTracingSkeletonAction](jsonValue) - case "revertToVersion" => deserialize[RevertToVersionAction](jsonValue) - case "updateTreeVisibility" => deserialize[UpdateTreeVisibility](jsonValue) - case "updateTreeGroupVisibility" => deserialize[UpdateTreeGroupVisibility](jsonValue) - case "updateTreeEdgesVisibility" => deserialize[UpdateTreeEdgesVisibility](jsonValue) - case "updateUserBoundingBoxes" => deserialize[UpdateUserBoundingBoxes](jsonValue) - case "updateUserBoundingBoxVisibility" => deserialize[UpdateUserBoundingBoxVisibility](jsonValue) - case "updateTdCamera" => deserialize[UpdateTdCamera](jsonValue) - } - } - - private def deserialize[T](json: JsValue, shouldTransformPositions: Boolean = false)( - implicit tjs: Reads[T]): JsResult[T] = - if (shouldTransformPositions) - json.transform(positionTransform).get.validate[T] - else - json.validate[T] - - private val positionTransform = - (JsPath \ "position").json.update(JsPath.read[List[Float]].map(position => Json.toJson(position.map(_.toInt)))) - - override def writes(a: UpdateAction[SkeletonTracing]): JsObject = a match { - case s: CreateTreeSkeletonAction => - Json.obj("name" -> "createTree", "value" -> Json.toJson(s)(CreateTreeSkeletonAction.jsonFormat)) - case s: DeleteTreeSkeletonAction => - Json.obj("name" -> "deleteTree", "value" -> Json.toJson(s)(DeleteTreeSkeletonAction.jsonFormat)) - case s: 
UpdateTreeSkeletonAction => - Json.obj("name" -> "updateTree", "value" -> Json.toJson(s)(UpdateTreeSkeletonAction.jsonFormat)) - case s: MergeTreeSkeletonAction => - Json.obj("name" -> "mergeTree", "value" -> Json.toJson(s)(MergeTreeSkeletonAction.jsonFormat)) - case s: MoveTreeComponentSkeletonAction => - Json.obj("name" -> "moveTreeComponent", "value" -> Json.toJson(s)(MoveTreeComponentSkeletonAction.jsonFormat)) - case s: CreateNodeSkeletonAction => - Json.obj("name" -> "createNode", "value" -> Json.toJson(s)(CreateNodeSkeletonAction.jsonFormat)) - case s: DeleteNodeSkeletonAction => - Json.obj("name" -> "deleteNode", "value" -> Json.toJson(s)(DeleteNodeSkeletonAction.jsonFormat)) - case s: UpdateNodeSkeletonAction => - Json.obj("name" -> "updateNode", "value" -> Json.toJson(s)(UpdateNodeSkeletonAction.jsonFormat)) - case s: CreateEdgeSkeletonAction => - Json.obj("name" -> "createEdge", "value" -> Json.toJson(s)(CreateEdgeSkeletonAction.jsonFormat)) - case s: DeleteEdgeSkeletonAction => - Json.obj("name" -> "deleteEdge", "value" -> Json.toJson(s)(DeleteEdgeSkeletonAction.jsonFormat)) - case s: UpdateTreeGroupsSkeletonAction => - Json.obj("name" -> "updateTreeGroups", "value" -> Json.toJson(s)(UpdateTreeGroupsSkeletonAction.jsonFormat)) - case s: UpdateTracingSkeletonAction => - Json.obj("name" -> "updateTracing", "value" -> Json.toJson(s)(UpdateTracingSkeletonAction.jsonFormat)) - case s: RevertToVersionAction => - Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionAction.jsonFormat)) - case s: UpdateTreeVisibility => - Json.obj("name" -> "updateTreeVisibility", "value" -> Json.toJson(s)(UpdateTreeVisibility.jsonFormat)) - case s: UpdateTreeGroupVisibility => - Json.obj("name" -> "updateTreeGroupVisibility", "value" -> Json.toJson(s)(UpdateTreeGroupVisibility.jsonFormat)) - case s: UpdateTreeEdgesVisibility => - Json.obj("name" -> "updateTreeEdgesVisibility", "value" -> Json.toJson(s)(UpdateTreeEdgesVisibility.jsonFormat)) - case s: 
UpdateUserBoundingBoxes => - Json.obj("name" -> "updateUserBoundingBoxes", "value" -> Json.toJson(s)(UpdateUserBoundingBoxes.jsonFormat)) - case s: UpdateUserBoundingBoxVisibility => - Json.obj("name" -> "updateUserBoundingBoxVisibility", - "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibility.jsonFormat)) - case s: UpdateTdCamera => - Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCamera.jsonFormat)) - } - } +object UpdateTreeVisibilitySkeletonAction { + implicit val jsonFormat: OFormat[UpdateTreeVisibilitySkeletonAction] = Json.format[UpdateTreeVisibilitySkeletonAction] +} +object UpdateTreeGroupVisibilitySkeletonAction { + implicit val jsonFormat: OFormat[UpdateTreeGroupVisibilitySkeletonAction] = + Json.format[UpdateTreeGroupVisibilitySkeletonAction] +} +object UpdateTreeEdgesVisibilitySkeletonAction { + implicit val jsonFormat: OFormat[UpdateTreeEdgesVisibilitySkeletonAction] = + Json.format[UpdateTreeEdgesVisibilitySkeletonAction] +} +object UpdateUserBoundingBoxesSkeletonAction { + implicit val jsonFormat: OFormat[UpdateUserBoundingBoxesSkeletonAction] = + Json.format[UpdateUserBoundingBoxesSkeletonAction] +} +object UpdateUserBoundingBoxVisibilitySkeletonAction { + implicit val jsonFormat: OFormat[UpdateUserBoundingBoxVisibilitySkeletonAction] = + Json.format[UpdateUserBoundingBoxVisibilitySkeletonAction] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala index 3b3c99db295..1f3c716ffd7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume +import 
com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox @@ -15,6 +16,7 @@ import com.scalableminds.webknossos.datastore.models.{ WebknossosAdHocMeshRequest } import com.scalableminds.webknossos.datastore.services.{FullMeshHelper, FullMeshRequest} +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings.FallbackDataHelper import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} @@ -25,6 +27,7 @@ import scala.concurrent.ExecutionContext class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, editableMappingService: EditableMappingService, + annotationService: TSAnnotationService, volumeSegmentIndexService: VolumeSegmentIndexService, val remoteDatastoreClient: TSRemoteDatastoreClient, val remoteWebknossosClient: TSRemoteWebknossosClient) @@ -33,66 +36,70 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, with FullMeshHelper with LazyLogging { - def loadFor(token: Option[String], tracingId: String, fullMeshRequest: FullMeshRequest)( - implicit ec: ExecutionContext): Fox[Array[Byte]] = + def loadFor(annotationId: String, tracingId: String, fullMeshRequest: FullMeshRequest)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Array[Byte]] = for { - tracing <- volumeTracingService.find(tracingId) ?~> "tracing.notFound" + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> "tracing.notFound" data <- if (fullMeshRequest.meshFileName.isDefined) - loadFullMeshFromMeshfile(token, tracing, tracingId, fullMeshRequest) - else loadFullMeshFromAdHoc(token, tracing, tracingId, fullMeshRequest) + loadFullMeshFromMeshfile(annotationId, tracingId, tracing, 
fullMeshRequest) + else loadFullMeshFromAdHoc(annotationId, tracingId, tracing, fullMeshRequest) } yield data private def loadFullMeshFromMeshfile( - token: Option[String], - tracing: VolumeTracing, + annotationId: String, tracingId: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext): Fox[Array[Byte]] = + tracing: VolumeTracing, + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Array[Byte]] = for { remoteFallbackLayer <- remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - baseMappingName <- volumeTracingService.baseMappingName(tracing) + baseMappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) fullMeshRequestAdapted = if (tracing.getHasEditableMapping) fullMeshRequest.copy(mappingName = baseMappingName, editableMappingTracingId = Some(tracingId), mappingType = Some("HDF5")) else fullMeshRequest - array <- remoteDatastoreClient.loadFullMeshStl(token, remoteFallbackLayer, fullMeshRequestAdapted) + array <- remoteDatastoreClient.loadFullMeshStl(remoteFallbackLayer, fullMeshRequestAdapted) } yield array - private def loadFullMeshFromAdHoc(token: Option[String], - tracing: VolumeTracing, - tracingId: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext): Fox[Array[Byte]] = + private def loadFullMeshFromAdHoc( + annotationId: String, + tracingId: String, + tracing: VolumeTracing, + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Array[Byte]] = for { mag <- fullMeshRequest.mag.toFox ?~> "mag.neededForAdHoc" _ <- bool2Fox(tracing.mags.contains(vec3IntToProto(mag))) ?~> "mag.notPresentInTracing" before = Instant.now - voxelSize <- remoteDatastoreClient.voxelSizeForTracingWithCache(tracingId, token) ?~> "voxelSize.failedToFetch" + voxelSize <- remoteDatastoreClient.voxelSizeForTracingWithCache(tracingId) ?~> "voxelSize.failedToFetch" verticesForChunks <- if (tracing.hasSegmentIndex.getOrElse(false)) - 
getAllAdHocChunksWithSegmentIndex(token, tracing, tracingId, mag, voxelSize, fullMeshRequest) + getAllAdHocChunksWithSegmentIndex(annotationId, tracingId, tracing, mag, voxelSize, fullMeshRequest) else - getAllAdHocChunksWithNeighborLogic(token, - tracing, - tracingId, - mag, - voxelSize, - fullMeshRequest, - fullMeshRequest.seedPosition.map(sp => VoxelPosition(sp.x, sp.y, sp.z, mag)), - adHocChunkSize) + getAllAdHocChunksWithNeighborLogic( + tracing, + annotationId, + tracingId, + mag, + voxelSize, + fullMeshRequest, + fullMeshRequest.seedPosition.map(sp => VoxelPosition(sp.x, sp.y, sp.z, mag)), + adHocChunkSize + ) encoded = verticesForChunks.map(adHocMeshToStl) array = combineEncodedChunksToStl(encoded) _ = logMeshingDuration(before, "ad-hoc meshing (tracingstore)", array.length) } yield array private def getAllAdHocChunksWithSegmentIndex( - token: Option[String], - tracing: VolumeTracing, + annotationId: String, tracingId: String, + tracing: VolumeTracing, mag: Vec3Int, voxelSize: VoxelSize, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext): Fox[List[Array[Float]]] = + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, tc: TokenContext): Fox[List[Array[Float]]] = for { - fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId) - mappingName <- volumeTracingService.baseMappingName(tracing) + fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId, tracing) + mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) bucketPositionsRaw: ListOfVec3IntProto <- volumeSegmentIndexService .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( fallbackLayer, @@ -103,8 +110,7 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, mappingName = mappingName, editableMappingTracingId = volumeTracingService.editableMappingTracingId(tracing, tracingId), fullMeshRequest.additionalCoordinates, - AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), - token + 
AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes) ) bucketPositions = bucketPositionsRaw.values .map(vec3IntFromProto) @@ -124,13 +130,13 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, fullMeshRequest.additionalCoordinates, findNeighbors = false ) - loadMeshChunkFromAdHoc(token, tracing, adHocMeshRequest, tracingId) + loadMeshChunkFromAdHoc(tracing, adHocMeshRequest, annotationId, tracingId) } allVertices = vertexChunksWithNeighbors.map(_._1) } yield allVertices - private def getAllAdHocChunksWithNeighborLogic(token: Option[String], - tracing: VolumeTracing, + private def getAllAdHocChunksWithNeighborLogic(tracing: VolumeTracing, + annotationId: String, tracingId: String, mag: Vec3Int, voxelSize: VoxelSize, @@ -139,7 +145,8 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, chunkSize: Vec3Int, visited: collection.mutable.Set[VoxelPosition] = collection.mutable.Set[VoxelPosition]())( - implicit ec: ExecutionContext): Fox[List[Array[Float]]] = + implicit ec: ExecutionContext, + tc: TokenContext): Fox[List[Array[Float]]] = for { topLeft <- topLeftOpt.toFox ?~> "seedPosition.neededForAdHoc" adHocMeshRequest = WebknossosAdHocMeshRequest( @@ -153,12 +160,12 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, fullMeshRequest.additionalCoordinates ) _ = visited += topLeft - (vertices: Array[Float], neighbors) <- loadMeshChunkFromAdHoc(token, tracing, adHocMeshRequest, tracingId) + (vertices: Array[Float], neighbors) <- loadMeshChunkFromAdHoc(tracing, adHocMeshRequest, annotationId, tracingId) nextPositions: List[VoxelPosition] = generateNextTopLeftsFromNeighbors(topLeft, neighbors, chunkSize, visited) _ = visited ++= nextPositions neighborVerticesNested <- Fox.serialCombined(nextPositions) { position: VoxelPosition => - getAllAdHocChunksWithNeighborLogic(token, - tracing, + getAllAdHocChunksWithNeighborLogic(tracing, + annotationId, tracingId, mag, voxelSize, @@ -170,11 
+177,12 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, allVertices: List[Array[Float]] = vertices +: neighborVerticesNested.flatten } yield allVertices - private def loadMeshChunkFromAdHoc(token: Option[String], - tracing: VolumeTracing, + private def loadMeshChunkFromAdHoc(tracing: VolumeTracing, adHocMeshRequest: WebknossosAdHocMeshRequest, - tracingId: String): Fox[(Array[Float], List[Int])] = - if (tracing.getHasEditableMapping) - editableMappingService.createAdHocMesh(tracing, tracingId, adHocMeshRequest, token) - else volumeTracingService.createAdHocMesh(tracingId, adHocMeshRequest, token) + annotationId: String, + tracingId: String)(implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = + if (tracing.getHasEditableMapping) { + val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) + editableMappingService.createAdHocMesh(mappingLayer, adHocMeshRequest) + } else volumeTracingService.createAdHocMesh(tracingId, tracing, adHocMeshRequest) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala index 5b9fb21fad4..c16fa929d47 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala @@ -25,11 +25,7 @@ import java.util.zip.{ZipEntry, ZipFile} import scala.collection.mutable import scala.concurrent.ExecutionContext -trait VolumeDataZipHelper - extends WKWDataFormatHelper - with VolumeBucketReversionHelper - with BoxImplicits - with LazyLogging { +trait VolumeDataZipHelper extends WKWDataFormatHelper with ReversionHelper with BoxImplicits with LazyLogging { protected def withBucketsFromZip(zipFile: File)(block: (BucketPosition, 
Array[Byte]) => Fox[Unit])( implicit ec: ExecutionContext): Fox[Unit] = @@ -61,7 +57,7 @@ trait VolumeDataZipHelper parseWKWFilePath(fileName.toString).map { bucketPosition: BucketPosition => if (buckets.hasNext) { val data = buckets.next() - if (!isRevertedBucket(data)) { + if (!isRevertedElement(data)) { block(bucketPosition, data) } else Fox.successful(()) } else Fox.successful(()) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala index 755cc665464..9d0d35cf2e3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto @@ -33,7 +34,7 @@ class VolumeSegmentIndexBuffer(tracingId: String, remoteDatastoreClient: TSRemoteDatastoreClient, fallbackLayer: Option[RemoteFallbackLayer], additionalAxes: Option[Seq[AdditionalAxis]], - userToken: Option[String]) + tc: TokenContext) extends KeyValueStoreImplicits with SegmentIndexKeyHelper with ProtoGeometryImplicits @@ -86,12 +87,7 @@ class VolumeSegmentIndexBuffer(tracingId: String, .fillEmpty(ListOfVec3IntProto.of(Seq())) data <- fallbackLayer match { case Some(layer) if fossilDbData.length == 0 => - remoteDatastoreClient.querySegmentIndex(layer, - segmentId, - mag, - mappingName, - editableMappingTracingId, - userToken) + remoteDatastoreClient.querySegmentIndex(layer, segmentId, mag, mappingName, editableMappingTracingId)(tc) case _ => 
Fox.successful(fossilDbData.values.map(vec3IntFromProto)) } } yield ListOfVec3IntProto(data.map(vec3IntToProto)) @@ -168,13 +164,8 @@ class VolumeSegmentIndexBuffer(tracingId: String, fileBucketPositions <- fallbackLayer match { case Some(layer) => for { - fileBucketPositionsOpt <- Fox.runIf(missesSoFar.nonEmpty)( - remoteDatastoreClient.querySegmentIndexForMultipleSegments(layer, - missesSoFar, - mag, - mappingName, - editableMappingTracingId, - userToken)) + fileBucketPositionsOpt <- Fox.runIf(missesSoFar.nonEmpty)(remoteDatastoreClient + .querySegmentIndexForMultipleSegments(layer, missesSoFar, mag, mappingName, editableMappingTracingId)(tc)) fileBucketPositions = fileBucketPositionsOpt.getOrElse(Seq()) _ = fileBucketPositions.map { case (segmentId, positions) => diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala index 150c4938bb6..4416f678d9a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.box2Fox @@ -25,11 +26,11 @@ import net.liftweb.common.Box.tryo import scala.concurrent.ExecutionContext object VolumeSegmentIndexService { - def canHaveSegmentIndex(remoteDatastoreClient: TSRemoteDatastoreClient, - fallbackLayer: Option[RemoteFallbackLayer], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[Boolean] = + def canHaveSegmentIndex(remoteDatastoreClient: 
TSRemoteDatastoreClient, fallbackLayer: Option[RemoteFallbackLayer])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Boolean] = fallbackLayer match { - case Some(layer) => remoteDatastoreClient.hasSegmentIndexFile(layer, userToken) + case Some(layer) => remoteDatastoreClient.hasSegmentIndexFile(layer) case None => Fox.successful(true) } } @@ -43,6 +44,7 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore with ProtoGeometryImplicits with VolumeBucketCompression with SegmentIndexKeyHelper + with ReversionHelper with LazyLogging { private val volumeSegmentIndexClient: FossilDBClient = tracingDataStore.volumeSegmentIndex @@ -59,8 +61,14 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore mappingName: Option[String], editableMappingTracingId: Option[String])(implicit ec: ExecutionContext): Fox[Unit] = for { - bucketBytesDecompressed <- tryo( - decompressIfNeeded(bucketBytes, expectedUncompressedBucketSizeFor(elementClass), "")).toFox + bucketBytesDecompressed <- if (isRevertedElement(bucketBytes)) { + Fox.successful(emptyArrayForElementClass(elementClass)) + } else { + tryo( + decompressIfNeeded(bucketBytes, + expectedUncompressedBucketSizeFor(elementClass), + "updating segment index, new bucket data")).toFox + } // previous bytes: include fallback layer bytes if available, otherwise use empty bytes previousBucketBytesWithEmptyFallback <- bytesWithEmptyFallback(previousBucketBytesBox, elementClass) ?~> "volumeSegmentIndex.update.getPreviousBucket.failed" segmentIds: Set[Long] <- collectSegmentIds(bucketBytesDecompressed, elementClass) @@ -87,11 +95,14 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore private def bytesWithEmptyFallback(bytesBox: Box[Array[Byte]], elementClass: ElementClassProto)( implicit ec: ExecutionContext): Fox[Array[Byte]] = bytesBox match { - case Empty => Fox.successful(Array.fill[Byte](ElementClass.bytesPerElement(elementClass))(0)) + case 
Empty => Fox.successful(emptyArrayForElementClass(elementClass)) case Full(bytes) => Fox.successful(bytes) case f: Failure => f.toFox } + private def emptyArrayForElementClass(elementClass: ElementClassProto): Array[Byte] = + Array.fill[Byte](ElementClass.bytesPerElement(elementClass))(0) + private def removeBucketFromSegmentIndex( segmentIndexBuffer: VolumeSegmentIndexBuffer, segmentId: Long, @@ -158,17 +169,17 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore bucketList <- addEmptyFallback(bucketListBox) } yield bucketList - def getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( - fallbackLayer: Option[RemoteFallbackLayer], - tracingId: String, - segmentId: Long, - mag: Vec3Int, - version: Option[Long] = None, - mappingName: Option[String], - editableMappingTracingId: Option[String], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - additionalAxes: Option[Seq[AdditionalAxis]], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[ListOfVec3IntProto] = + def getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer(fallbackLayer: Option[RemoteFallbackLayer], + tracingId: String, + segmentId: Long, + mag: Vec3Int, + version: Option[Long] = None, + mappingName: Option[String], + editableMappingTracingId: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]], + additionalAxes: Option[Seq[AdditionalAxis]])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[ListOfVec3IntProto] = for { bucketListBox <- getSegmentToBucketIndex(fallbackLayer, tracingId, @@ -178,8 +189,7 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore mappingName, editableMappingTracingId, additionalCoordinates, - additionalAxes, - userToken).futureBox + additionalAxes).futureBox bucketList <- addEmptyFallback(bucketListBox) } yield bucketList @@ -191,17 +201,17 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore case Empty => 
Fox.successful(ListOfVec3IntProto(Seq.empty)) } - private def getSegmentToBucketIndex( - fallbackLayerOpt: Option[RemoteFallbackLayer], - tracingId: String, - segmentId: Long, - mag: Vec3Int, - version: Option[Long], - mappingName: Option[String], - editableMappingTracingId: Option[String], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - additionalAxes: Option[Seq[AdditionalAxis]], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[ListOfVec3IntProto] = + private def getSegmentToBucketIndex(fallbackLayerOpt: Option[RemoteFallbackLayer], + tracingId: String, + segmentId: Long, + mag: Vec3Int, + version: Option[Long], + mappingName: Option[String], + editableMappingTracingId: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]], + additionalAxes: Option[Seq[AdditionalAxis]])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[ListOfVec3IntProto] = for { fromMutableIndex <- getSegmentToBucketIndexFromFossilDB(tracingId, segmentId, @@ -211,12 +221,7 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore additionalAxes).fillEmpty(ListOfVec3IntProto.of(Seq())) fromFileIndex <- fallbackLayerOpt match { // isEmpty is not the same as length == 0 here :( case Some(fallbackLayer) if fromMutableIndex.length == 0 => - getSegmentToBucketIndexFromFile(fallbackLayer, - segmentId, - mag, - mappingName, - editableMappingTracingId, - userToken) // additional coordinates not supported, see #7556 + getSegmentToBucketIndexFromFile(fallbackLayer, segmentId, mag, mappingName, editableMappingTracingId) // additional coordinates not supported, see #7556 case _ => Fox.successful(Seq.empty) } combined = fromMutableIndex.values.map(vec3IntFromProto) ++ fromFileIndex @@ -237,8 +242,7 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore segmentId: Long, mag: Vec3Int, mappingName: Option[String], - editableMappingTracingId: Option[String], - userToken: Option[String]) = - 
remoteDatastoreClient.querySegmentIndex(layer, segmentId, mag, mappingName, editableMappingTracingId, userToken) + editableMappingTracingId: Option[String])(implicit tc: TokenContext) = + remoteDatastoreClient.querySegmentIndex(layer, segmentId, mag, mappingName, editableMappingTracingId) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala index cb12c273f53..cede715f841 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing @@ -9,71 +10,77 @@ import com.scalableminds.webknossos.datastore.models.{UnsignedInteger, UnsignedI import com.scalableminds.webknossos.datastore.models.datasource.DataLayer import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import javax.inject.Inject import scala.concurrent.ExecutionContext class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTracingService, + annotationService: TSAnnotationService, volumeSegmentIndexService: VolumeSegmentIndexService, editableMappingService: EditableMappingService) extends 
ProtoGeometryImplicits with SegmentStatistics { // Returns the segment volume (=number of voxels) in the target mag - def getSegmentVolume(tracingId: String, + def getSegmentVolume(annotationId: String, + tracingId: String, segmentId: Long, mag: Vec3Int, mappingName: Option[String], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[Long] = + additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Long] = calculateSegmentVolume( segmentId, mag, additionalCoordinates, - getBucketPositions(tracingId, mappingName, additionalCoordinates, userToken), - getTypedDataForBucketPosition(tracingId, userToken) + getBucketPositions(annotationId, tracingId, mappingName, additionalCoordinates), + getTypedDataForBucketPosition(annotationId, tracingId) ) - def getSegmentBoundingBox(tracingId: String, + def getSegmentBoundingBox(annotationId: String, + tracingId: String, segmentId: Long, mag: Vec3Int, mappingName: Option[String], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[BoundingBox] = + additionalCoordinates: Option[Seq[AdditionalCoordinate]])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[BoundingBox] = calculateSegmentBoundingBox( segmentId, mag, additionalCoordinates, - getBucketPositions(tracingId, mappingName, additionalCoordinates, userToken), - getTypedDataForBucketPosition(tracingId, userToken) + getBucketPositions(annotationId, tracingId, mappingName, additionalCoordinates), + getTypedDataForBucketPosition(annotationId, tracingId) ) - private def getTypedDataForBucketPosition(tracingId: String, userToken: Option[String])( + private def getTypedDataForBucketPosition(annotationId: String, tracingId: String)( bucketPosition: Vec3Int, mag: Vec3Int, - additionalCoordinates: Option[Seq[AdditionalCoordinate]]) = + additionalCoordinates: 
Option[Seq[AdditionalCoordinate]])(implicit tc: TokenContext, ec: ExecutionContext) = for { - tracing <- volumeTracingService.find(tracingId) ?~> "tracing.notFound" - bucketData <- getVolumeDataForPositions(tracing, + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> "tracing.notFound" + bucketData <- getVolumeDataForPositions(annotationId, tracingId, + tracing, mag, Seq(bucketPosition), - additionalCoordinates, - userToken) + additionalCoordinates) dataTyped: Array[UnsignedInteger] = UnsignedIntegerArray.fromByteArray( bucketData, elementClassFromProto(tracing.elementClass)) } yield dataTyped - private def getBucketPositions( - tracingId: String, - mappingName: Option[String], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - userToken: Option[String])(segmentId: Long, mag: Vec3Int)(implicit ec: ExecutionContext) = + private def getBucketPositions(annotationId: String, + tracingId: String, + mappingName: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]])( + segmentId: Long, + mag: Vec3Int)(implicit ec: ExecutionContext, tc: TokenContext) = for { - fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId) - tracing <- volumeTracingService.find(tracingId) ?~> "tracing.notFound" + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> "tracing.notFound" + fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId, tracing) additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes) allBucketPositions: ListOfVec3IntProto <- volumeSegmentIndexService .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( @@ -85,17 +92,17 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci mappingName, editableMappingTracingId = volumeTracingService.editableMappingTracingId(tracing, tracingId), additionalCoordinates, - additionalAxes, - userToken + additionalAxes ) } yield allBucketPositions - private def getVolumeDataForPositions(tracing: VolumeTracing, - 
tracingId: String, - mag: Vec3Int, - bucketPositions: Seq[Vec3Int], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - userToken: Option[String]): Fox[Array[Byte]] = { + private def getVolumeDataForPositions( + annotationId: String, + tracingId: String, + tracing: VolumeTracing, + mag: Vec3Int, + bucketPositions: Seq[Vec3Int], + additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit tc: TokenContext): Fox[Array[Byte]] = { val dataRequests = bucketPositions.map { position => WebknossosDataRequest( @@ -109,9 +116,10 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci ) }.toList for { - (data, _) <- if (tracing.getHasEditableMapping) - editableMappingService.volumeData(tracing, tracingId, dataRequests, userToken) - else volumeTracingService.data(tracingId, tracing, dataRequests, includeFallbackDataIfAvailable = true, userToken) + (data, _) <- if (tracing.getHasEditableMapping) { + val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) + editableMappingService.volumeData(mappingLayer, dataRequests) + } else volumeTracingService.data(tracingId, tracing, dataRequests, includeFallbackDataIfAvailable = true) } yield data } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index 14832ee36de..fee555b9788 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -15,10 +15,12 @@ import scala.annotation.tailrec import scala.concurrent.ExecutionContext import scala.concurrent.duration._ -trait VolumeBucketReversionHelper { - protected def isRevertedBucket(data: Array[Byte]): Boolean = data 
sameElements Array[Byte](0) +trait ReversionHelper { + val revertedValue: Array[Byte] = Array[Byte](0) - protected def isRevertedBucket(bucket: VersionedKeyValuePair[Array[Byte]]): Boolean = isRevertedBucket(bucket.value) + protected def isRevertedElement(data: Array[Byte]): Boolean = data.sameElements(revertedValue) + + protected def isRevertedElement(bucket: VersionedKeyValuePair[Array[Byte]]): Boolean = isRevertedElement(bucket.value) } trait VolumeBucketCompression extends LazyLogging { @@ -48,7 +50,7 @@ trait VolumeBucketCompression extends LazyLogging { } catch { case e: Exception => logger.error( - s"Failed to LZ4-decompress volume bucket ($debugInfo, expected uncompressed size $expectedUncompressedBucketSize): $e") + s"Failed to LZ4-decompress volume bucket ($debugInfo, compressed size: ${data.length}, expected uncompressed size $expectedUncompressedBucketSize): $e") throw e } } @@ -170,7 +172,7 @@ trait VolumeTracingBucketHelper with VolumeBucketCompression with DataConverter with BucketKeys - with VolumeBucketReversionHelper { + with ReversionHelper { implicit def ec: ExecutionContext @@ -193,7 +195,7 @@ trait VolumeTracingBucketHelper case None => volumeDataStore.get(key, version, mayBeEmpty = Some(true)) } val unpackedDataFox = dataFox.flatMap { versionedVolumeBucket => - if (isRevertedBucket(versionedVolumeBucket)) Fox.empty + if (isRevertedElement(versionedVolumeBucket)) Fox.empty else { val debugInfo = s"key: $key, ${versionedVolumeBucket.value.length} bytes, version ${versionedVolumeBucket.version}" @@ -211,22 +213,20 @@ trait VolumeTracingBucketHelper } } - private def loadFallbackBucket(dataLayer: VolumeTracingLayer, bucket: BucketPosition): Fox[Array[Byte]] = { + private def loadFallbackBucket(layer: VolumeTracingLayer, bucket: BucketPosition): Fox[Array[Byte]] = { val dataRequest: WebknossosDataRequest = WebknossosDataRequest( position = Vec3Int(bucket.topLeft.mag1X, bucket.topLeft.mag1Y, bucket.topLeft.mag1Z), mag = bucket.mag, - cubeSize = 
dataLayer.lengthOfUnderlyingCubes(bucket.mag), + cubeSize = layer.lengthOfUnderlyingCubes(bucket.mag), fourBit = None, - applyAgglomerate = dataLayer.tracing.mappingName, + applyAgglomerate = layer.tracing.mappingName, version = None, additionalCoordinates = None ) for { - remoteFallbackLayer <- dataLayer.volumeTracingService - .remoteFallbackLayerFromVolumeTracing(dataLayer.tracing, dataLayer.name) - (unmappedData, indices) <- dataLayer.volumeTracingService.getFallbackDataFromDatastore(remoteFallbackLayer, - List(dataRequest), - dataLayer.userToken) + remoteFallbackLayer <- layer.volumeTracingService.remoteFallbackLayerFromVolumeTracing(layer.tracing, layer.name) + (unmappedData, indices) <- layer.volumeTracingService + .getFallbackDataFromDatastore(remoteFallbackLayer, List(dataRequest))(ec, layer.tokenContext) unmappedDataOrEmpty <- if (indices.isEmpty) Fox.successful(unmappedData) else Fox.empty } yield unmappedDataOrEmpty } @@ -301,7 +301,7 @@ class VersionedBucketIterator(prefix: String, with VolumeBucketCompression with BucketKeys with FoxImplicits - with VolumeBucketReversionHelper { + with ReversionHelper { private val batchSize = 64 private var currentStartAfterKey: Option[String] = None @@ -321,7 +321,7 @@ class VersionedBucketIterator(prefix: String, if (currentBatchIterator.hasNext) { val bucket = currentBatchIterator.next() currentStartAfterKey = Some(bucket.key) - if (isRevertedBucket(bucket) || parseBucketKey(bucket.key, additionalAxes).isEmpty) { + if (isRevertedElement(bucket) || parseBucketKey(bucket.key, additionalAxes).isEmpty) { getNextNonRevertedBucket } else { Some(bucket) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala deleted file mode 100644 index 59a915494fa..00000000000 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala +++ /dev/null @@ -1,302 +0,0 @@ -package com.scalableminds.webknossos.tracingstore.tracings.volume - -import com.scalableminds.util.geometry.Vec3Int -import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.{BucketPosition, UnsignedIntegerArray} -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, DataSourceLike, ElementClass} -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto -import com.scalableminds.webknossos.tracingstore.TSRemoteWebknossosClient -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing -import com.scalableminds.webknossos.tracingstore.tracings.{ - FossilDBClient, - KeyValueStoreImplicits, - TracingDataStore, - VersionedKeyValuePair -} -import net.liftweb.common.Empty -import com.scalableminds.webknossos.datastore.geometry.{Vec3IntProto => ProtoPoint3D} -import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import net.liftweb.common.Box -import play.api.libs.json.{Format, Json} - -import scala.collection.mutable -import scala.concurrent.ExecutionContext -import scala.reflect.ClassTag - -object VolumeTracingDownsampling { - private def magsForVolumeTracingByLayerName(dataSource: DataSourceLike, - fallbackLayerName: Option[String]): List[Vec3Int] = { - val fallbackLayer: Option[DataLayerLike] = - fallbackLayerName.flatMap(name => dataSource.dataLayers.find(_.name == name)) - magsForVolumeTracing(dataSource, fallbackLayer) - } - - def magsForVolumeTracing(dataSource: DataSourceLike, fallbackLayer: Option[DataLayerLike]): List[Vec3Int] = { - val fallbackLayerMags = fallbackLayer.map(_.resolutions) - fallbackLayerMags.getOrElse { - val unionOfAllLayers = dataSource.dataLayers.flatMap(_.resolutions).distinct - val unionHasDistinctMaxDims = 
unionOfAllLayers.map(_.maxDim).distinct.length == unionOfAllLayers.length - if (unionHasDistinctMaxDims) { - unionOfAllLayers - } else { - // If the union of all layer’s mags has conflicting mags (meaning non-distinct maxDims, e.g. 2-2-1 and 2-2-2), - // instead use one layer as template. Use the layer with the most mags. - dataSource.dataLayers.maxBy(_.resolutions.length).resolutions.distinct - } - }.sortBy(_.maxDim) - } -} - -trait VolumeTracingDownsampling - extends BucketKeys - with ProtoGeometryImplicits - with VolumeBucketCompression - with KeyValueStoreImplicits - with FoxImplicits { - - val tracingDataStore: TracingDataStore - val tracingStoreWkRpcClient: TSRemoteWebknossosClient - protected def saveBucket(dataLayer: VolumeTracingLayer, - bucket: BucketPosition, - data: Array[Byte], - version: Long, - toCache: Boolean = false): Fox[Unit] - - protected def updateSegmentIndex(segmentIndexBuffer: VolumeSegmentIndexBuffer, - bucketPosition: BucketPosition, - bucketBytes: Array[Byte], - previousBucketBytesBox: Box[Array[Byte]], - elementClass: ElementClassProto, - mappingName: Option[String], - editableMappingTracingId: Option[String]): Fox[Unit] - - protected def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] - - protected def baseMappingName(tracing: VolumeTracing): Fox[Option[String]] - - protected def volumeSegmentIndexClient: FossilDBClient - - protected def downsampleWithLayer(tracingId: String, - oldTracingId: String, - tracing: VolumeTracing, - dataLayer: VolumeTracingLayer, - tracingService: VolumeTracingService, - userToken: Option[String])(implicit ec: ExecutionContext): Fox[List[Vec3Int]] = { - val bucketVolume = 32 * 32 * 32 - for { - _ <- bool2Fox(tracing.version == 0L) ?~> "Tracing has already been edited." 
- _ <- bool2Fox(tracing.mags.nonEmpty) ?~> "Cannot downsample tracing with no mag list" - sourceMag = getSourceMag(tracing) - magsToCreate <- getMagsToCreate(tracing, oldTracingId) - elementClass = elementClassFromProto(tracing.elementClass) - bucketDataMapMutable = new mutable.HashMap[BucketPosition, Array[Byte]]().withDefault(_ => Array[Byte](0)) - _ = fillMapWithSourceBucketsInplace(bucketDataMapMutable, tracingId, dataLayer, sourceMag) - originalBucketPositions = bucketDataMapMutable.keys.toList - updatedBucketsMutable = new mutable.ListBuffer[BucketPosition]() - _ = magsToCreate.foldLeft(sourceMag) { (previousMag, requiredMag) => - downsampleMagFromMag(previousMag, - requiredMag, - originalBucketPositions, - bucketDataMapMutable, - updatedBucketsMutable, - bucketVolume, - elementClass, - dataLayer) - requiredMag - } - fallbackLayer <- tracingService.getFallbackLayer(oldTracingId) // remote wk does not know the new id yet - tracing <- tracingService.find(tracingId) ?~> "tracing.notFound" - segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, - volumeSegmentIndexClient, - tracing.version, - tracingService.remoteDatastoreClient, - fallbackLayer, - dataLayer.additionalAxes, - userToken) - _ <- Fox.serialCombined(updatedBucketsMutable.toList) { bucketPosition: BucketPosition => - for { - _ <- saveBucket(dataLayer, bucketPosition, bucketDataMapMutable(bucketPosition), tracing.version) - mappingName <- baseMappingName(tracing) - _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( - updateSegmentIndex( - segmentIndexBuffer, - bucketPosition, - bucketDataMapMutable(bucketPosition), - Empty, - tracing.elementClass, - mappingName, - editableMappingTracingId(tracing, tracingId) - )) - } yield () - } - _ <- segmentIndexBuffer.flush() - _ = logger.debug(s"Downsampled mags $magsToCreate from $sourceMag for volume tracing $tracingId.") - } yield sourceMag :: magsToCreate - } - - private def fillMapWithSourceBucketsInplace(bucketDataMap: mutable.Map[BucketPosition, 
Array[Byte]], - tracingId: String, - dataLayer: VolumeTracingLayer, - sourceMag: Vec3Int): Unit = { - val data: List[VersionedKeyValuePair[Array[Byte]]] = - tracingDataStore.volumeData.getMultipleKeys(None, Some(tracingId)) - data.foreach { keyValuePair: VersionedKeyValuePair[Array[Byte]] => - val bucketPositionOpt = parseBucketKey(keyValuePair.key, dataLayer.additionalAxes).map(_._2) - bucketPositionOpt.foreach { bucketPosition => - if (bucketPosition.mag == sourceMag) { - bucketDataMap(bucketPosition) = decompressIfNeeded(keyValuePair.value, - expectedUncompressedBucketSizeFor(dataLayer), - s"bucket $bucketPosition during downsampling") - } - } - } - } - - private def downsampleMagFromMag(previousMag: Vec3Int, - requiredMag: Vec3Int, - originalBucketPositions: List[BucketPosition], - bucketDataMapMutable: mutable.Map[BucketPosition, Array[Byte]], - updatedBucketsMutable: mutable.ListBuffer[BucketPosition], - bucketVolume: Int, - elementClass: ElementClass.Value, - dataLayer: VolumeTracingLayer): Unit = { - val downScaleFactor = - Vec3Int(requiredMag.x / previousMag.x, requiredMag.y / previousMag.y, requiredMag.z / previousMag.z) - downsampledBucketPositions(originalBucketPositions, requiredMag).foreach { downsampledBucketPosition => - val sourceBuckets: Seq[BucketPosition] = - sourceBucketPositionsFor(downsampledBucketPosition, downScaleFactor, previousMag) - val sourceData: Seq[Array[Byte]] = sourceBuckets.map(bucketDataMapMutable(_)) - val downsampledData: Array[Byte] = - if (sourceData.forall(_.sameElements(Array[Byte](0)))) - Array[Byte](0) - else { - val sourceDataFilled = fillZeroedIfNeeded(sourceData, bucketVolume, dataLayer.bytesPerElement) - val sourceDataTyped = UnsignedIntegerArray.fromByteArray(sourceDataFilled.toArray.flatten, elementClass) - val dataDownscaledTyped = - downsampleData(sourceDataTyped.grouped(bucketVolume).toArray, downScaleFactor, bucketVolume) - UnsignedIntegerArray.toByteArray(dataDownscaledTyped, elementClass) - } - 
bucketDataMapMutable(downsampledBucketPosition) = downsampledData - updatedBucketsMutable += downsampledBucketPosition - } - } - - private def downsampledBucketPositions(originalBucketPositions: List[BucketPosition], - requiredMag: Vec3Int): Set[BucketPosition] = - originalBucketPositions.map { bucketPosition: BucketPosition => - BucketPosition( - (bucketPosition.voxelMag1X / requiredMag.x / 32) * requiredMag.x * 32, - (bucketPosition.voxelMag1Y / requiredMag.y / 32) * requiredMag.y * 32, - (bucketPosition.voxelMag1Z / requiredMag.z / 32) * requiredMag.z * 32, - requiredMag, - bucketPosition.additionalCoordinates - ) - }.toSet - - private def sourceBucketPositionsFor(bucketPosition: BucketPosition, - downScaleFactor: Vec3Int, - previousMag: Vec3Int): Seq[BucketPosition] = - for { - z <- 0 until downScaleFactor.z - y <- 0 until downScaleFactor.y - x <- 0 until downScaleFactor.x - } yield { - BucketPosition( - bucketPosition.voxelMag1X + x * bucketPosition.bucketLength * previousMag.x, - bucketPosition.voxelMag1Y + y * bucketPosition.bucketLength * previousMag.y, - bucketPosition.voxelMag1Z + z * bucketPosition.bucketLength * previousMag.z, - previousMag, - bucketPosition.additionalCoordinates - ) - } - - private def fillZeroedIfNeeded(sourceData: Seq[Array[Byte]], - bucketVolume: Int, - bytesPerElement: Int): Seq[Array[Byte]] = - // Reverted buckets and missing buckets are represented by a single zero-byte. - // For downsampling, those need to be replaced with the full bucket volume of zero-bytes. 
- sourceData.map { sourceBucketData => - if (sourceBucketData.sameElements(Array[Byte](0))) { - Array.fill[Byte](bucketVolume * bytesPerElement)(0) - } else sourceBucketData - } - - private def downsampleData[T: ClassTag](data: Array[Array[T]], - downScaleFactor: Vec3Int, - bucketVolume: Int): Array[T] = { - val result = new Array[T](bucketVolume) - for { - z <- 0 until 32 - y <- 0 until 32 - x <- 0 until 32 - } { - val voxelSourceData: IndexedSeq[T] = for { - z_offset <- 0 until downScaleFactor.z - y_offset <- 0 until downScaleFactor.y - x_offset <- 0 until downScaleFactor.x - } yield { - val sourceVoxelPosition = - Vec3Int(x * downScaleFactor.x + x_offset, y * downScaleFactor.y + y_offset, z * downScaleFactor.z + z_offset) - val sourceBucketPosition = - Vec3Int(sourceVoxelPosition.x / 32, sourceVoxelPosition.y / 32, sourceVoxelPosition.z / 32) - val sourceVoxelPositionInSourceBucket = - Vec3Int(sourceVoxelPosition.x % 32, sourceVoxelPosition.y % 32, sourceVoxelPosition.z % 32) - val sourceBucketIndex = sourceBucketPosition.x + sourceBucketPosition.y * downScaleFactor.y + sourceBucketPosition.z * downScaleFactor.y * downScaleFactor.z - val sourceVoxelIndex = sourceVoxelPositionInSourceBucket.x + sourceVoxelPositionInSourceBucket.y * 32 + sourceVoxelPositionInSourceBucket.z * 32 * 32 - data(sourceBucketIndex)(sourceVoxelIndex) - } - result(x + y * 32 + z * 32 * 32) = mode(voxelSourceData) - } - result - } - - private def mode[T](items: Seq[T]): T = - items.groupBy(i => i).view.mapValues(_.size).maxBy(_._2)._1 - - private def getSourceMag(tracing: VolumeTracing): Vec3Int = - tracing.mags.minBy(_.maxDim) - - private def getMagsToCreate(tracing: VolumeTracing, oldTracingId: String): Fox[List[Vec3Int]] = - for { - requiredMags <- getRequiredMags(tracing, oldTracingId) - sourceMag = getSourceMag(tracing) - magsToCreate = requiredMags.filter(_.maxDim > sourceMag.maxDim) - } yield magsToCreate - - private def getRequiredMags(tracing: VolumeTracing, oldTracingId: String): 
Fox[List[Vec3Int]] = - for { - dataSource: DataSourceLike <- tracingStoreWkRpcClient.getDataSourceForTracing(oldTracingId) - magsForTracing = VolumeTracingDownsampling.magsForVolumeTracingByLayerName(dataSource, tracing.fallbackLayer) - } yield magsForTracing.sortBy(_.maxDim) - - protected def restrictMagList(tracing: VolumeTracing, magRestrictions: MagRestrictions): VolumeTracing = { - val tracingMags = - resolveLegacyMagList(tracing.mags) - val allowedMags = magRestrictions.filterAllowed(tracingMags.map(vec3IntFromProto)) - tracing.withMags(allowedMags.map(vec3IntToProto)) - } - - protected def resolveLegacyMagList(mags: Seq[ProtoPoint3D]): Seq[ProtoPoint3D] = - if (mags.isEmpty) Seq(ProtoPoint3D(1, 1, 1)) else mags -} - -object MagRestrictions { - def empty: MagRestrictions = MagRestrictions(None, None) - implicit val jsonFormat: Format[MagRestrictions] = Json.format[MagRestrictions] -} - -case class MagRestrictions( - min: Option[Int], - max: Option[Int] -) { - def filterAllowed(mags: Seq[Vec3Int]): Seq[Vec3Int] = - mags.filter(isAllowed) - - def isAllowed(mag: Vec3Int): Boolean = - min.getOrElse(0) <= mag.maxDim && max.getOrElse(Int.MaxValue) >= mag.maxDim - - def isForbidden(mag: Vec3Int): Boolean = !isAllowed(mag) - - def minStr: Option[String] = min.map(_.toString) - def maxStr: Option[String] = max.map(_.toString) -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index 534ca0ada77..10868db7097 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume +import com.scalableminds.util.accesscontext.TokenContext 
import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.tools.{Fox, FoxImplicits} @@ -75,7 +76,7 @@ case class VolumeTracingLayer( isTemporaryTracing: Boolean = false, includeFallbackDataIfAvailable: Boolean = false, tracing: VolumeTracing, - userToken: Option[String], + tokenContext: TokenContext, additionalAxes: Option[Seq[AdditionalAxis]] )(implicit val volumeDataStore: FossilDBClient, implicit val volumeDataCache: TemporaryVolumeDataStore, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingMags.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingMags.scala new file mode 100644 index 00000000000..1c7f316bb39 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingMags.scala @@ -0,0 +1,57 @@ +package com.scalableminds.webknossos.tracingstore.tracings.volume + +import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, DataSourceLike} +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.geometry.{Vec3IntProto => ProtoPoint3D} +import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits +import play.api.libs.json.{Format, Json} + +object VolumeTracingMags extends ProtoGeometryImplicits { + + def magsForVolumeTracing(dataSource: DataSourceLike, fallbackLayer: Option[DataLayerLike]): List[Vec3Int] = { + val fallbackLayerMags = fallbackLayer.map(_.resolutions) + fallbackLayerMags.getOrElse { + val unionOfAllLayers = dataSource.dataLayers.flatMap(_.resolutions).distinct + val unionHasDistinctMaxDims = unionOfAllLayers.map(_.maxDim).distinct.length == unionOfAllLayers.length + if (unionHasDistinctMaxDims) { + unionOfAllLayers + } else { + // If the union of all 
layer’s mags has conflicting mags (meaning non-distinct maxDims, e.g. 2-2-1 and 2-2-2), + // instead use one layer as template. Use the layer with the most mags. + dataSource.dataLayers.maxBy(_.resolutions.length).resolutions.distinct + } + }.sortBy(_.maxDim) + } + + def restrictMagList(tracing: VolumeTracing, magRestrictions: MagRestrictions): VolumeTracing = { + val tracingMags = + resolveLegacyMagList(tracing.mags) + val allowedMags = magRestrictions.filterAllowed(tracingMags.map(vec3IntFromProto)) + tracing.withMags(allowedMags.map(vec3IntToProto)) + } + + def resolveLegacyMagList(mags: Seq[ProtoPoint3D]): Seq[ProtoPoint3D] = + if (mags.isEmpty) Seq(ProtoPoint3D(1, 1, 1)) else mags +} + +object MagRestrictions { + def empty: MagRestrictions = MagRestrictions(None, None) + implicit val jsonFormat: Format[MagRestrictions] = Json.format[MagRestrictions] +} + +case class MagRestrictions( + min: Option[Int], + max: Option[Int] +) { + def filterAllowed(mags: Seq[Vec3Int]): Seq[Vec3Int] = + mags.filter(isAllowed) + + def isAllowed(mag: Vec3Int): Boolean = + min.getOrElse(0) <= mag.maxDim && max.getOrElse(Int.MaxValue) >= mag.maxDim + + def isForbidden(mag: Vec3Int): Boolean = !isAllowed(mag) + + def minStr: Option[String] = min.map(_.toString) + def maxStr: Option[String] = max.map(_.toString) +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 77dca95a5b2..02902f222ce 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import com.google.inject.Inject +import 
com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.io.{NamedStream, ZipIO} @@ -15,7 +16,6 @@ import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto import com.scalableminds.webknossos.datastore.models.requests.DataServiceDataRequest import com.scalableminds.webknossos.datastore.models.{ - AdditionalCoordinate, BucketPosition, UnsignedInteger, UnsignedIntegerArray, @@ -25,7 +25,6 @@ import com.scalableminds.webknossos.datastore.models.{ import com.scalableminds.webknossos.datastore.services._ import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import com.scalableminds.webknossos.tracingstore.tracings._ -import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat import com.scalableminds.webknossos.tracingstore.{ TSRemoteDatastoreClient, @@ -37,10 +36,9 @@ import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.Files import play.api.libs.Files.TemporaryFileCreator -import play.api.libs.json.{JsObject, JsValue, Json} - import java.io._ import java.nio.file.Paths +import java.util.Base64 import java.util.zip.Deflater import scala.collection.mutable import scala.concurrent.ExecutionContext @@ -48,14 +46,12 @@ import scala.concurrent.duration._ class VolumeTracingService @Inject()( val tracingDataStore: TracingDataStore, - val tracingStoreWkRpcClient: TSRemoteWebknossosClient, val adHocMeshServiceHolder: AdHocMeshServiceHolder, implicit val temporaryTracingStore: TemporaryTracingStore[VolumeTracing], implicit val temporaryVolumeDataStore: TemporaryVolumeDataStore, implicit 
val ec: ExecutionContext, val handledGroupIdStore: TracingStoreRedisStore, val uncommittedUpdatesStore: TracingStoreRedisStore, - editableMappingService: EditableMappingService, val temporaryTracingIdStore: TracingStoreRedisStore, val remoteDatastoreClient: TSRemoteDatastoreClient, val remoteWebknossosClient: TSRemoteWebknossosClient, @@ -64,7 +60,6 @@ class VolumeTracingService @Inject()( volumeSegmentIndexService: VolumeSegmentIndexService ) extends TracingService[VolumeTracing] with VolumeTracingBucketHelper - with VolumeTracingDownsampling with WKWDataFormatHelper with FallbackDataHelper with DataFinder @@ -77,9 +72,6 @@ class VolumeTracingService @Inject()( implicit val tracingCompanion: VolumeTracing.type = VolumeTracing - implicit val updateActionJsonFormat: VolumeUpdateAction.volumeUpdateActionFormat.type = - VolumeUpdateAction.volumeUpdateActionFormat - val tracingType: TracingType = TracingType.volume val tracingStore: FossilDBClient = tracingDataStore.volumes @@ -93,14 +85,10 @@ class VolumeTracingService @Inject()( adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService - private val fallbackLayerCache: AlfuCache[String, Option[RemoteFallbackLayer]] = AlfuCache(maxCapacity = 100) - - override def currentVersion(tracingId: String): Fox[Long] = - tracingDataStore.volumes.getVersion(tracingId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) - - override def currentVersion(tracing: VolumeTracing): Long = tracing.version + private val fallbackLayerCache: AlfuCache[(String, Option[String], Option[String]), Option[RemoteFallbackLayer]] = + AlfuCache(maxCapacity = 100) - override protected def updateSegmentIndex( + private def updateSegmentIndex( segmentIndexBuffer: VolumeSegmentIndexBuffer, bucketPosition: BucketPosition, bucketBytes: Array[Byte], @@ -116,75 +104,45 @@ class VolumeTracingService @Inject()( mappingName, 
editableMappingTracingId) ?~> "volumeSegmentIndex.update.failed" - def handleUpdateGroup(tracingId: String, - updateGroup: UpdateActionGroup[VolumeTracing], - previousVersion: Long, - userToken: Option[String]): Fox[Unit] = + def applyBucketMutatingActions(tracingId: String, + tracing: VolumeTracing, + updateActions: List[BucketMutatingVolumeUpdateAction], + newVersion: Long)(implicit tc: TokenContext): Fox[Unit] = for { // warning, may be called multiple times with the same version number (due to transaction management). // frontend ensures that each bucket is only updated once per transaction - fallbackLayer <- getFallbackLayer(tracingId) - tracing <- find(tracingId) ?~> "tracing.notFound" - segmentIndexBuffer <- Fox.successful( - new VolumeSegmentIndexBuffer( - tracingId, - volumeSegmentIndexClient, - updateGroup.version, - remoteDatastoreClient, - fallbackLayer, - AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), - userToken - )) - updatedTracing: VolumeTracing <- updateGroup.actions.foldLeft(find(tracingId)) { (tracingFox, action) => - tracingFox.futureBox.flatMap { - case Full(tracing) => - action match { - case a: UpdateBucketVolumeAction => - if (tracing.getHasEditableMapping) { - Fox.failure("Cannot mutate volume data in annotation with editable mapping.") - } else - updateBucket(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version) ?~> "Failed to save volume data." 
- case a: UpdateTracingVolumeAction => - Fox.successful( - tracing.copy( - activeSegmentId = Some(a.activeSegmentId), - editPosition = a.editPosition, - editRotation = a.editRotation, - largestSegmentId = a.largestSegmentId, - zoomLevel = a.zoomLevel, - editPositionAdditionalCoordinates = - AdditionalCoordinate.toProto(a.editPositionAdditionalCoordinates) - )) - case a: RevertToVersionVolumeAction => - revertToVolumeVersion(tracingId, a.sourceVersion, updateGroup.version, tracing, userToken) - case a: DeleteSegmentDataVolumeAction => - if (!tracing.getHasSegmentIndex) { - Fox.failure("Cannot delete segment data for annotations without segment index.") - } else - deleteSegmentData(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version, userToken) ?~> "Failed to delete segment data." - case _: UpdateTdCamera => Fox.successful(tracing) - case a: ApplyableVolumeAction => Fox.successful(a.applyOn(tracing)) - case _ => Fox.failure("Unknown action.") - } - case Empty => - Fox.empty - case f: Failure => - f.toFox - } + fallbackLayerOpt <- getFallbackLayer(tracingId, tracing) + segmentIndexBuffer = new VolumeSegmentIndexBuffer( + tracingId, + volumeSegmentIndexClient, + newVersion, + remoteDatastoreClient, + fallbackLayerOpt, + AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), + tc + ) + _ <- Fox.serialCombined(updateActions) { + case a: UpdateBucketVolumeAction => + if (tracing.getHasEditableMapping) { + Fox.failure("Cannot mutate volume data in annotation with editable mapping.") + } else + updateBucket(tracingId, tracing, a, segmentIndexBuffer, newVersion) ?~> "Failed to save volume data." 
+ //case a: RevertToVersionVolumeAction => revertToVolumeVersion(tracingId, a.sourceVersion, updateGroup.version, tracing, userToken) + case a: DeleteSegmentDataVolumeAction => + if (!tracing.getHasSegmentIndex) { + Fox.failure("Cannot delete segment data for annotations without segment index.") + } else + deleteSegmentData(tracingId, tracing, a, segmentIndexBuffer, newVersion) ?~> "Failed to delete segment data." + case _ => Fox.failure("Unknown bucket-mutating action.") } _ <- segmentIndexBuffer.flush() - _ <- save(updatedTracing.copy(version = updateGroup.version), Some(tracingId), updateGroup.version) - _ <- tracingDataStore.volumeUpdates.put( - tracingId, - updateGroup.version, - updateGroup.actions.map(_.addTimestamp(updateGroup.timestamp)).map(_.transformToCompact)) - } yield Fox.successful(()) + } yield () private def updateBucket(tracingId: String, volumeTracing: VolumeTracing, action: UpdateBucketVolumeAction, segmentIndexBuffer: VolumeSegmentIndexBuffer, - updateGroupVersion: Long): Fox[VolumeTracing] = + updateGroupVersion: Long)(implicit tc: TokenContext): Fox[VolumeTracing] = for { _ <- assertMagIsValid(volumeTracing, action.mag) ?~> s"Received a mag-${action.mag.toMagLiteral(allowScalar = true)} bucket, which is invalid for this annotation." 
bucketPosition = BucketPosition(action.position.x, @@ -194,15 +152,16 @@ class VolumeTracingService @Inject()( action.additionalCoordinates) _ <- bool2Fox(!bucketPosition.hasNegativeComponent) ?~> s"Received a bucket at negative position ($bucketPosition), must be positive" dataLayer = volumeTracingLayer(tracingId, volumeTracing) - _ <- saveBucket(dataLayer, bucketPosition, action.data, updateGroupVersion) ?~> "failed to save bucket" - mappingName <- baseMappingName(volumeTracing) + actionBucketData <- action.base64Data.map(Base64.getDecoder.decode).toFox + _ <- saveBucket(dataLayer, bucketPosition, actionBucketData, updateGroupVersion) ?~> "failed to save bucket" + mappingName <- selectMappingName(volumeTracing) _ <- Fox.runIfOptionTrue(volumeTracing.hasSegmentIndex) { for { previousBucketBytes <- loadBucket(dataLayer, bucketPosition, Some(updateGroupVersion - 1L)).futureBox _ <- updateSegmentIndex( segmentIndexBuffer, bucketPosition, - action.data, + actionBucketData, previousBucketBytes, volumeTracing.elementClass, mappingName, @@ -212,35 +171,34 @@ class VolumeTracingService @Inject()( } } yield volumeTracing - override def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] = + def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] = if (tracing.getHasEditableMapping) Some(tracingId) else None - override def baseMappingName(tracing: VolumeTracing): Fox[Option[String]] = + private def selectMappingName(tracing: VolumeTracing): Fox[Option[String]] = if (tracing.getHasEditableMapping) - tracing.mappingName.map(editableMappingService.getBaseMappingName).getOrElse(Fox.successful(None)) + Fox.failure("mappingName called on volumeTracing with editableMapping!") else Fox.successful(tracing.mappingName) private def deleteSegmentData(tracingId: String, volumeTracing: VolumeTracing, a: DeleteSegmentDataVolumeAction, segmentIndexBuffer: VolumeSegmentIndexBuffer, - version: Long, - userToken: Option[String]): 
Fox[VolumeTracing] = + version: Long)(implicit tc: TokenContext): Fox[VolumeTracing] = for { _ <- Fox.successful(()) dataLayer = volumeTracingLayer(tracingId, volumeTracing) + fallbackLayer <- getFallbackLayer(tracingId, volumeTracing) possibleAdditionalCoordinates = AdditionalAxis.coordinateSpace(dataLayer.additionalAxes).map(Some(_)) additionalCoordinateList = if (possibleAdditionalCoordinates.isEmpty) { List(None) } else { possibleAdditionalCoordinates.toList } - mappingName <- baseMappingName(volumeTracing) + mappingName <- selectMappingName(volumeTracing) _ <- Fox.serialCombined(volumeTracing.mags.toList)(magProto => Fox.serialCombined(additionalCoordinateList)(additionalCoordinates => { val mag = vec3IntFromProto(magProto) for { - fallbackLayer <- getFallbackLayer(tracingId) bucketPositionsRaw <- volumeSegmentIndexService.getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( fallbackLayer, tracingId, @@ -250,8 +208,7 @@ class VolumeTracingService @Inject()( mappingName, editableMappingTracingId(volumeTracing, tracingId), additionalCoordinates, - dataLayer.additionalAxes, - userToken + dataLayer.additionalAxes ) bucketPositions = bucketPositionsRaw.values .map(vec3IntFromProto) @@ -290,34 +247,36 @@ class VolumeTracingService @Inject()( bool2Fox(mag.isIsotropic) } - private def revertToVolumeVersion(tracingId: String, - sourceVersion: Long, - newVersion: Long, - tracing: VolumeTracing, - userToken: Option[String]): Fox[VolumeTracing] = { + def revertVolumeData(tracingId: String, + sourceVersion: Long, + sourceTracing: VolumeTracing, + newVersion: Long, + tracingBeforeRevert: VolumeTracing)(implicit tc: TokenContext): Fox[Unit] = { - val dataLayer = volumeTracingLayer(tracingId, tracing) - val bucketStream = dataLayer.volumeBucketProvider.bucketStreamWithVersion() + val dataLayer = volumeTracingLayer(tracingId, tracingBeforeRevert) + val bucketStreamBeforeRevert = + dataLayer.volumeBucketProvider.bucketStreamWithVersion(version = 
Some(tracingBeforeRevert.version)) + + logger.info(s"reverting volume data from v${tracingBeforeRevert.version} to v$sourceVersion, creating v$newVersion") for { - fallbackLayer <- getFallbackLayer(tracingId) + fallbackLayer <- getFallbackLayer(tracingId, tracingBeforeRevert) segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, newVersion, remoteDatastoreClient, fallbackLayer, dataLayer.additionalAxes, - userToken) - sourceTracing <- find(tracingId, Some(sourceVersion)) - mappingName <- baseMappingName(sourceTracing) - _ <- Fox.serialCombined(bucketStream) { + tc) + mappingName <- selectMappingName(sourceTracing) + _ <- Fox.serialCombined(bucketStreamBeforeRevert) { case (bucketPosition, dataBeforeRevert, version) => if (version > sourceVersion) { loadBucket(dataLayer, bucketPosition, Some(sourceVersion)).futureBox.map { case Full(dataAfterRevert) => for { _ <- saveBucket(dataLayer, bucketPosition, dataAfterRevert, newVersion) - _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( + _ <- Fox.runIfOptionTrue(tracingBeforeRevert.hasSegmentIndex)( updateSegmentIndex( segmentIndexBuffer, bucketPosition, @@ -330,9 +289,9 @@ class VolumeTracingService @Inject()( } yield () case Empty => for { - dataAfterRevert <- Fox.successful(Array[Byte](0)) + dataAfterRevert <- Fox.successful(revertedValue) _ <- saveBucket(dataLayer, bucketPosition, dataAfterRevert, newVersion) - _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( + _ <- Fox.runIfOptionTrue(tracingBeforeRevert.hasSegmentIndex)( updateSegmentIndex( segmentIndexBuffer, bucketPosition, @@ -348,13 +307,12 @@ class VolumeTracingService @Inject()( } else Fox.successful(()) } _ <- segmentIndexBuffer.flush() - } yield sourceTracing + } yield () } - def initializeWithDataMultiple(tracingId: String, - tracing: VolumeTracing, - initialData: File, - userToken: Option[String])(implicit mp: MessagesProvider): Fox[Set[Vec3Int]] = + def initializeWithDataMultiple(tracingId: String, tracing: 
VolumeTracing, initialData: File)( + implicit mp: MessagesProvider, + tc: TokenContext): Fox[Set[Vec3Int]] = if (tracing.version != 0L) Failure("Tracing has already been edited.") else { @@ -367,9 +325,9 @@ class VolumeTracingService @Inject()( _ = if (magSet.nonEmpty) magSets.add(magSet) } yield () } - mappingName <- baseMappingName(tracing) + mappingName <- selectMappingName(tracing) mags <- - // if none of the tracings contained any volume data do not save buckets, use full mag list, as already initialized on wk-side + // if none of the tracings contained any volume data do not save buckets, use full mag list, as already initialized on wk-side if (magSets.isEmpty) Fox.successful(tracing.mags.map(vec3IntFromProto).toSet) else { @@ -391,7 +349,7 @@ class VolumeTracingService @Inject()( mergedVolume.largestSegmentId.toLong, tracing.elementClass) destinationDataLayer = volumeTracingLayer(tracingId, tracing) - fallbackLayer <- getFallbackLayer(tracingId) + fallbackLayer <- getFallbackLayer(tracingId, tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( tracingId, volumeSegmentIndexClient, @@ -399,7 +357,7 @@ class VolumeTracingService @Inject()( remoteDatastoreClient, fallbackLayer, AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), - userToken + tc ) _ <- mergedVolume.withMergedBuckets { (bucketPosition, bytes) => for { @@ -424,16 +382,15 @@ class VolumeTracingService @Inject()( def initializeWithData(tracingId: String, tracing: VolumeTracing, initialData: File, - magRestrictions: MagRestrictions, - userToken: Option[String]): Fox[Set[Vec3Int]] = + magRestrictions: MagRestrictions)(implicit tc: TokenContext): Fox[Set[Vec3Int]] = if (tracing.version != 0L) { Failure("Tracing has already been edited.") } else { val dataLayer = volumeTracingLayer(tracingId, tracing) val savedMags = new mutable.HashSet[Vec3Int]() for { - fallbackLayer <- 
getFallbackLayer(tracingId, tracing) + mappingName <- selectMappingName(tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( tracingId, volumeSegmentIndexClient, @@ -441,7 +398,7 @@ class VolumeTracingService @Inject()( remoteDatastoreClient, fallbackLayer, AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), - userToken + tc ) _ <- withBucketsFromZip(initialData) { (bucketPosition, bytes) => if (magRestrictions.isForbidden(bucketPosition.mag)) { @@ -471,10 +428,11 @@ class VolumeTracingService @Inject()( } } - def allDataZip(tracingId: String, - tracing: VolumeTracing, - volumeDataZipFormat: VolumeDataZipFormat, - voxelSize: Option[VoxelSize])(implicit ec: ExecutionContext): Fox[Files.TemporaryFile] = { + def allDataZip( + tracingId: String, + tracing: VolumeTracing, + volumeDataZipFormat: VolumeDataZipFormat, + voxelSize: Option[VoxelSize])(implicit ec: ExecutionContext, tc: TokenContext): Fox[Files.TemporaryFile] = { val zipped = temporaryFileCreator.create(tracingId, ".zip") val os = new BufferedOutputStream(new FileOutputStream(new File(zipped.path.toString))) allDataToOutputStream(tracingId, tracing, volumeDataZipFormat, voxelSize, os).map(_ => zipped) @@ -484,7 +442,7 @@ class VolumeTracingService @Inject()( tracing: VolumeTracing, volumeDataZipFormmat: VolumeDataZipFormat, voxelSize: Option[VoxelSize], - os: OutputStream)(implicit ec: ExecutionContext): Fox[Unit] = { + os: OutputStream)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = { val dataLayer = volumeTracingLayer(tracingId, tracing) val buckets: Iterator[NamedStream] = volumeDataZipFormmat match { case VolumeDataZipFormat.wkw => @@ -515,90 +473,92 @@ class VolumeTracingService @Inject()( def data(tracingId: String, tracing: VolumeTracing, dataRequests: DataRequestCollection, - includeFallbackDataIfAvailable: Boolean = false, - userToken: Option[String] = None): Fox[(Array[Byte], List[Int])] = + includeFallbackDataIfAvailable: Boolean = false)(implicit tc: TokenContext): 
Fox[(Array[Byte], List[Int])] = for { isTemporaryTracing <- isTemporaryTracing(tracingId) - dataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing, includeFallbackDataIfAvailable, userToken) + dataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing, includeFallbackDataIfAvailable) requests = dataRequests.map(r => DataServiceDataRequest(null, dataLayer, r.cuboid(dataLayer), r.settings.copy(appliedAgglomerate = None))) data <- binaryDataService.handleDataRequests(requests) } yield data - def duplicate(tracingId: String, - sourceTracing: VolumeTracing, - fromTask: Boolean, - datasetBoundingBox: Option[BoundingBox], - magRestrictions: MagRestrictions, - editPosition: Option[Vec3Int], - editRotation: Option[Vec3Double], - boundingBox: Option[BoundingBox], - mappingName: Option[String], - userToken: Option[String]): Fox[(String, VolumeTracing)] = { - val tracingWithBB = addBoundingBoxFromTaskIfRequired(sourceTracing, fromTask, datasetBoundingBox) - val tracingWithMagRestrictions = restrictMagList(tracingWithBB, magRestrictions) + def adaptVolumeForDuplicate(sourceTracingId: String, + newTracingId: String, + sourceTracing: VolumeTracing, + isFromTask: Boolean, + boundingBox: Option[BoundingBox], + datasetBoundingBox: Option[BoundingBox], + magRestrictions: MagRestrictions, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double], + newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[VolumeTracing] = { + val tracingWithBB = addBoundingBoxFromTaskIfRequired(sourceTracing, isFromTask, datasetBoundingBox) + val tracingWithMagRestrictions = VolumeTracingMags.restrictMagList(tracingWithBB, magRestrictions) for { - fallbackLayer <- getFallbackLayer(tracingId) - hasSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, fallbackLayer, userToken) + fallbackLayer <- getFallbackLayer(sourceTracingId, sourceTracing) + hasSegmentIndex <- 
VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, fallbackLayer) newTracing = tracingWithMagRestrictions.copy( createdTimestamp = System.currentTimeMillis(), - editPosition = editPosition.map(vec3IntToProto).getOrElse(tracingWithMagRestrictions.editPosition), - editRotation = editRotation.map(vec3DoubleToProto).getOrElse(tracingWithMagRestrictions.editRotation), - boundingBox = boundingBoxOptToProto(boundingBox).getOrElse(tracingWithMagRestrictions.boundingBox), - mappingName = mappingName.orElse(tracingWithMagRestrictions.mappingName), - version = 0, + editPosition = editPosition.map(vec3IntToProto).getOrElse(sourceTracing.editPosition), + editRotation = editRotation.map(vec3DoubleToProto).getOrElse(sourceTracing.editRotation), + boundingBox = boundingBoxOptToProto(boundingBox).getOrElse(sourceTracing.boundingBox), + mappingName = + if (sourceTracing.getHasEditableMapping) Some(newTracingId) + else sourceTracing.mappingName, + version = newVersion, // Adding segment index on duplication if the volume tracing allows it. 
This will be used in duplicateData hasSegmentIndex = Some(hasSegmentIndex) ) _ <- bool2Fox(newTracing.mags.nonEmpty) ?~> "magRestrictions.tooTight" - newId <- save(newTracing, None, newTracing.version) - _ <- duplicateData(tracingId, sourceTracing, newId, newTracing, userToken) - } yield (newId, newTracing) + } yield newTracing } - @SuppressWarnings(Array("OptionGet")) //We suppress this warning because we check the option beforehand private def addBoundingBoxFromTaskIfRequired(tracing: VolumeTracing, - fromTask: Boolean, - datasetBoundingBox: Option[BoundingBox]): VolumeTracing = - if (fromTask && datasetBoundingBox.isDefined) { - val newId = if (tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 - tracing - .addUserBoundingBoxes( - NamedBoundingBoxProto(newId, - Some("task bounding box"), - Some(true), - Some(getRandomColor), - tracing.boundingBox)) - .withBoundingBox(datasetBoundingBox.get) - } else tracing + isFromTask: Boolean, + datasetBoundingBoxOpt: Option[BoundingBox]): VolumeTracing = + datasetBoundingBoxOpt match { + case Some(datasetBoundingBox) if isFromTask => + val newId = if (tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 + tracing + .addUserBoundingBoxes( + NamedBoundingBoxProto(newId, + Some("task bounding box"), + Some(true), + Some(getRandomColor), + tracing.boundingBox)) + .withBoundingBox(datasetBoundingBox) + case _ => tracing + } - private def duplicateData(sourceId: String, - sourceTracing: VolumeTracing, - destinationId: String, - destinationTracing: VolumeTracing, - userToken: Option[String]): Fox[Unit] = + def duplicateVolumeData(sourceTracingId: String, + sourceTracing: VolumeTracing, + newTracingId: String, + newTracing: VolumeTracing)(implicit tc: TokenContext): Fox[Unit] = { + var bucketCount = 0 for { - isTemporaryTracing <- isTemporaryTracing(sourceId) - sourceDataLayer = volumeTracingLayer(sourceId, sourceTracing, isTemporaryTracing) - buckets: 
Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() - destinationDataLayer = volumeTracingLayer(destinationId, destinationTracing) - fallbackLayer <- getFallbackLayer(sourceId) + isTemporaryTracing <- isTemporaryTracing(sourceTracingId) + sourceDataLayer = volumeTracingLayer(sourceTracingId, sourceTracing, isTemporaryTracing) + buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream( + Some(sourceTracing.version)) + destinationDataLayer = volumeTracingLayer(newTracingId, newTracing) + fallbackLayer <- getFallbackLayer(sourceTracingId, sourceTracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( - destinationId, + newTracingId, volumeSegmentIndexClient, - destinationTracing.version, + newTracing.version, remoteDatastoreClient, fallbackLayer, AdditionalAxis.fromProtosAsOpt(sourceTracing.additionalAxes), - userToken + tc ) - mappingName <- baseMappingName(sourceTracing) + mappingName <- selectMappingName(sourceTracing) _ <- Fox.serialCombined(buckets) { case (bucketPosition, bucketData) => - if (destinationTracing.mags.contains(vec3IntToProto(bucketPosition.mag))) { + if (newTracing.mags.contains(vec3IntToProto(bucketPosition.mag))) { for { - _ <- saveBucket(destinationDataLayer, bucketPosition, bucketData, destinationTracing.version) - _ <- Fox.runIfOptionTrue(destinationTracing.hasSegmentIndex)( + _ <- saveBucket(destinationDataLayer, bucketPosition, bucketData, newTracing.version) + _ = bucketCount += 1 + _ <- Fox.runIfOptionTrue(newTracing.hasSegmentIndex)( updateSegmentIndex( segmentIndexBuffer, bucketPosition, @@ -606,103 +566,68 @@ class VolumeTracingService @Inject()( Empty, sourceTracing.elementClass, mappingName, - editableMappingTracingId(sourceTracing, sourceId) + editableMappingTracingId(sourceTracing, sourceTracingId) )) } yield () } else Fox.successful(()) } + _ = logger.info( + s"Duplicated $bucketCount volume buckets from $sourceTracingId v${sourceTracing.version} to 
$newTracingId v${newTracing.version}.") _ <- segmentIndexBuffer.flush() } yield () + } - private def volumeTracingLayer(tracingId: String, - tracing: VolumeTracing, - isTemporaryTracing: Boolean = false, - includeFallbackDataIfAvailable: Boolean = false, - userToken: Option[String] = None): VolumeTracingLayer = + private def volumeTracingLayer( + tracingId: String, + tracing: VolumeTracing, + isTemporaryTracing: Boolean = false, + includeFallbackDataIfAvailable: Boolean = false)(implicit tc: TokenContext): VolumeTracingLayer = VolumeTracingLayer( name = tracingId, isTemporaryTracing = isTemporaryTracing, volumeTracingService = this, includeFallbackDataIfAvailable = includeFallbackDataIfAvailable, tracing = tracing, - userToken = userToken, + tokenContext = tc, additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes) ) - def updateActionLog(tracingId: String, - newestVersion: Option[Long] = None, - oldestVersion: Option[Long] = None): Fox[JsValue] = { - def versionedTupleToJson(tuple: (Long, List[CompactVolumeUpdateAction])): JsObject = - Json.obj( - "version" -> tuple._1, - "value" -> Json.toJson(tuple._2) - ) - - for { - volumeTracings <- tracingDataStore.volumeUpdates.getMultipleVersionsAsVersionValueTuple( - tracingId, - newestVersion, - oldestVersion)(fromJsonBytes[List[CompactVolumeUpdateAction]]) - updateActionGroupsJs = volumeTracings.map(versionedTupleToJson) - } yield Json.toJson(updateActionGroupsJs) - } - def updateMagList(tracingId: String, tracing: VolumeTracing, mags: Set[Vec3Int], - toCache: Boolean = false): Fox[String] = + toTemporaryStore: Boolean = false): Fox[String] = for { _ <- bool2Fox(tracing.version == 0L) ?~> "Tracing has already been edited." 
_ <- bool2Fox(mags.nonEmpty) ?~> "Mag restrictions result in zero mags" id <- save(tracing.copy(mags = mags.toList.sortBy(_.maxDim).map(vec3IntToProto)), Some(tracingId), tracing.version, - toCache) + toTemporaryStore) } yield id - def downsample(tracingId: String, - oldTracingId: String, - tracing: VolumeTracing, - userToken: Option[String]): Fox[Unit] = - for { - resultingMags <- downsampleWithLayer(tracingId, - oldTracingId, - tracing, - volumeTracingLayer(tracingId, tracing), - this, - userToken) - _ <- updateMagList(tracingId, tracing, resultingMags.toSet) - } yield () - def volumeBucketsAreEmpty(tracingId: String): Boolean = volumeDataStore.getMultipleKeys(None, Some(tracingId), limit = Some(1))(toBox).isEmpty - def createAdHocMesh(tracingId: String, - request: WebknossosAdHocMeshRequest, - userToken: Option[String]): Fox[(Array[Float], List[Int])] = - for { - tracing <- find(tracingId) ?~> "tracing.notFound" - segmentationLayer = volumeTracingLayer(tracingId, - tracing, - includeFallbackDataIfAvailable = true, - userToken = userToken) - adHocMeshRequest = AdHocMeshRequest( - None, - segmentationLayer, - request.cuboid(segmentationLayer), - request.segmentId, - request.voxelSizeFactorInUnit, - None, - None, - request.additionalCoordinates, - request.findNeighbors - ) - result <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) - } yield result + def createAdHocMesh(tracingId: String, tracing: VolumeTracing, request: WebknossosAdHocMeshRequest)( + implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = { + val volumeLayer = volumeTracingLayer(tracingId, tracing, includeFallbackDataIfAvailable = true) + val adHocMeshRequest = AdHocMeshRequest( + None, + volumeLayer, + request.cuboid(volumeLayer), + request.segmentId, + request.voxelSizeFactorInUnit, + None, + None, + request.additionalCoordinates, + request.findNeighbors + ) + adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) + } - def findData(tracingId: String): Fox[Option[Vec3Int]] 
= + def findData(tracingId: String, tracing: VolumeTracing)(implicit tc: TokenContext): Fox[Option[Vec3Int]] = for { - tracing <- find(tracingId) ?~> "tracing.notFound" + _ <- Fox.successful(()) volumeLayer = volumeTracingLayer(tracingId, tracing) bucketStream = volumeLayer.bucketProvider.bucketStream(Some(tracing.version)) bucketPosOpt = if (bucketStream.hasNext) { @@ -786,25 +711,24 @@ class VolumeTracingService @Inject()( case (None, None) => None } - private def bucketStreamFromSelector(selector: TracingSelector, - tracing: VolumeTracing): Iterator[(BucketPosition, Array[Byte])] = { - val dataLayer = volumeTracingLayer(selector.tracingId, tracing) + private def bucketStreamFor(tracingId: String, tracing: VolumeTracing)( + implicit tc: TokenContext): Iterator[(BucketPosition, Array[Byte])] = { + val dataLayer = volumeTracingLayer(tracingId, tracing) dataLayer.bucketProvider.bucketStream(Some(tracing.version)) } - def mergeVolumeData(tracingSelectors: Seq[TracingSelector], + def mergeVolumeData(tracingIds: Seq[String], tracings: Seq[VolumeTracing], newId: String, newVersion: Long, - toCache: Boolean, - userToken: Option[String])(implicit mp: MessagesProvider): Fox[MergedVolumeStats] = { + persist: Boolean)(implicit mp: MessagesProvider, tc: TokenContext): Fox[MergedVolumeStats] = { val elementClass = tracings.headOption.map(_.elementClass).getOrElse(elementClassToProto(ElementClass.uint8)) val magSets = new mutable.HashSet[Set[Vec3Int]]() - tracingSelectors.zip(tracings).foreach { - case (selector, tracing) => + tracingIds.zip(tracings).foreach { + case (tracingId, tracing) => val magSet = new mutable.HashSet[Vec3Int]() - bucketStreamFromSelector(selector, tracing).foreach { + bucketStreamFor(tracingId, tracing).foreach { case (bucketPosition, _) => magSet.add(bucketPosition.mag) } @@ -830,15 +754,15 @@ class VolumeTracingService @Inject()( val mergedVolume = new MergedVolume(elementClass) - tracingSelectors.zip(tracings).foreach { - case (selector, tracing) => - 
val bucketStream = bucketStreamFromSelector(selector, tracing) + tracingIds.zip(tracings).foreach { + case (tracingId, tracing) => + val bucketStream = bucketStreamFor(tracingId, tracing) mergedVolume.addLabelSetFromBucketStream(bucketStream, magsIntersection) } - tracingSelectors.zip(tracings).zipWithIndex.foreach { - case ((selector, tracing), sourceVolumeIndex) => - val bucketStream = bucketStreamFromSelector(selector, tracing) + tracingIds.zip(tracings).zipWithIndex.foreach { + case ((tracingId, tracing), sourceVolumeIndex) => + val bucketStream = bucketStreamFor(tracingId, tracing) mergedVolume.addFromBucketStream(sourceVolumeIndex, bucketStream, Some(magsIntersection)) } for { @@ -848,17 +772,25 @@ class VolumeTracingService @Inject()( elementClass) mergedAdditionalAxes <- Fox.box2Fox(AdditionalAxis.mergeAndAssertSameAdditionalAxes(tracings.map(t => AdditionalAxis.fromProtosAsOpt(t.additionalAxes)))) - fallbackLayer <- getFallbackLayer(tracingSelectors.head.tracingId) + firstTracingId <- tracingIds.headOption ?~> "merge.noTracings" + firstTracing <- tracings.headOption ?~> "merge.noTracings" + fallbackLayer <- getFallbackLayer(firstTracingId, firstTracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer(newId, volumeSegmentIndexClient, newVersion, remoteDatastoreClient, fallbackLayer, mergedAdditionalAxes, - userToken) + tc) _ <- mergedVolume.withMergedBuckets { (bucketPosition, bucketBytes) => for { - _ <- saveBucket(newId, elementClass, bucketPosition, bucketBytes, newVersion, toCache, mergedAdditionalAxes) + _ <- saveBucket(newId, + elementClass, + bucketPosition, + bucketBytes, + newVersion, + toTemporaryStore = !persist, // TODO unify boolean direction + naming + mergedAdditionalAxes) _ <- Fox.runIf(shouldCreateSegmentIndex)( updateSegmentIndex(segmentIndexBuffer, bucketPosition, @@ -874,74 +806,15 @@ class VolumeTracingService @Inject()( } } - def addSegmentIndex(tracingId: String, - tracing: VolumeTracing, - currentVersion: Long, - userToken: 
Option[String], - dryRun: Boolean): Fox[Option[Int]] = { - var processedBucketCount = 0 - for { - isTemporaryTracing <- isTemporaryTracing(tracingId) - sourceDataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing) - buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() - fallbackLayer <- getFallbackLayer(tracingId) - mappingName <- baseMappingName(tracing) - segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, - volumeSegmentIndexClient, - currentVersion + 1L, - remoteDatastoreClient, - fallbackLayer, - sourceDataLayer.additionalAxes, - userToken) - _ <- Fox.serialCombined(buckets) { - case (bucketPosition, bucketData) => - processedBucketCount += 1 - updateSegmentIndex(segmentIndexBuffer, - bucketPosition, - bucketData, - Empty, - tracing.elementClass, - mappingName, - editableMappingTracingId(tracing, tracingId)) - } - _ <- Fox.runIf(!dryRun)(segmentIndexBuffer.flush()) - updateGroup = UpdateActionGroup[VolumeTracing]( - tracing.version + 1L, - System.currentTimeMillis(), - None, - List(AddSegmentIndex()), - None, - None, - "dummyTransactionId", - 1, - 0 - ) - _ <- Fox.runIf(!dryRun)(handleUpdateGroup(tracingId, updateGroup, tracing.version, userToken)) - } yield Some(processedBucketCount) - } - - def checkIfSegmentIndexMayBeAdded(tracingId: String, tracing: VolumeTracing, userToken: Option[String])( - implicit ec: ExecutionContext): Fox[Boolean] = - for { - fallbackLayerOpt <- Fox.runIf(tracing.fallbackLayer.isDefined)( - remoteFallbackLayerFromVolumeTracing(tracing, tracingId)) - canHaveSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, - fallbackLayerOpt, - userToken) - alreadyHasSegmentIndex = tracing.hasSegmentIndex.getOrElse(false) - } yield canHaveSegmentIndex && !alreadyHasSegmentIndex - - def importVolumeData(tracingId: String, - tracing: VolumeTracing, - zipFile: File, - currentVersion: Int, - userToken: Option[String])(implicit mp: MessagesProvider): 
Fox[Long] = + def importVolumeData(tracingId: String, tracing: VolumeTracing, zipFile: File, currentVersion: Int)( + implicit mp: MessagesProvider, + tc: TokenContext): Fox[Long] = if (currentVersion != tracing.version) Fox.failure("version.mismatch") else { val magSet = magSetFromZipfile(zipFile) val magsDoMatch = - magSet.isEmpty || magSet == resolveLegacyMagList(tracing.mags).map(vec3IntFromProto).toSet + magSet.isEmpty || magSet == VolumeTracingMags.resolveLegacyMagList(tracing.mags).map(vec3IntFromProto).toSet if (!magsDoMatch) Fox.failure("annotation.volume.magssDoNotMatch") @@ -960,8 +833,8 @@ class VolumeTracingService @Inject()( mergedVolume.largestSegmentId.toLong, tracing.elementClass) dataLayer = volumeTracingLayer(tracingId, tracing) - fallbackLayer <- getFallbackLayer(tracingId) - mappingName <- baseMappingName(tracing) + fallbackLayer <- getFallbackLayer(tracingId, tracing) + mappingName <- selectMappingName(tracing) segmentIndexBuffer <- Fox.successful( new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, @@ -969,7 +842,7 @@ class VolumeTracingService @Inject()( remoteDatastoreClient, fallbackLayer, dataLayer.additionalAxes, - userToken)) + tc)) _ <- mergedVolume.withMergedBuckets { (bucketPosition, bucketBytes) => for { _ <- saveBucket(volumeLayer, bucketPosition, bucketBytes, tracing.version + 1) @@ -990,54 +863,43 @@ class VolumeTracingService @Inject()( } yield () } _ <- segmentIndexBuffer.flush() - updateGroup = UpdateActionGroup[VolumeTracing]( - tracing.version + 1, - System.currentTimeMillis(), - None, - List(ImportVolumeData(Some(mergedVolume.largestSegmentId.toPositiveLong))), - None, - None, - "dummyTransactionId", - 1, - 0 - ) - _ <- handleUpdateGroup(tracingId, updateGroup, tracing.version, userToken) } yield mergedVolume.largestSegmentId.toPositiveLong } } - def dummyTracing: VolumeTracing = ??? 
- - def mergeEditableMappings(tracingsWithIds: List[(VolumeTracing, String)], userToken: Option[String]): Fox[String] = + def mergeEditableMappings(newTracingId: String, + tracingsWithIds: List[(VolumeTracing, String)], + persist: Boolean): Fox[Unit] = if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { for { + _ <- bool2Fox(persist) ?~> "Cannot merge editable mappings without “persist” (trying to merge compound annotations?)" remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) remoteFallbackLayer <- remoteFallbackLayers.headOption.toFox _ <- bool2Fox(remoteFallbackLayers.forall(_ == remoteFallbackLayer)) ?~> "Cannot merge editable mappings based on different dataset layers" - editableMappingIds <- Fox.serialCombined(tracingsWithIds)(tracingWithId => tracingWithId._1.mappingName) - _ <- bool2Fox(editableMappingIds.length == tracingsWithIds.length) ?~> "Not all volume tracings have editable mappings" - newEditableMappingId <- editableMappingService.merge(editableMappingIds, remoteFallbackLayer, userToken) - } yield newEditableMappingId + // TODO _ <- editableMappingService.merge(newTracingId, tracingsWithIds.map(_._2), remoteFallbackLayer) + } yield () } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { Fox.empty } else { Fox.failure("Cannot merge tracings with and without editable mappings") } - def getFallbackLayer(tracingId: String): Fox[Option[RemoteFallbackLayer]] = - fallbackLayerCache.getOrLoad(tracingId, t => getFallbackLayerFromWebknossos(t)) + def getFallbackLayer(tracingId: String, tracing: VolumeTracing)( + implicit tc: TokenContext): Fox[Option[RemoteFallbackLayer]] = + fallbackLayerCache.getOrLoad((tracingId, tracing.fallbackLayer, tc.userTokenOpt), + t => getFallbackLayerFromWebknossos(t._1, t._2)) - private def getFallbackLayerFromWebknossos(tracingId: String) = 
Fox[Option[RemoteFallbackLayer]] { - for { - tracing <- find(tracingId) - dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) - dataSourceId = dataSource.id - fallbackLayerName = tracing.fallbackLayer - fallbackLayer = dataSource.dataLayers - .find(_.name == fallbackLayerName.getOrElse("")) - .map(RemoteFallbackLayer.fromDataLayerAndDataSource(_, dataSourceId)) - } yield fallbackLayer - } + private def getFallbackLayerFromWebknossos(tracingId: String, fallbackLayerName: Option[String])( + implicit tc: TokenContext) = + Fox[Option[RemoteFallbackLayer]] { + for { + dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) + dataSourceId = dataSource.id + fallbackLayer = dataSource.dataLayers + .find(_.name == fallbackLayerName.getOrElse("")) + .map(RemoteFallbackLayer.fromDataLayerAndDataSource(_, dataSourceId)) + } yield fallbackLayer + } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index d35b3cf7da8..20cb63206ed 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -1,13 +1,12 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume -import java.util.Base64 import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.webknossos.datastore.VolumeTracing.{Segment, SegmentGroup, VolumeTracing} -import com.scalableminds.webknossos.datastore.geometry +import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate -import 
com.scalableminds.webknossos.tracingstore.tracings.UpdateAction.VolumeUpdateAction -import com.scalableminds.webknossos.tracingstore.tracings.{NamedBoundingBox, UpdateAction, MetadataEntry} +import com.scalableminds.webknossos.tracingstore.annotation.{LayerUpdateAction, UpdateAction} +import com.scalableminds.webknossos.tracingstore.tracings.{NamedBoundingBox, MetadataEntry} import play.api.libs.json._ trait VolumeUpdateActionHelper { @@ -26,29 +25,34 @@ trait VolumeUpdateActionHelper { } -trait ApplyableVolumeAction extends VolumeUpdateAction +trait VolumeUpdateAction extends LayerUpdateAction + +trait ApplyableVolumeUpdateAction extends VolumeUpdateAction { + def applyOn(tracing: VolumeTracing): VolumeTracing +} + +trait BucketMutatingVolumeUpdateAction extends VolumeUpdateAction case class UpdateBucketVolumeAction(position: Vec3Int, cubeSize: Int, mag: Vec3Int, - base64Data: String, + base64Data: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) - extends VolumeUpdateAction { - lazy val data: Array[Byte] = Base64.getDecoder.decode(base64Data) + info: Option[String] = None) + extends BucketMutatingVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateBucket", actionTimestamp, actionAuthorId, Json.obj()) -} - -object UpdateBucketVolumeAction { - 
implicit val jsonFormat: OFormat[UpdateBucketVolumeAction] = Json.format[UpdateBucketVolumeAction] + def withoutBase64Data: UpdateBucketVolumeAction = + this.copy(base64Data = None) } case class UpdateTracingVolumeAction( @@ -57,86 +61,68 @@ case class UpdateTracingVolumeAction( editRotation: Vec3Double, largestSegmentId: Option[Long], zoomLevel: Double, + editPositionAdditionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - editPositionAdditionalCoordinates: Option[Seq[AdditionalCoordinate]] = None -) extends VolumeUpdateAction { + info: Option[String] = None +) extends ApplyableVolumeUpdateAction + with ProtoGeometryImplicits { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateTracing", actionTimestamp, actionAuthorId, Json.obj()) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def isViewOnlyChange: Boolean = true -} - -object UpdateTracingVolumeAction { - implicit val jsonFormat: OFormat[UpdateTracingVolumeAction] = Json.format[UpdateTracingVolumeAction] -} - -case class RevertToVersionVolumeAction(sourceVersion: Long, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends VolumeUpdateAction { - override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = - this.copy(actionAuthorId = authorId) - - 
override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("revertToVersion", - actionTimestamp, - actionAuthorId, - Json.obj("sourceVersion" -> sourceVersion)) -} -object RevertToVersionVolumeAction { - implicit val jsonFormat: OFormat[RevertToVersionVolumeAction] = Json.format[RevertToVersionVolumeAction] -} - -case class UpdateUserBoundingBoxes(boundingBoxes: List[NamedBoundingBox], - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends ApplyableVolumeAction { + override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.copy( + activeSegmentId = Some(activeSegmentId), + editPosition = editPosition, + editRotation = editRotation, + largestSegmentId = largestSegmentId, + zoomLevel = zoomLevel, + editPositionAdditionalCoordinates = AdditionalCoordinate.toProto(editPositionAdditionalCoordinates) + ) +} + +case class UpdateUserBoundingBoxesVolumeAction(boundingBoxes: List[NamedBoundingBox], + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateUserBoundingBoxes", actionTimestamp, actionAuthorId, Json.obj()) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto)) } -object UpdateUserBoundingBoxes { - implicit val jsonFormat: OFormat[UpdateUserBoundingBoxes] 
= Json.format[UpdateUserBoundingBoxes] -} - -case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], - isVisible: Boolean, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends ApplyableVolumeAction { +case class UpdateUserBoundingBoxVisibilityVolumeAction(boundingBoxId: Option[Int], + isVisible: Boolean, + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateUserBoundingBoxVisibility", - actionTimestamp, - actionAuthorId, - Json.obj("boundingBoxId" -> boundingBoxId, "newVisibility" -> isVisible)) - override def isViewOnlyChange: Boolean = true + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = { - def updateUserBoundingBoxes(): Seq[geometry.NamedBoundingBoxProto] = + def updateUserBoundingBoxes(): Seq[NamedBoundingBoxProto] = tracing.userBoundingBoxes.map { boundingBox => if (boundingBoxId.forall(_ == boundingBox.id)) boundingBox.copy(isVisible = Some(isVisible)) @@ -146,92 +132,58 @@ case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], tracing.withUserBoundingBoxes(updateUserBoundingBoxes()) } -} -object UpdateUserBoundingBoxVisibility { - implicit val jsonFormat: OFormat[UpdateUserBoundingBoxVisibility] = Json.format[UpdateUserBoundingBoxVisibility] + override def isViewOnlyChange: Boolean = true } -case class 
RemoveFallbackLayer(actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends ApplyableVolumeAction { +case class RemoveFallbackLayerVolumeAction(actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("removeFallbackLayer", actionTimestamp, actionAuthorId, Json.obj()) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.clearFallbackLayer } -object RemoveFallbackLayer { - implicit val jsonFormat: OFormat[RemoveFallbackLayer] = Json.format[RemoveFallbackLayer] -} - -case class ImportVolumeData(largestSegmentId: Option[Long], - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends ApplyableVolumeAction { +case class ImportVolumeDataVolumeAction(actionTracingId: String, + largestSegmentId: Option[Long], + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("importVolumeTracing", - 
actionTimestamp, - actionAuthorId, - Json.obj("largestSegmentId" -> largestSegmentId)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.copy(largestSegmentId = largestSegmentId) } -object ImportVolumeData { - implicit val jsonFormat: OFormat[ImportVolumeData] = Json.format[ImportVolumeData] -} - -case class AddSegmentIndex(actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends ApplyableVolumeAction { +case class AddSegmentIndexVolumeAction(actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("addSegmentIndex", actionTimestamp, actionAuthorId, Json.obj()) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.copy(hasSegmentIndex = Some(true)) -} -object AddSegmentIndex { - implicit val jsonFormat: OFormat[AddSegmentIndex] = Json.format[AddSegmentIndex] -} - -case class UpdateTdCamera(actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends VolumeUpdateAction { - - override def addTimestamp(timestamp: Long): VolumeUpdateAction = - this.copy(actionTimestamp = 
Some(timestamp)) - override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = - this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateTdCamera", actionTimestamp, actionAuthorId, Json.obj()) - - override def isViewOnlyChange: Boolean = true -} - -object UpdateTdCamera { - implicit val jsonFormat: OFormat[UpdateTdCamera] = Json.format[UpdateTdCamera] } case class CreateSegmentVolumeAction(id: Long, @@ -240,20 +192,22 @@ case class CreateSegmentVolumeAction(id: Long, color: Option[com.scalableminds.util.image.Color], groupId: Option[Int], creationTime: Option[Long], + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + metadata: Option[Seq[MetadataEntry]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, - metadata: Option[Seq[MetadataEntry]] = None) - extends ApplyableVolumeAction + info: Option[String] = None) + extends ApplyableVolumeUpdateAction with ProtoGeometryImplicits { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: UpdateAction[VolumeTracing] = - CompactVolumeUpdateAction("createSegment", actionTimestamp, actionAuthorId, Json.obj("id" -> id)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = { val newSegment = @@ -271,21 +225,19 @@ case class CreateSegmentVolumeAction(id: Long, } } -object CreateSegmentVolumeAction { - implicit val jsonFormat: OFormat[CreateSegmentVolumeAction] = 
Json.format[CreateSegmentVolumeAction] -} - case class UpdateSegmentVolumeAction(id: Long, anchorPosition: Option[Vec3Int], name: Option[String], color: Option[com.scalableminds.util.image.Color], creationTime: Option[Long], groupId: Option[Int], + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + metadata: Option[Seq[MetadataEntry]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, - metadata: Option[Seq[MetadataEntry]] = None) - extends ApplyableVolumeAction + info: Option[String] = None) + extends ApplyableVolumeUpdateAction with ProtoGeometryImplicits with VolumeUpdateActionHelper { @@ -293,9 +245,9 @@ case class UpdateSegmentVolumeAction(id: Long, this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: UpdateAction[VolumeTracing] = - CompactVolumeUpdateAction("updateSegment", actionTimestamp, actionAuthorId, Json.obj("id" -> id)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = { def segmentTransform(segment: Segment): Segment = @@ -312,63 +264,54 @@ case class UpdateSegmentVolumeAction(id: Long, } } -object UpdateSegmentVolumeAction { - implicit val jsonFormat: OFormat[UpdateSegmentVolumeAction] = Json.format[UpdateSegmentVolumeAction] -} - case class DeleteSegmentVolumeAction(id: Long, + actionTracingId: String, actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None) - extends ApplyableVolumeAction { + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction { override def 
addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: UpdateAction[VolumeTracing] = - CompactVolumeUpdateAction("deleteSegment", actionTimestamp, actionAuthorId, Json.obj("id" -> id)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.withSegments(tracing.segments.filter(_.segmentId != id)) } -object DeleteSegmentVolumeAction { - implicit val jsonFormat: OFormat[DeleteSegmentVolumeAction] = Json.format[DeleteSegmentVolumeAction] -} - case class DeleteSegmentDataVolumeAction(id: Long, + actionTracingId: String, actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None) - extends VolumeUpdateAction { + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends BucketMutatingVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("deleteSegmentData", actionTimestamp, actionAuthorId, Json.obj()) -} - -object DeleteSegmentDataVolumeAction { - implicit val jsonFormat: OFormat[DeleteSegmentDataVolumeAction] = Json.format[DeleteSegmentDataVolumeAction] -} - -case class UpdateMappingNameAction(mappingName: Option[String], - isEditable: Option[Boolean], - isLocked: Option[Boolean], - actionTimestamp: Option[Long], - actionAuthorId: Option[String] = None) - extends ApplyableVolumeAction { - override def addTimestamp(timestamp: Long): VolumeUpdateAction = 
- this.copy(actionTimestamp = Some(timestamp)) - - override def transformToCompact: UpdateAction[VolumeTracing] = - CompactVolumeUpdateAction("updateMappingName", - actionTimestamp, - actionAuthorId, - Json.obj("mappingName" -> mappingName)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) +} + +case class UpdateMappingNameVolumeAction(mappingName: Option[String], + isEditable: Option[Boolean], + isLocked: Option[Boolean], + actionTracingId: String, + actionTimestamp: Option[Long], + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction { + override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = + this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = if (tracing.mappingIsLocked.getOrElse(false)) tracing // cannot change mapping name if it is locked @@ -378,101 +321,103 @@ case class UpdateMappingNameAction(mappingName: Option[String], mappingIsLocked = Some(isLocked.getOrElse(false))) } -object UpdateMappingNameAction { - implicit val jsonFormat: OFormat[UpdateMappingNameAction] = Json.format[UpdateMappingNameAction] +case class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegmentGroup], + actionTracingId: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends ApplyableVolumeUpdateAction + with VolumeUpdateActionHelper { + override def applyOn(tracing: VolumeTracing): VolumeTracing = + 
tracing.withSegmentGroups(segmentGroups.map(convertSegmentGroup)) + + override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = + this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } +// TODO this now exists only for UpdateBucket. Make it a slimmed down version of that rather than generic? case class CompactVolumeUpdateAction(name: String, + value: JsObject, + actionTracingId: String, actionTimestamp: Option[Long], actionAuthorId: Option[String] = None, - value: JsObject) - extends VolumeUpdateAction + info: Option[String] = None) + extends VolumeUpdateAction { + override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = + this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) +} object CompactVolumeUpdateAction { implicit object compactVolumeUpdateActionFormat extends Format[CompactVolumeUpdateAction] { override def reads(json: JsValue): JsResult[CompactVolumeUpdateAction] = for { name <- (json \ "name").validate[String] + actionTracingId <- (json \ "value" \ "actionTracingId").validate[String] actionTimestamp <- (json \ "value" \ "actionTimestamp").validateOpt[Long] actionAuthorId <- (json \ "value" \ "actionAuthorId").validateOpt[String] - value <- (json \ "value").validate[JsObject].map(_ - "actionTimestamp") - } yield CompactVolumeUpdateAction(name, actionTimestamp, actionAuthorId, value) + info <- (json \ "value" \ 
"info").validateOpt[String] + value <- (json \ "value") + .validate[JsObject] + .map(_ - "actionTracingId" - "actionTimestamp" - "actionAuthorId" - "info") + } yield CompactVolumeUpdateAction(name, value, actionTracingId, actionTimestamp, actionAuthorId, info) override def writes(o: CompactVolumeUpdateAction): JsValue = - Json.obj("name" -> o.name, "value" -> (Json.obj("actionTimestamp" -> o.actionTimestamp) ++ o.value)) + Json.obj( + "name" -> o.name, + "value" -> (Json.obj("actionTracingId" -> o.actionTracingId, + "actionTimestamp" -> o.actionTimestamp, + "actionAuthorId" -> o.actionAuthorId, + "info" -> o.info) ++ o.value) + ) } } -case class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegmentGroup], - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends ApplyableVolumeAction - with VolumeUpdateActionHelper { - override def applyOn(tracing: VolumeTracing): VolumeTracing = - tracing.withSegmentGroups(segmentGroups.map(convertSegmentGroup)) - - override def addTimestamp(timestamp: Long): UpdateAction[VolumeTracing] = - this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): UpdateAction[VolumeTracing] = - this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): UpdateAction[VolumeTracing] = this.copy(info = info) +object UpdateBucketVolumeAction { + implicit val jsonFormat: OFormat[UpdateBucketVolumeAction] = Json.format[UpdateBucketVolumeAction] +} +object UpdateTracingVolumeAction { + implicit val jsonFormat: OFormat[UpdateTracingVolumeAction] = Json.format[UpdateTracingVolumeAction] +} +object UpdateUserBoundingBoxesVolumeAction { + implicit val jsonFormat: OFormat[UpdateUserBoundingBoxesVolumeAction] = + Json.format[UpdateUserBoundingBoxesVolumeAction] +} +object UpdateUserBoundingBoxVisibilityVolumeAction { + implicit val jsonFormat: OFormat[UpdateUserBoundingBoxVisibilityVolumeAction] = + 
Json.format[UpdateUserBoundingBoxVisibilityVolumeAction] +} +object RemoveFallbackLayerVolumeAction { + implicit val jsonFormat: OFormat[RemoveFallbackLayerVolumeAction] = Json.format[RemoveFallbackLayerVolumeAction] +} +object ImportVolumeDataVolumeAction { + implicit val jsonFormat: OFormat[ImportVolumeDataVolumeAction] = Json.format[ImportVolumeDataVolumeAction] +} +object AddSegmentIndexVolumeAction { + implicit val jsonFormat: OFormat[AddSegmentIndexVolumeAction] = Json.format[AddSegmentIndexVolumeAction] +} +object CreateSegmentVolumeAction { + implicit val jsonFormat: OFormat[CreateSegmentVolumeAction] = Json.format[CreateSegmentVolumeAction] +} +object UpdateSegmentVolumeAction { + implicit val jsonFormat: OFormat[UpdateSegmentVolumeAction] = Json.format[UpdateSegmentVolumeAction] +} +object DeleteSegmentVolumeAction { + implicit val jsonFormat: OFormat[DeleteSegmentVolumeAction] = Json.format[DeleteSegmentVolumeAction] +} +object DeleteSegmentDataVolumeAction { + implicit val jsonFormat: OFormat[DeleteSegmentDataVolumeAction] = Json.format[DeleteSegmentDataVolumeAction] +} +object UpdateMappingNameVolumeAction { + implicit val jsonFormat: OFormat[UpdateMappingNameVolumeAction] = Json.format[UpdateMappingNameVolumeAction] } - object UpdateSegmentGroupsVolumeAction { implicit val jsonFormat: OFormat[UpdateSegmentGroupsVolumeAction] = Json.format[UpdateSegmentGroupsVolumeAction] } - -object VolumeUpdateAction { - - implicit object volumeUpdateActionFormat extends Format[VolumeUpdateAction] { - override def reads(json: JsValue): JsResult[VolumeUpdateAction] = - (json \ "name").validate[String].flatMap { - case "updateBucket" => (json \ "value").validate[UpdateBucketVolumeAction] - case "updateTracing" => (json \ "value").validate[UpdateTracingVolumeAction] - case "revertToVersion" => (json \ "value").validate[RevertToVersionVolumeAction] - case "updateUserBoundingBoxes" => (json \ "value").validate[UpdateUserBoundingBoxes] - case 
"updateUserBoundingBoxVisibility" => (json \ "value").validate[UpdateUserBoundingBoxVisibility] - case "removeFallbackLayer" => (json \ "value").validate[RemoveFallbackLayer] - case "importVolumeTracing" => (json \ "value").validate[ImportVolumeData] - case "updateTdCamera" => (json \ "value").validate[UpdateTdCamera] - case "createSegment" => (json \ "value").validate[CreateSegmentVolumeAction] - case "updateSegment" => (json \ "value").validate[UpdateSegmentVolumeAction] - case "updateSegmentGroups" => (json \ "value").validate[UpdateSegmentGroupsVolumeAction] - case "deleteSegment" => (json \ "value").validate[DeleteSegmentVolumeAction] - case "deleteSegmentData" => (json \ "value").validate[DeleteSegmentDataVolumeAction] - case "updateMappingName" => (json \ "value").validate[UpdateMappingNameAction] - case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") - } - - override def writes(o: VolumeUpdateAction): JsValue = o match { - case s: UpdateBucketVolumeAction => - Json.obj("name" -> "updateBucket", "value" -> Json.toJson(s)(UpdateBucketVolumeAction.jsonFormat)) - case s: UpdateTracingVolumeAction => - Json.obj("name" -> "updateTracing", "value" -> Json.toJson(s)(UpdateTracingVolumeAction.jsonFormat)) - case s: RevertToVersionVolumeAction => - Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionVolumeAction.jsonFormat)) - case s: UpdateUserBoundingBoxes => - Json.obj("name" -> "updateUserBoundingBoxes", "value" -> Json.toJson(s)(UpdateUserBoundingBoxes.jsonFormat)) - case s: UpdateUserBoundingBoxVisibility => - Json.obj("name" -> "updateUserBoundingBoxVisibility", - "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibility.jsonFormat)) - case s: RemoveFallbackLayer => - Json.obj("name" -> "removeFallbackLayer", "value" -> Json.toJson(s)(RemoveFallbackLayer.jsonFormat)) - case s: ImportVolumeData => - Json.obj("name" -> "importVolumeTracing", "value" -> Json.toJson(s)(ImportVolumeData.jsonFormat)) - case s: 
UpdateTdCamera => - Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCamera.jsonFormat)) - case s: CreateSegmentVolumeAction => - Json.obj("name" -> "createSegment", "value" -> Json.toJson(s)(CreateSegmentVolumeAction.jsonFormat)) - case s: UpdateSegmentVolumeAction => - Json.obj("name" -> "updateSegment", "value" -> Json.toJson(s)(UpdateSegmentVolumeAction.jsonFormat)) - case s: DeleteSegmentVolumeAction => - Json.obj("name" -> "deleteSegment", "value" -> Json.toJson(s)(DeleteSegmentVolumeAction.jsonFormat)) - case s: UpdateSegmentGroupsVolumeAction => - Json.obj("name" -> "updateSegmentGroups", "value" -> Json.toJson(s)(UpdateSegmentGroupsVolumeAction.jsonFormat)) - case s: CompactVolumeUpdateAction => Json.toJson(s)(CompactVolumeUpdateAction.compactVolumeUpdateActionFormat) - } - } - -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/WKWBucketStreamSink.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/WKWBucketStreamSink.scala index ec8df4f8f59..b1783fe8d9d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/WKWBucketStreamSink.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/WKWBucketStreamSink.scala @@ -18,7 +18,7 @@ import scala.concurrent.{ExecutionContext, Future} class WKWBucketStreamSink(val layer: DataLayer, tracingHasFallbackLayer: Boolean) extends WKWDataFormatHelper - with VolumeBucketReversionHelper + with ReversionHelper with ByteUtils { def apply(bucketStream: Iterator[(BucketPosition, Array[Byte])], mags: Seq[Vec3Int])( @@ -27,7 +27,7 @@ class WKWBucketStreamSink(val layer: DataLayer, tracingHasFallbackLayer: Boolean val header = WKWHeader(1, DataLayer.bucketLength, ChunkType.LZ4, voxelType, numChannels) bucketStream.flatMap { case (bucket, data) => - val skipBucket = if (tracingHasFallbackLayer) isRevertedBucket(data) else 
isAllZero(data) + val skipBucket = if (tracingHasFallbackLayer) isRevertedElement(data) else isAllZero(data) if (skipBucket) { // If the tracing has no fallback segmentation, all-zero buckets can be omitted entirely None diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala index 40b0c75db6d..be80c1dfb78 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala @@ -28,7 +28,7 @@ import scala.concurrent.{ExecutionContext, Future} // Creates data zip from volume tracings class Zarr3BucketStreamSink(val layer: VolumeTracingLayer, tracingHasFallbackLayer: Boolean) extends ProtoGeometryImplicits - with VolumeBucketReversionHelper + with ReversionHelper with Zarr3OutputHelper with ByteUtils { @@ -44,7 +44,7 @@ class Zarr3BucketStreamSink(val layer: VolumeTracingLayer, tracingHasFallbackLay val header = Zarr3ArrayHeader.fromDataLayer(layer, mags.headOption.getOrElse(Vec3Int.ones)) bucketStream.flatMap { case (bucket, data) => - val skipBucket = if (tracingHasFallbackLayer) isAllZero(data) else isRevertedBucket(data) + val skipBucket = if (tracingHasFallbackLayer) isAllZero(data) else isRevertedElement(data) if (skipBucket) { // If the tracing has no fallback segmentation, all-zero buckets can be omitted entirely None diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index d1384d8aa4b..5bb6a936629 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -1,82 +1,77 
@@ -# Routes -# This file defines all application routes (Higher priority routes first) +# Defines tracingstore routes (Higher priority routes first) # ~~~~ # Health endpoint -GET /health @com.scalableminds.webknossos.tracingstore.controllers.Application.health +GET /health @com.scalableminds.webknossos.tracingstore.controllers.Application.health + +# Annotations (concerns AnnotationProto, not annotation info as stored in postgres) +POST /annotation/save @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.save(annotationId: String) +GET /annotation/:annotationId @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.get(annotationId: String, version: Option[Long]) +POST /annotation/:annotationId/update @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(annotationId: String) +GET /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) +GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) +GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) +POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], isFromTask: Boolean, datasetBoundingBox: Option[String]) +POST /annotation/:annotationId/resetToBase @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.resetToBase(annotationId: String) +POST /annotation/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.mergedFromIds(persist: Boolean, newAnnotationId: 
String) # Volume tracings -POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save(token: Option[String]) -POST /volume/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(token: Option[String], tracingId: String, minMag: Option[Int], maxMag: Option[Int]) -POST /volume/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(token: Option[String], tracingId: String) -GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(token: Option[String], tracingId: String, version: Option[Long]) -GET /volume/:tracingId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.newestVersion(token: Option[String], tracingId: String) -POST /volume/:tracingId/update @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.update(token: Option[String], tracingId: String) -GET /volume/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(token: Option[String], tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSize: Option[String], voxelSizeUnit: Option[String]) -POST /volume/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(token: Option[String], tracingId: String) -POST /volume/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(token: Option[String], tracingId: String, fromTask: Option[Boolean], minMag: Option[Int], maxMag: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) -GET /volume/:tracingId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.updateActionLog(token: Option[String], 
tracingId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) -POST /volume/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(token: Option[String], tracingId: String) -POST /volume/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(token: Option[String], tracingId: String) -POST /volume/:tracingId/segmentIndex/:segmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentIndex(token: Option[String], tracingId: String, segmentId: Long) -POST /volume/:tracingId/importVolumeData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.importVolumeData(token: Option[String], tracingId: String) -POST /volume/:tracingId/addSegmentIndex @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.addSegmentIndex(token: Option[String], tracingId: String, dryRun: Boolean) -GET /volume/:tracingId/findData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.findData(token: Option[String], tracingId: String) -GET /volume/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateSkeleton(token: Option[String], tracingId: String, agglomerateId: Long) -POST /volume/:tracingId/makeMappingEditable @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.makeMappingEditable(token: Option[String], tracingId: String) -POST /volume/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateGraphMinCut(token: Option[String], tracingId: String) -POST /volume/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateGraphNeighbors(token: Option[String], tracingId: String) -POST 
/volume/:tracingId/segmentStatistics/volume @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentVolume(token: Option[String], tracingId: String) -POST /volume/:tracingId/segmentStatistics/boundingBox @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentBoundingBox(token: Option[String], tracingId: String) -POST /volume/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getMultiple(token: Option[String]) -POST /volume/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromIds(token: Option[String], persist: Boolean) -POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents(token: Option[String], persist: Boolean) +POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save() +POST /volume/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(tracingId: String, minMag: Option[Int], maxMag: Option[Int]) +POST /volume/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(tracingId: String) +GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(tracingId: String, version: Option[Long]) +GET /volume/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSize: Option[String], voxelSizeUnit: Option[String]) +POST /volume/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(tracingId: String) +POST /volume/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(tracingId: String) 
+POST /volume/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(tracingId: String) +POST /volume/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(tracingId: String, minMag: Option[Int], maxMag: Option[Int], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) +POST /volume/:tracingId/segmentIndex/:segmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentIndex(tracingId: String, segmentId: Long) +POST /volume/:tracingId/importVolumeData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.importVolumeData(tracingId: String) +GET /volume/:tracingId/findData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.findData(tracingId: String) +POST /volume/:tracingId/segmentStatistics/volume @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentVolume(tracingId: String) +POST /volume/:tracingId/segmentStatistics/boundingBox @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentBoundingBox(tracingId: String) +POST /volume/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getMultiple +POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents(persist: Boolean) # Editable Mappings -POST /mapping/:tracingId/update @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.updateEditableMapping(token: Option[String], tracingId: String) -GET /mapping/:tracingId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingUpdateActionLog(token: Option[String], tracingId: String) -GET /mapping/:tracingId/info 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingInfo(token: Option[String], tracingId: String, version: Option[Long]) -GET /mapping/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingSegmentIdsForAgglomerate(token: Option[String], tracingId: String, agglomerateId: Long) -POST /mapping/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingAgglomerateIdsForSegments(token: Option[String], tracingId: String) +POST /mapping/:tracingId/makeMappingEditable @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.makeMappingEditable(tracingId: String) +GET /mapping/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(tracingId: String, version: Option[Long]) +GET /mapping/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.segmentIdsForAgglomerate(tracingId: String, agglomerateId: Long) +POST /mapping/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(tracingId: String) +POST /mapping/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphMinCut(tracingId: String) +POST /mapping/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphNeighbors(tracingId: String) +GET /mapping/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateSkeleton(tracingId: String, agglomerateId: Long) # Zarr endpoints for volume annotations # Zarr version 2 -GET /volume/zarr/json/:tracingId 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(token: Option[String], tracingId: String, zarrVersion: Int = 2) -GET /volume/zarr/json/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 2) -GET /volume/zarr/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], tracingId: String, zarrVersion: Int = 2) -GET /volume/zarr/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], tracingId: String, zarrVersion: Int = 2) -GET /volume/zarr/:tracingId/.zgroup @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zGroup(token: Option[String], tracingId: String) -GET /volume/zarr/:tracingId/.zattrs @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zAttrs(token: Option[String], tracingId: String) -GET /volume/zarr/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(token: Option[String], tracingId: String, tracingName: Option[String], zarrVersion: Int = 2) -GET /volume/zarr/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 2) -GET /volume/zarr/:tracingId/:mag/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 2) -GET /volume/zarr/:tracingId/:mag/.zarray 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zArray(token: Option[String], tracingId: String, mag: String) -GET /volume/zarr/:tracingId/:mag/:coordinates @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(token: Option[String], tracingId: String, mag: String, coordinates: String) +GET /volume/zarr/json/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(tracingId: String, zarrVersion: Int = 2) +GET /volume/zarr/json/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(tracingId: String, mag: String, zarrVersion: Int = 2) +GET /volume/zarr/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(tracingId: String, zarrVersion: Int = 2) +GET /volume/zarr/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(tracingId: String, zarrVersion: Int = 2) +GET /volume/zarr/:tracingId/.zgroup @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zGroup(tracingId: String) +GET /volume/zarr/:tracingId/.zattrs @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zAttrs(tracingId: String) +GET /volume/zarr/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(tracingId: String, tracingName: Option[String], zarrVersion: Int = 2) +GET /volume/zarr/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(tracingId: String, mag: String, zarrVersion: Int = 2) +GET /volume/zarr/:tracingId/:mag/ 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(tracingId: String, mag: String, zarrVersion: Int = 2) +GET /volume/zarr/:tracingId/:mag/.zarray @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zArray(tracingId: String, mag: String) +GET /volume/zarr/:tracingId/:mag/:coordinates @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(tracingId: String, mag: String, coordinates: String) # Zarr version 3 -GET /volume/zarr3_experimental/json/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(token: Option[String], tracingId: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/json/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], tracingId: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], tracingId: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(token: Option[String], tracingId: String, tracingName: Option[String], zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:tracingId/zarr.json @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJson(token: Option[String], tracingId: String) -GET 
/volume/zarr3_experimental/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:tracingId/:mag/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:tracingId/:mag/zarr.json @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJsonForMag(token: Option[String], tracingId: String, mag: String) -GET /volume/zarr3_experimental/:tracingId/:mag/:coordinates @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(token: Option[String], tracingId: String, mag: String, coordinates: String) +GET /volume/zarr3_experimental/json/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(tracingId: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/json/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(tracingId: String, mag: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(tracingId: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(tracingId: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(tracingId: String, tracingName: 
Option[String], zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:tracingId/zarr.json @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJson(tracingId: String) +GET /volume/zarr3_experimental/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(tracingId: String, mag: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:tracingId/:mag/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(tracingId: String, mag: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:tracingId/:mag/zarr.json @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJsonForMag(tracingId: String, mag: String) +GET /volume/zarr3_experimental/:tracingId/:mag/:coordinates @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(tracingId: String, mag: String, coordinates: String) # Skeleton tracings -POST /skeleton/save @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.save(token: Option[String]) -POST /skeleton/saveMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.saveMultiple(token: Option[String]) - -POST /skeleton/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents(token: Option[String], persist: Boolean) -POST /skeleton/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromIds(token: Option[String], persist: Boolean) - -GET /skeleton/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(token: Option[String], tracingId: String, version: Option[Long]) -GET /skeleton/:tracingId/newestVersion 
@com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.newestVersion(token: Option[String], tracingId: String) -GET /skeleton/:tracingId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.updateActionStatistics(token: Option[String], tracingId: String) -GET /skeleton/:tracingId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.updateActionLog(token: Option[String], tracingId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) -POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple(token: Option[String]) - -POST /skeleton/:tracingId/update @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.update(token: Option[String], tracingId: String) -POST /skeleton/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(token: Option[String], tracingId: String, version: Option[Long], fromTask: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) +POST /skeleton/save @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.save() +POST /skeleton/saveMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.saveMultiple() +POST /skeleton/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents(persist: Boolean) +GET /skeleton/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(tracingId: String, version: Option[Long]) +POST /skeleton/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(tracingId: String, editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) +POST /skeleton/getMultiple 
@com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple