diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index f6d640f469d..7cc7b072eac 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -13,3 +13,4 @@ User-facing changes are documented in the [changelog](CHANGELOG.released.md). - [121-worker-name.sql](conf/evolutions/121-worker-name.sql) - [122-resolution-to-mag.sql](conf/evolutions/122-resolution-to-mag.sql) - [123-more-model-categories.sql](conf/evolutions/123-more-model-categories.sql) +- [124-decouple-dataset-directory-from-name.sql](conf/evolutions/124-decouple-dataset-directory-from-name.sql) diff --git a/app/controllers/AiModelController.scala b/app/controllers/AiModelController.scala index e09d8a4f534..d47d95cbda8 100644 --- a/app/controllers/AiModelController.scala +++ b/app/controllers/AiModelController.scala @@ -11,7 +11,7 @@ import play.api.libs.json.{Json, OFormat} import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import play.silhouette.api.Silhouette import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext @@ -40,7 +40,7 @@ object RunTrainingParameters { case class RunInferenceParameters(annotationId: Option[ObjectId], aiModelId: ObjectId, - datasetName: String, + datasetDirectoryName: String, colorLayerName: String, boundingBox: String, newDatasetName: String, @@ -143,7 +143,7 @@ class AiModelController @Inject()( jobCommand = JobCommand.train_model commandArgs = Json.obj( "training_annotations" -> Json.toJson(trainingAnnotations), - "organization_name" -> organization._id, + "organization_id" -> organization._id, "model_id" -> modelId, "custom_workflow_provided_by_user" -> request.body.workflowYaml ) @@ -173,15 +173,14 @@ class AiModelController @Inject()( for { _ <- userService.assertIsSuperUser(request.identity) organization <- organizationDAO.findOne(request.identity._organization) - dataset <- datasetDAO.findOneByNameAndOrganization(request.body.datasetName, organization._id) + dataset <- datasetDAO.findOneByDirectoryNameAndOrganization(request.body.datasetDirectoryName, organization._id) dataStore <- dataStoreDAO.findOneByName(dataset._dataStore) ?~> "dataStore.notFound" _ <- aiModelDAO.findOne(request.body.aiModelId) ?~> "aiModel.notFound" _ <- datasetService.assertValidDatasetName(request.body.newDatasetName) - _ <- datasetService.assertNewDatasetName(request.body.newDatasetName, organization._id) jobCommand = JobCommand.infer_with_model boundingBox <- BoundingBox.fromLiteral(request.body.boundingBox).toFox commandArgs = Json.obj( - "organization_name" -> organization._id, + "organization_id" -> organization._id, "dataset_name" -> dataset.name, "color_layer_name" -> request.body.colorLayerName, "bounding_box" -> boundingBox.toLiteral, diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index 9279822fa10..a662625d3a6 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -4,6 +4,7 @@ import org.apache.pekko.util.Timeout import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.geometry.BoundingBox +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType.AnnotationLayerType @@ -34,7 +35,7 @@ import play.api.libs.json._ 
import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.{URLSharing, UserAwareRequestLogging, WkEnv} import telemetry.SlackNotificationService -import utils.{ObjectId, WkConf} +import utils.WkConf import javax.inject.Inject import scala.concurrent.ExecutionContext @@ -242,15 +243,11 @@ class AnnotationController @Inject()( } yield result } - def createExplorational(organizationId: String, datasetName: String): Action[List[AnnotationLayerParameters]] = + def createExplorational(datasetId: String): Action[List[AnnotationLayerParameters]] = sil.SecuredAction.async(validateJson[List[AnnotationLayerParameters]]) { implicit request => for { - organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( - "organization.notFound", - organizationId) ~> NOT_FOUND - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( - "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> Messages("dataset.notFound", datasetIdValidated) ~> NOT_FOUND annotation <- annotationService.createExplorationalFor( request.identity, dataset._id, @@ -262,19 +259,12 @@ class AnnotationController @Inject()( } yield JsonOk(json) } - def getSandbox(organization: String, - datasetName: String, - typ: String, - sharingToken: Option[String]): Action[AnyContent] = + def getSandbox(datasetId: String, typ: String, sharingToken: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => val ctx = URLSharing.fallbackTokenAccessContext(sharingToken) // users with dataset sharing token may also get a sandbox annotation for { - organization <- organizationDAO.findOne(organization)(GlobalAccessContext) ?~> Messages( - "organization.notFound", - organization) ~> NOT_FOUND - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id)(ctx) ?~> Messages( - "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated)(ctx) ?~> Messages("dataset.notFound", datasetIdValidated) ~> NOT_FOUND tracingType <- TracingType.fromString(typ).toFox _ <- bool2Fox(tracingType == TracingType.skeleton) ?~> "annotation.sandbox.skeletonOnly" annotation = Annotation( diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 8f183d84494..4845b90f3c8 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -9,6 +9,7 @@ import org.apache.pekko.stream.Materializer import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.io.ZipIO +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} @@ -35,6 +36,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ VolumeTracingDownsampling } import com.typesafe.scalalogging.LazyLogging +import net.liftweb.common.Empty import javax.inject.Inject import models.analytics.{AnalyticsService, DownloadAnnotationEvent, UploadAnnotationEvent} @@ -52,9 +54,9 @@ import play.api.libs.Files.{TemporaryFile, 
TemporaryFileCreator} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, MultipartFormData} import security.WkEnv -import utils.{ObjectId, WkConf} +import utils.WkConf -import scala.concurrent.{ExecutionContext, Future} +import scala.concurrent.ExecutionContext class AnnotationIOController @Inject()( nmlWriter: NmlWriter, @@ -101,58 +103,51 @@ class AnnotationIOController @Inject()( log() { val shouldCreateGroupForEachFile: Boolean = request.body.dataParts("createGroupForEachFile").headOption.contains("true") - val overwritingDatasetName: Option[String] = - request.body.dataParts.get("datasetName").flatMap(_.headOption) - val overwritingOrganizationId: Option[String] = - request.body.dataParts.get("organizationId").flatMap(_.headOption) + val overwritingDatasetId: Option[String] = + request.body.dataParts.get("datasetId").flatMap(_.headOption) val attachedFiles = request.body.files.map(f => (f.ref.path.toFile, f.filename)) - val parsedFiles = - annotationUploadService.extractFromFiles(attachedFiles, - useZipName = true, - overwritingDatasetName, - overwritingOrganizationId) - val parsedFilesWrapped = - annotationUploadService.wrapOrPrefixGroups(parsedFiles.parseResults, shouldCreateGroupForEachFile) - val parseResultsFiltered: List[NmlParseResult] = parsedFilesWrapped.filter(_.succeeded) - - if (parseResultsFiltered.isEmpty) { - returnError(parsedFiles) - } else { - for { - parseSuccesses <- Fox.serialCombined(parseResultsFiltered)(r => r.toSuccessBox) - name = nameForUploaded(parseResultsFiltered.map(_.fileName)) - description = descriptionForNMLs(parseResultsFiltered.map(_.description)) - wkUrl = wkUrlsForNMLs(parseResultsFiltered.map(_.wkUrl)) - _ <- assertNonEmpty(parseSuccesses) - skeletonTracings = parseSuccesses.flatMap(_.skeletonTracing) - // Create a list of volume layers for each uploaded (non-skeleton-only) annotation. 
- // This is what determines the merging strategy for volume layers - volumeLayersGroupedRaw = parseSuccesses.map(_.volumeLayers).filter(_.nonEmpty) - dataset <- findDatasetForUploadedAnnotations(skeletonTracings, - volumeLayersGroupedRaw.flatten.map(_.tracing), - wkUrl) - dataSource <- datasetService.dataSourceFor(dataset) ?~> Messages("dataset.notImported", dataset.name) - usableDataSource <- dataSource.toUsable.toFox ?~> Messages("dataset.notImported", dataset.name) - volumeLayersGrouped <- adaptVolumeTracingsToFallbackLayer(volumeLayersGroupedRaw, dataset, usableDataSource) - tracingStoreClient <- tracingStoreService.clientFor(dataset) - mergedVolumeLayers <- mergeAndSaveVolumeLayers(volumeLayersGrouped, - tracingStoreClient, - parsedFiles.otherFiles, - usableDataSource) - mergedSkeletonLayers <- mergeAndSaveSkeletonLayers(skeletonTracings, tracingStoreClient) - annotation <- annotationService.createFrom(request.identity, - dataset, - mergedSkeletonLayers ::: mergedVolumeLayers, - AnnotationType.Explorational, - name, - description) - _ = analyticsService.track(UploadAnnotationEvent(request.identity, annotation)) - } yield - JsonOk( - Json.obj("annotation" -> Json.obj("typ" -> annotation.typ, "id" -> annotation.id)), - Messages("nml.file.uploadSuccess") - ) - } + for { + parsedFiles <- annotationUploadService + .extractFromFiles(attachedFiles, SharedParsingParameters(useZipName = true, overwritingDatasetId)) + parsedFilesWrapped = annotationUploadService.wrapOrPrefixGroups(parsedFiles.parseResults, + shouldCreateGroupForEachFile) + parseResultsFiltered: List[NmlParseResult] = parsedFilesWrapped.filter(_.succeeded) + _ <- bool2Fox(parseResultsFiltered.isEmpty) ~> returnError(parsedFiles) // TODOM: Find a proper way to return an error when parseResultsFiltered.isEmpty + parseSuccesses <- Fox.serialCombined(parseResultsFiltered)(r => r.toSuccessBox) + name = nameForUploaded(parseResultsFiltered.map(_.fileName)) + description = descriptionForNMLs(parseResultsFiltered.map(_.description)) + wkUrl = wkUrlsForNMLs(parseResultsFiltered.map(_.wkUrl)) + _ <- assertNonEmpty(parseSuccesses) + skeletonTracings = parseSuccesses.flatMap(_.skeletonTracingOpt) + // Create a list of volume layers for each uploaded (non-skeleton-only) annotation. 
+ // This is what determines the merging strategy for volume layers + volumeLayersGroupedRaw = parseSuccesses.map(_.volumeLayers).filter(_.nonEmpty) + datasetIds = parseSuccesses.map(_.datasetId) + dataset <- findDatasetForUploadedAnnotations(skeletonTracings, + volumeLayersGroupedRaw.flatten, + datasetIds, + wkUrl) + dataSource <- datasetService.dataSourceFor(dataset) ?~> Messages("dataset.notImported", dataset.name) + usableDataSource <- dataSource.toUsable.toFox ?~> Messages("dataset.notImported", dataset.name) + volumeLayersGrouped <- adaptVolumeTracingsToFallbackLayer(volumeLayersGroupedRaw, dataset, usableDataSource) + tracingStoreClient <- tracingStoreService.clientFor(dataset) + mergedVolumeLayers <- mergeAndSaveVolumeLayers(volumeLayersGrouped, + tracingStoreClient, + parsedFiles.otherFiles, + usableDataSource) + mergedSkeletonLayers <- mergeAndSaveSkeletonLayers(skeletonTracings, tracingStoreClient) + annotation <- annotationService.createFrom(request.identity, + dataset, + mergedSkeletonLayers ::: mergedVolumeLayers, + AnnotationType.Explorational, + name, + description) + _ = analyticsService.track(UploadAnnotationEvent(request.identity, annotation)) + } yield + JsonOk( + Json.obj("annotation" -> Json.obj("typ" -> annotation.typ, "id" -> annotation.id)), + Messages("nml.file.uploadSuccess") + ) } } @@ -217,14 +212,15 @@ class AnnotationIOController @Inject()( } private def assertNonEmpty(parseSuccesses: List[NmlParseSuccess]) = - bool2Fox(parseSuccesses.exists(p => p.skeletonTracing.nonEmpty || p.volumeLayers.nonEmpty)) ?~> "nml.file.noFile" + bool2Fox(parseSuccesses.exists(p => p.skeletonTracingOpt.nonEmpty || p.volumeLayers.nonEmpty)) ?~> "nml.file.noFile" private def findDatasetForUploadedAnnotations( skeletonTracings: List[SkeletonTracing], - volumeTracings: List[VolumeTracing], + volumeTracings: List[UploadedVolumeLayer], + datasetIds: List[ObjectId], wkUrl: String)(implicit mp: MessagesProvider, ctx: DBAccessContext): Fox[Dataset] = for { - datasetName <- assertAllOnSameDataset(skeletonTracings, volumeTracings) ?~> "nml.file.differentDatasets" + datasetId <- SequenceUtils.findUniqueElement(datasetIds).toFox ?~> "nml.file.differentDatasets" organizationIdOpt <- assertAllOnSameOrganization(skeletonTracings, volumeTracings) ?~> "nml.file.differentDatasets" organizationIdOpt <- Fox.runOptional(organizationIdOpt) { organizationDAO.findOne(_)(GlobalAccessContext).map(_._id) @@ -233,19 +229,17 @@ class AnnotationIOController @Inject()( } else { Messages("organization.notFound", organizationIdOpt.getOrElse("")) }) ~> NOT_FOUND organizationId <- Fox.fillOption(organizationIdOpt) { - datasetDAO.getOrganizationIdForDataset(datasetName)(GlobalAccessContext) - } ?~> Messages("dataset.noAccess", datasetName) ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> (if (wkUrl.nonEmpty && conf.Http.uri != wkUrl) { - Messages( - "dataset.noAccess.wrongHost", - datasetName, - wkUrl, - conf.Http.uri) - } else { - Messages( - "dataset.noAccess", - datasetName) - }) ~> FORBIDDEN + organizationDAO.findOrganizationIdForDataset(datasetId)(GlobalAccessContext) + } ?~> Messages("dataset.noAccess", datasetId) ~> FORBIDDEN + dataset <- datasetDAO.findOne(datasetId) ?~> (if (wkUrl.nonEmpty && conf.Http.uri != wkUrl) { + Messages("dataset.noAccess.wrongHost", + datasetId, + wkUrl, + conf.Http.uri) + } else { + Messages("dataset.noAccess", datasetId) + }) ~> FORBIDDEN + _ <- bool2Fox(organizationId == dataset._organization) ?~> Messages("dataset.noAccess", 
datasetId) ~> FORBIDDEN } yield dataset private def nameForUploaded(fileNames: Seq[String]) = @@ -260,25 +254,23 @@ class AnnotationIOController @Inject()( private def wkUrlsForNMLs(wkUrls: Seq[Option[String]]) = if (wkUrls.toSet.size == 1) wkUrls.headOption.flatten.getOrElse("") else "" - private def returnError(zipParseResult: NmlResults.MultiNmlParseResult)(implicit messagesProvider: MessagesProvider) = + private def returnError(zipParseResult: NmlResults.MultiNmlParseResult)( + implicit messagesProvider: MessagesProvider): Fox[Nothing] = if (zipParseResult.containsFailure) { val errors = zipParseResult.parseResults.flatMap { case result: NmlResults.NmlParseFailure => Some("error" -> Messages("nml.file.invalid", result.fileName, result.error)) case _ => None } - Future.successful(JsonBadRequest(errors)) + Fox.paramFailure("NML upload failed", Empty, Empty, errors) } else { - Future.successful(JsonBadRequest(Messages("nml.file.noFile"))) + Fox.paramFailure("NML upload failed", Empty, Empty, None) } - private def assertAllOnSameDataset(skeletons: List[SkeletonTracing], volumes: List[VolumeTracing]): Fox[String] = - SequenceUtils.findUniqueElement(volumes.map(_.datasetName) ++ skeletons.map(_.datasetName)).toFox - private def assertAllOnSameOrganization(skeletons: List[SkeletonTracing], - volumes: List[VolumeTracing]): Fox[Option[String]] = { + volumes: List[UploadedVolumeLayer]): Fox[Option[String]] = { // Note that organizationIds are optional. Tracings with no organization attribute are ignored here - val organizationIds = skeletons.flatMap(_.organizationId) ::: volumes.flatMap(_.organizationId) + val organizationIds = skeletons.flatMap(_.organizationId) ::: volumes.flatMap(_.tracing.organizationId) for { _ <- Fox.runOptional(organizationIds.headOption)(name => bool2Fox(organizationIds.forall(_ == name))) } yield organizationIds.headOption @@ -427,6 +419,7 @@ class AnnotationIOController @Inject()( organizationId, conf.Http.uri, dataset.name, + dataset._id, Some(user), taskOpt, skipVolumeData, @@ -467,6 +460,7 @@ class AnnotationIOController @Inject()( organizationId, conf.Http.uri, dataset.name, + dataset._id, Some(user), taskOpt, skipVolumeData, diff --git a/app/controllers/AnnotationPrivateLinkController.scala b/app/controllers/AnnotationPrivateLinkController.scala index ea3765154fe..631a95b4a75 100644 --- a/app/controllers/AnnotationPrivateLinkController.scala +++ b/app/controllers/AnnotationPrivateLinkController.scala @@ -13,7 +13,7 @@ import models.annotation._ import net.liftweb.common.Full import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.{WkEnv, WkSilhouetteEnvironment} -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext class AnnotationPrivateLinkController @Inject()( diff --git a/app/controllers/AuthenticationController.scala b/app/controllers/AuthenticationController.scala index f6609755e09..c5baa021fbc 100755 --- a/app/controllers/AuthenticationController.scala +++ b/app/controllers/AuthenticationController.scala @@ -8,6 +8,7 @@ import play.silhouette.api.util.{Credentials, PasswordInfo} import play.silhouette.api.{LoginInfo, Silhouette} import play.silhouette.impl.providers.CredentialsProvider import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} import mail.{DefaultMails, MailchimpClient, MailchimpTag, Send} import 
models.analytics.{AnalyticsService, InviteEvent, JoinOrganizationEvent, SignupEvent} @@ -21,7 +22,7 @@ import net.liftweb.common.{Box, Empty, Failure, Full} import org.apache.commons.codec.binary.Base64 import org.apache.commons.codec.digest.{HmacAlgorithms, HmacUtils} import play.api.data.Form -import play.api.data.Forms.{email, _} +import play.api.data.Forms._ import play.api.data.validation.Constraints._ import play.api.i18n.Messages import play.api.libs.json._ @@ -35,7 +36,7 @@ import security.{ WkEnv, WkSilhouetteEnvironment } -import utils.{ObjectId, WkConf} +import utils.WkConf import java.net.URLEncoder import java.nio.charset.StandardCharsets @@ -232,45 +233,40 @@ class AuthenticationController @Inject()( not superadmin - fetch all identities, construct access context, try until one works */ - def accessibleBySwitching(organizationId: Option[String], - datasetName: Option[String], + def accessibleBySwitching(datasetId: Option[String], annotationId: Option[String], workflowHash: Option[String]): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { + datasetIdValidated <- Fox.runOptional(datasetId)(ObjectId.fromString(_)) isSuperUser <- multiUserDAO.findOne(request.identity._multiUser).map(_.isSuperUser) selectedOrganization <- if (isSuperUser) - accessibleBySwitchingForSuperUser(organizationId, datasetName, annotationId, workflowHash) + accessibleBySwitchingForSuperUser(datasetIdValidated, annotationId, workflowHash) else - accessibleBySwitchingForMultiUser(request.identity._multiUser, - organizationId, - datasetName, - annotationId, - workflowHash) + accessibleBySwitchingForMultiUser(request.identity._multiUser, datasetIdValidated, annotationId, workflowHash) _ <- bool2Fox(selectedOrganization._id != request.identity._organization) // User is already in correct orga, but still could not see dataset. Assume this had a reason. selectedOrganizationJs <- organizationService.publicWrites(selectedOrganization) } yield Ok(selectedOrganizationJs) } - private def accessibleBySwitchingForSuperUser(organizationIdOpt: Option[String], - datasetNameOpt: Option[String], + private def accessibleBySwitchingForSuperUser(datasetIdOpt: Option[ObjectId], annotationIdOpt: Option[String], workflowHashOpt: Option[String]): Fox[Organization] = { implicit val ctx: DBAccessContext = GlobalAccessContext - (organizationIdOpt, datasetNameOpt, annotationIdOpt, workflowHashOpt) match { - case (Some(organizationId), Some(datasetName), None, None) => + (datasetIdOpt, annotationIdOpt, workflowHashOpt) match { + case (Some(datasetId), None, None) => for { - organization <- organizationDAO.findOne(organizationId) - _ <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) + dataset <- datasetDAO.findOne(datasetId) + organization <- organizationDAO.findOne(dataset._organization) } yield organization - case (None, None, Some(annotationId), None) => + case (None, Some(annotationId), None) => for { annotationObjectId <- ObjectId.fromString(annotationId) annotation <- annotationDAO.findOne(annotationObjectId) // Note: this does not work for compound annotations. 
user <- userDAO.findOne(annotation._user) organization <- organizationDAO.findOne(user._organization) } yield organization - case (None, None, None, Some(workflowHash)) => + case (None, None, Some(workflowHash)) => for { workflow <- voxelyticsDAO.findWorkflowByHash(workflowHash) organization <- organizationDAO.findOne(workflow._organization) @@ -280,41 +276,34 @@ class AuthenticationController @Inject()( } private def accessibleBySwitchingForMultiUser(multiUserId: ObjectId, - organizationIdOpt: Option[String], - datasetNameOpt: Option[String], + datasetIdOpt: Option[ObjectId], annotationIdOpt: Option[String], workflowHashOpt: Option[String]): Fox[Organization] = for { identities <- userDAO.findAllByMultiUser(multiUserId) - selectedIdentity <- Fox.find(identities)( - identity => - canAccessDatasetOrAnnotationOrWorkflow(identity, - organizationIdOpt, - datasetNameOpt, - annotationIdOpt, - workflowHashOpt)) + selectedIdentity <- Fox.find(identities)(identity => + canAccessDatasetOrAnnotationOrWorkflow(identity, datasetIdOpt, annotationIdOpt, workflowHashOpt)) selectedOrganization <- organizationDAO.findOne(selectedIdentity._organization)(GlobalAccessContext) } yield selectedOrganization private def canAccessDatasetOrAnnotationOrWorkflow(user: User, - organizationIdOpt: Option[String], - datasetNameOpt: Option[String], + datasetIdOpt: Option[ObjectId], annotationIdOpt: Option[String], workflowHashOpt: Option[String]): Fox[Boolean] = { val ctx = AuthorizedAccessContext(user) - (organizationIdOpt, datasetNameOpt, annotationIdOpt, workflowHashOpt) match { - case (Some(organizationId), Some(datasetName), None, None) => - canAccessDataset(ctx, organizationId, datasetName) - case (None, None, Some(annotationId), None) => + (datasetIdOpt, annotationIdOpt, workflowHashOpt) match { + case (Some(datasetId), None, None) => + canAccessDataset(ctx, datasetId) + case (None, Some(annotationId), None) => canAccessAnnotation(user, ctx, annotationId) - case (None, None, None, Some(workflowHash)) => + case (None, None, Some(workflowHash)) => canAccessWorkflow(user, workflowHash) case _ => Fox.failure("Can either test access for dataset or annotation or workflow, not a combination") } } - private def canAccessDataset(ctx: DBAccessContext, organizationId: String, datasetName: String): Fox[Boolean] = { - val foundFox = datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(ctx) + private def canAccessDataset(ctx: DBAccessContext, datasetId: ObjectId): Fox[Boolean] = { + val foundFox = datasetDAO.findOne(datasetId)(ctx) foundFox.futureBox.map(_.isDefined) } diff --git a/app/controllers/ConfigurationController.scala b/app/controllers/ConfigurationController.scala index 6b57bb28cb7..cbb40e3bf51 100755 --- a/app/controllers/ConfigurationController.scala +++ b/app/controllers/ConfigurationController.scala @@ -2,6 +2,8 @@ package controllers import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.GlobalAccessContext +import com.scalableminds.util.objectid.ObjectId + import javax.inject.Inject import models.dataset.{DatasetDAO, DatasetService} import models.configuration.DatasetConfigurationService @@ -33,51 +35,50 @@ class ConfigurationController @Inject()( } yield JsonOk(Messages("user.configuration.updated")) } - def readDatasetViewConfiguration(organizationId: String, - datasetName: String, - sharingToken: Option[String]): Action[List[String]] = + def readDatasetViewConfiguration(datasetId: String, sharingToken: Option[String]): Action[List[String]] = 
sil.UserAwareAction.async(validateJson[List[String]]) { implicit request => val ctx = URLSharing.fallbackTokenAccessContext(sharingToken) - request.identity.toFox - .flatMap(user => - datasetConfigurationService.getDatasetViewConfigurationForUserAndDataset(request.body, - user, - datasetName, - organizationId)(GlobalAccessContext)) - .orElse( - datasetConfigurationService.getDatasetViewConfigurationForDataset(request.body, datasetName, organizationId)( - ctx) - ) - .getOrElse(Map.empty) - .map(configuration => Ok(Json.toJson(configuration))) + for { + datasetIdValidated <- ObjectId.fromString(datasetId) + configuration <- request.identity.toFox + .flatMap( + user => + datasetConfigurationService.getDatasetViewConfigurationForUserAndDataset( + request.body, + user, + datasetIdValidated)(GlobalAccessContext)) + .orElse( + datasetConfigurationService.getDatasetViewConfigurationForDataset(request.body, datasetIdValidated)(ctx) + ) + .getOrElse(Map.empty) + } yield Ok(Json.toJson(configuration)) } - def updateDatasetViewConfiguration(organizationId: String, datasetName: String): Action[JsValue] = + def updateDatasetViewConfiguration(datasetId: String): Action[JsValue] = sil.SecuredAction.async(parse.json(maxLength = 20480)) { implicit request => for { jsConfiguration <- request.body.asOpt[JsObject] ?~> "user.configuration.dataset.invalid" + datasetIdValidated <- ObjectId.fromString(datasetId) conf = jsConfiguration.fields.toMap datasetConf = conf - "layers" layerConf = conf.get("layers") - _ <- userService.updateDatasetViewConfiguration(request.identity, - datasetName, - organizationId, - datasetConf, - layerConf) + _ <- userService.updateDatasetViewConfiguration(request.identity, datasetIdValidated, datasetConf, layerConf) } yield JsonOk(Messages("user.configuration.dataset.updated")) } - def readDatasetAdminViewConfiguration(organizationId: String, datasetName: String): Action[AnyContent] = + def readDatasetAdminViewConfiguration(datasetId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => - datasetConfigurationService - .getCompleteAdminViewConfiguration(datasetName, organizationId) - .map(configuration => Ok(Json.toJson(configuration))) + for { + datasetIdValidated <- ObjectId.fromString(datasetId) + configuration <- datasetConfigurationService.getCompleteAdminViewConfiguration(datasetIdValidated) + } yield Ok(Json.toJson(configuration)) } - def updateDatasetAdminViewConfiguration(organizationId: String, datasetName: String): Action[JsValue] = + def updateDatasetAdminViewConfiguration(datasetNameAndId: String): Action[JsValue] = sil.SecuredAction.async(parse.json(maxLength = 20480)) { implicit request => for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> "dataset.notFound" ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetNameAndId) + dataset <- datasetDAO.findOne(datasetIdValidated)(GlobalAccessContext) _ <- datasetService.isEditableBy(dataset, Some(request.identity)) ?~> "notAllowed" ~> FORBIDDEN jsObject <- request.body.asOpt[JsObject].toFox ?~> "user.configuration.dataset.invalid" _ <- datasetConfigurationService.updateAdminViewConfigurationFor(dataset, jsObject.fields.toMap) diff --git a/app/controllers/CredentialController.scala b/app/controllers/CredentialController.scala index 786d3b4ddb4..e2e43df0aef 100644 --- a/app/controllers/CredentialController.scala +++ b/app/controllers/CredentialController.scala @@ -11,7 +11,7 @@ import models.dataset.credential.CredentialDAO import play.api.libs.json.{JsValue, 
Json, OFormat} import play.api.mvc.{Action, PlayBodyParsers} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index a01c573a2a7..86e97cab57f 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -3,6 +3,7 @@ package controllers import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.enumeration.ExtendedEnumeration import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, TristateOptionJsonHelper} import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate @@ -19,20 +20,21 @@ import models.folder.FolderService import models.organization.OrganizationDAO import models.team.{TeamDAO, TeamService} import models.user.{User, UserDAO, UserService} +import net.liftweb.common.{Failure, Full} import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.functional.syntax._ import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import play.silhouette.api.Silhouette import security.{URLSharing, WkEnv} -import utils.{MetadataAssertions, ObjectId, WkConf} +import utils.{MetadataAssertions, WkConf} import javax.inject.Inject import scala.concurrent.{ExecutionContext, Future} case class DatasetUpdateParameters( description: Option[Option[String]] = Some(None), - displayName: Option[Option[String]] = Some(None), + name: Option[Option[String]] = Some(None), sortingKey: Option[Instant], isPublic: Option[Boolean], tags: Option[List[String]], @@ -92,6 +94,7 @@ class DatasetController @Inject()(userService: UserService, private val datasetPublicReads = ((__ \ "description").readNullable[String] and + (__ \ "name").readNullable[String] and (__ \ "displayName").readNullable[String] and (__ \ "sortingKey").readNullable[Instant] and (__ \ "isPublic").read[Boolean] and @@ -99,14 +102,15 @@ class DatasetController @Inject()(userService: UserService, (__ \ "metadata").readNullable[JsArray] and (__ \ "folderId").readNullable[ObjectId]).tupled - def removeFromThumbnailCache(organizationId: String, datasetName: String): Action[AnyContent] = - sil.SecuredAction { - thumbnailCachingService.removeFromCache(organizationId, datasetName) - Ok + def removeFromThumbnailCache(datasetId: String): Action[AnyContent] = + sil.SecuredAction.async { implicit request => + for { + datasetIdValidated <- ObjectId.fromString(datasetId) + _ <- thumbnailCachingService.removeFromCache(datasetIdValidated) + } yield Ok } - def thumbnail(organizationId: String, - datasetName: String, + def thumbnail(datasetId: String, dataLayerName: String, w: Option[Int], h: Option[Int], @@ -115,8 +119,9 @@ class DatasetController @Inject()(userService: UserService, sil.UserAwareAction.async { implicit request => val ctx = URLSharing.fallbackTokenAccessContext(sharingToken) for { - _ <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(ctx) ?~> notFoundMessage(datasetName) ~> NOT_FOUND // To check Access Rights - image <- thumbnailService.getThumbnailWithCache(organizationId, datasetName, dataLayerName, w, h, mappingName) + datasetIdValidated <- ObjectId.fromString(datasetId) + _ <- datasetDAO.findOne(datasetIdValidated)(ctx) ?~> 
notFoundMessage(datasetId) ~> NOT_FOUND // To check Access Rights + image <- thumbnailService.getThumbnailWithCache(datasetIdValidated, dataLayerName, w, h, mappingName) } yield { addRemoteOriginHeaders(Ok(image)).as(jpegMimeType).withHeaders(CACHE_CONTROL -> "public, max-age=86400") } @@ -237,33 +242,29 @@ class DatasetController @Inject()(userService: UserService, } } yield js.flatten - def accessList(organizationId: String, datasetName: String): Action[AnyContent] = sil.SecuredAction.async { - implicit request => - for { - organization <- organizationDAO.findOne(organizationId) - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> notFoundMessage( - datasetName) ~> NOT_FOUND - allowedTeams <- teamService.allowedTeamIdsForDataset(dataset, cumulative = true) ?~> "allowedTeams.notFound" - usersByTeams <- userDAO.findAllByTeams(allowedTeams) - adminsAndDatasetManagers <- userDAO.findAdminsAndDatasetManagersByOrg(organization._id) - usersFiltered = (usersByTeams ++ adminsAndDatasetManagers).distinct.filter(!_.isUnlisted) - usersJs <- Fox.serialCombined(usersFiltered)(u => userService.compactWrites(u)) - } yield Ok(Json.toJson(usersJs)) + def accessList(datasetId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => + for { + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> notFoundMessage(datasetIdValidated.toString) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization) + allowedTeams <- teamService.allowedTeamIdsForDataset(dataset, cumulative = true) ?~> "allowedTeams.notFound" + usersByTeams <- userDAO.findAllByTeams(allowedTeams) + adminsAndDatasetManagers <- userDAO.findAdminsAndDatasetManagersByOrg(organization._id) + usersFiltered = (usersByTeams ++ adminsAndDatasetManagers).distinct.filter(!_.isUnlisted) + usersJs <- Fox.serialCombined(usersFiltered)(u => userService.compactWrites(u)) + } yield Ok(Json.toJson(usersJs)) } - def read(organizationId: String, - datasetName: String, + def read(datasetId: String, // Optional sharing token allowing access to datasets your team does not normally have access to.") sharingToken: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => log() { val ctx = URLSharing.fallbackTokenAccessContext(sharingToken) for { - organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( - "organization.notFound", - organizationId) - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id)(ctx) ?~> notFoundMessage( - datasetName) ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated)(ctx) ?~> notFoundMessage(datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization) ~> NOT_FOUND _ <- Fox.runOptional(request.identity)(user => datasetLastUsedTimesDAO.updateForDatasetAndUser(dataset._id, user._id)) // Access checked above via dataset. 
In case of shared dataset/annotation, show datastore even if not otherwise accessible @@ -282,68 +283,71 @@ class DatasetController @Inject()(userService: UserService, } } - def health(organizationId: String, datasetName: String, sharingToken: Option[String]): Action[AnyContent] = + def health(datasetId: String, sharingToken: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => val ctx = URLSharing.fallbackTokenAccessContext(sharingToken) for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(ctx) ?~> notFoundMessage( - datasetName) ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated)(ctx) ?~> notFoundMessage(datasetIdValidated.toString) ~> NOT_FOUND dataSource <- datasetService.dataSourceFor(dataset) ?~> "dataSource.notFound" ~> NOT_FOUND usableDataSource <- dataSource.toUsable.toFox ?~> "dataset.notImported" datalayer <- usableDataSource.dataLayers.headOption.toFox ?~> "dataset.noLayers" _ <- datasetService .clientFor(dataset)(GlobalAccessContext) - .flatMap(_.findPositionWithData(organizationId, dataset, datalayer.name).flatMap(posWithData => + .flatMap(_.findPositionWithData(dataset, datalayer.name).flatMap(posWithData => bool2Fox(posWithData.value("position") != JsNull))) ?~> "dataset.loadingDataFailed" } yield Ok("Ok") } - def updatePartial(organizationId: String, datasetName: String): Action[DatasetUpdateParameters] = + def updatePartial(datasetId: String): Action[DatasetUpdateParameters] = sil.SecuredAction.async(validateJson[DatasetUpdateParameters]) { implicit request => for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, request.identity._organization) ?~> notFoundMessage( - datasetName) ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> notFoundMessage(datasetIdValidated.toString) ~> NOT_FOUND _ <- Fox.assertTrue(datasetService.isEditableBy(dataset, Some(request.identity))) ?~> "notAllowed" ~> FORBIDDEN _ <- Fox.runOptional(request.body.metadata)(assertNoDuplicateMetadataKeys) _ <- datasetDAO.updatePartial(dataset._id, request.body) - updated <- datasetDAO.findOneByNameAndOrganization(datasetName, request.identity._organization) + updated <- datasetDAO.findOne(datasetIdValidated) _ = analyticsService.track(ChangeDatasetSettingsEvent(request.identity, updated)) js <- datasetService.publicWrites(updated, Some(request.identity)) } yield Ok(js) } // Note that there exists also updatePartial (which will only expect the changed fields) - def update(organizationId: String, datasetName: String): Action[JsValue] = + def update(datasetId: String): Action[JsValue] = sil.SecuredAction.async(parse.json) { implicit request => withJsonBodyUsing(datasetPublicReads) { - case (description, displayName, sortingKey, isPublic, tags, metadata, folderId) => + case (description, datasetName, legacyDatasetDisplayName, sortingKey, isPublic, tags, metadata, folderId) => { + val name = if (datasetName.isDefined) datasetName else legacyDatasetDisplayName for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, request.identity._organization) ?~> notFoundMessage( - datasetName) ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> notFoundMessage(datasetIdValidated.toString) ~> NOT_FOUND maybeUpdatedMetadata = metadata.getOrElse(dataset.metadata) _ <- assertNoDuplicateMetadataKeys(maybeUpdatedMetadata) 
_ <- Fox.assertTrue(datasetService.isEditableBy(dataset, Some(request.identity))) ?~> "notAllowed" ~> FORBIDDEN _ <- datasetDAO.updateFields( dataset._id, description, - displayName, + name, sortingKey.getOrElse(dataset.created), isPublic, tags, maybeUpdatedMetadata, folderId.getOrElse(dataset._folder) ) - updated <- datasetDAO.findOneByNameAndOrganization(datasetName, request.identity._organization) + updated <- datasetDAO.findOne(datasetIdValidated) _ = analyticsService.track(ChangeDatasetSettingsEvent(request.identity, updated)) js <- datasetService.publicWrites(updated, Some(request.identity)) } yield Ok(Json.toJson(js)) + } } } - def updateTeams(organizationId: String, datasetName: String): Action[List[ObjectId]] = + def updateTeams(datasetId: String): Action[List[ObjectId]] = sil.SecuredAction.async(validateJson[List[ObjectId]]) { implicit request => for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> notFoundMessage(datasetName) ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> notFoundMessage(datasetIdValidated.toString) ~> NOT_FOUND _ <- Fox.assertTrue(datasetService.isEditableBy(dataset, Some(request.identity))) ?~> "notAllowed" ~> FORBIDDEN includeMemberOnlyTeams = request.identity.isDatasetManager userTeams <- if (includeMemberOnlyTeams) teamDAO.findAll else teamDAO.findAllEditable @@ -355,44 +359,40 @@ class DatasetController @Inject()(userService: UserService, } yield Ok(Json.toJson(newTeams)) } - def getSharingToken(organizationId: String, datasetName: String): Action[AnyContent] = + def getSharingToken(datasetId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { - organization <- organizationDAO.findOne(organizationId) - _ <- bool2Fox(organization._id == request.identity._organization) ~> FORBIDDEN - token <- datasetService.getSharingToken(datasetName, organization._id) + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> notFoundMessage(datasetIdValidated.toString) ~> NOT_FOUND + _ <- bool2Fox(dataset._organization == request.identity._organization) ~> FORBIDDEN + token <- datasetService.getSharingToken(dataset._id) } yield Ok(Json.obj("sharingToken" -> token.trim)) } - def deleteSharingToken(organizationId: String, datasetName: String): Action[AnyContent] = sil.SecuredAction.async { - implicit request => + def deleteSharingToken(datasetId: String): Action[AnyContent] = + sil.SecuredAction.async { implicit request => for { - organization <- organizationDAO.findOne(organizationId) - _ <- bool2Fox(organization._id == request.identity._organization) ~> FORBIDDEN - dataset <- datasetDAO - .findOneByNameAndOrganization(datasetName, organizationId) ?~> notFoundMessage(datasetName) ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> notFoundMessage(datasetIdValidated.toString) ~> NOT_FOUND + _ <- bool2Fox(dataset._organization == request.identity._organization) ~> FORBIDDEN _ <- Fox.assertTrue(datasetService.isEditableBy(dataset, Some(request.identity))) ?~> "notAllowed" ~> FORBIDDEN - _ <- datasetDAO.updateSharingTokenByName(datasetName, organization._id, None) + _ <- datasetDAO.updateSharingTokenById(datasetIdValidated, None) } yield Ok - } + } def create(typ: String): Action[JsValue] = sil.SecuredAction.async(parse.json) { implicit request => 
Future.successful(JsonBadRequest(Messages("dataset.type.invalid", typ))) } - def isValidNewName(organizationId: String, datasetName: String): Action[AnyContent] = + def isValidNewName(datasetName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { - organization <- organizationDAO.findOne(organizationId) - _ <- bool2Fox(organization._id == request.identity._organization) ~> FORBIDDEN validName <- datasetService.assertValidDatasetName(datasetName).futureBox - nameAlreadyExists <- (datasetService.assertNewDatasetName(datasetName, organization._id) ?~> "dataset.name.alreadyTaken").futureBox - errors = combineErrors(List(validName, nameAlreadyExists)) - valid = validName.isDefined && nameAlreadyExists.isDefined } yield - errors match { - case Some(e) => Ok(Json.obj("isValid" -> valid, "errors" -> e.map(Messages(_)))) - case None => Ok(Json.obj("isValid" -> valid)) + validName match { + case Full(_) => Ok(Json.obj("isValid" -> true)) + case Failure(msg, _, _) => Ok(Json.obj("isValid" -> false, "errors" -> Messages(msg))) + case _ => Ok(Json.obj("isValid" -> false, "errors" -> List("Unknown error"))) } } @@ -400,28 +400,38 @@ class DatasetController @Inject()(userService: UserService, implicit request => for { organizationId <- datasetDAO.getOrganizationIdForDataset(datasetName) - organization <- organizationDAO.findOne(organizationId) - } yield Ok(Json.obj("organization" -> organization._id)) + } yield Ok(Json.obj("organization" -> organizationId)) } + def getDatasetIdFromNameAndOrganization(datasetName: String, organizationId: String): Action[AnyContent] = + sil.UserAwareAction.async { implicit request => + for { + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> notFoundMessage(datasetName) ~> NOT_FOUND + } yield + Ok( + Json.obj("id" -> dataset._id, + "name" -> dataset.name, + "organization" -> dataset._organization, + "path" -> dataset.directoryName)) + } + private def notFoundMessage(datasetName: String)(implicit ctx: DBAccessContext, m: MessagesProvider): String = ctx.data match { case Some(_: User) => Messages("dataset.notFound", datasetName) case _ => Messages("dataset.notFoundConsiderLogin", datasetName) } - def segmentAnythingMask(organizationId: String, - datasetName: String, + def segmentAnythingMask(datasetId: String, dataLayerName: String, intensityMin: Option[Float], intensityMax: Option[Float]): Action[SegmentAnythingMaskParameters] = sil.SecuredAction.async(validateJson[SegmentAnythingMaskParameters]) { implicit request => log() { for { + datasetIdValidated <- ObjectId.fromString(datasetId) _ <- bool2Fox(conf.Features.segmentAnythingEnabled) ?~> "segmentAnything.notEnabled" _ <- bool2Fox(conf.SegmentAnything.uri.nonEmpty) ?~> "segmentAnything.noUri" - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> notFoundMessage( - datasetName) ~> NOT_FOUND + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> notFoundMessage(datasetId) ~> NOT_FOUND dataSource <- datasetService.dataSourceFor(dataset) ?~> "dataSource.notFound" ~> NOT_FOUND usableDataSource <- dataSource.toUsable ?~> "dataset.notImported" dataLayer <- usableDataSource.dataLayers.find(_.name == dataLayerName) ?~> "dataset.noLayers" @@ -439,7 +449,6 @@ class DatasetController @Inject()(userService: UserService, request.body.pointX.isDefined && request.body.pointY.isDefined)) ?~> "Missing pointX and pointY parameters for point interaction." 
beforeDataLoading = Instant.now data <- datastoreClient.getLayerData( - organizationId, dataset, dataLayer.name, request.body.surroundingBoundingBox, diff --git a/app/controllers/FolderController.scala b/app/controllers/FolderController.scala index 5a848239714..b4bc528d3ca 100644 --- a/app/controllers/FolderController.scala +++ b/app/controllers/FolderController.scala @@ -1,5 +1,6 @@ package controllers +import com.scalableminds.util.objectid.ObjectId import play.silhouette.api.Silhouette import com.scalableminds.util.tools.{Fox, FoxImplicits} import models.dataset.DatasetDAO @@ -10,7 +11,7 @@ import models.user.UserService import play.api.libs.json.{JsArray, Json} import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.WkEnv -import utils.{MetadataAssertions, ObjectId} +import utils.MetadataAssertions import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/controllers/InitialDataController.scala b/app/controllers/InitialDataController.scala index d2c736a0648..95781606041 100644 --- a/app/controllers/InitialDataController.scala +++ b/app/controllers/InitialDataController.scala @@ -2,6 +2,7 @@ package controllers import play.silhouette.api.{LoginInfo, Silhouette} import com.scalableminds.util.accesscontext.GlobalAccessContext +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging @@ -14,7 +15,7 @@ import models.team._ import models.user._ import net.liftweb.common.{Box, Full} import play.api.libs.json.{JsArray, Json} -import utils.{ObjectId, StoreModules, WkConf} +import utils.{StoreModules, WkConf} import javax.inject.Inject import models.organization.{Organization, OrganizationDAO, OrganizationService} diff --git a/app/controllers/JobController.scala b/app/controllers/JobController.scala index 8d45f63ed0e..2648bacc2c5 100644 --- a/app/controllers/JobController.scala +++ b/app/controllers/JobController.scala @@ -13,15 +13,17 @@ import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.{WkEnv, WkSilhouetteEnvironment} import telemetry.SlackNotificationService -import utils.{ObjectId, WkConf} +import utils.WkConf import java.util.Date import javax.inject.Inject import scala.concurrent.ExecutionContext import com.scalableminds.util.enumeration.ExtendedEnumeration +import com.scalableminds.util.objectid.ObjectId +import com.scalableminds.webknossos.datastore.models.{LengthUnit, VoxelSize} import com.scalableminds.webknossos.datastore.dataformats.zarr.Zarr3OutputHelper import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, FullAxisOrder, NDBoundingBox} -import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, LengthUnit, VoxelSize} +import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate import models.team.PricingPlan object MovieResolutionSetting extends ExtendedEnumeration { @@ -113,26 +115,25 @@ class JobController @Inject()( } // Note that the dataset has to be registered by reserveUpload via the datastore first. 
- def runConvertToWkwJob(organizationId: String, - datasetName: String, - scale: String, - unit: Option[String]): Action[AnyContent] = + def runConvertToWkwJob(datasetId: String, scale: String, unit: Option[String]): Action[AnyContent] = sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND voxelSizeFactor <- Vec3Double.fromUriLiteral(scale).toFox voxelSizeUnit <- Fox.runOptional(unit)(u => LengthUnit.fromString(u).toFox) voxelSize = VoxelSize.fromFactorAndUnitWithDefault(voxelSizeFactor, voxelSizeUnit) + organization <- organizationDAO.findOne(dataset._organization) ?~> Messages("organization.notFound", + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( - "dataset.notFound", - datasetName) ~> NOT_FOUND command = JobCommand.convert_to_wkw commandArgs = Json.obj( - "organization_name" -> organizationId, + "organization_id" -> organization._id, "organization_display_name" -> organization.name, - "dataset_name" -> datasetName, + "dataset_name" -> dataset.name, + "dataset_id" -> dataset._id, + "dataset_directory_name" -> dataset.directoryName, "voxel_size_factor" -> voxelSize.factor.toUriLiteral, "voxel_size_unit" -> voxelSize.unit ) @@ -142,25 +143,25 @@ class JobController @Inject()( } } - def runComputeMeshFileJob(organizationId: String, - datasetName: String, + def runComputeMeshFileJob(datasetId: String, layerName: String, mag: String, agglomerateView: Option[String]): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { - organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> Messages( "organization.notFound", - organizationId) + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.meshFile.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( - "dataset.notFound", - datasetName) ~> NOT_FOUND _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.compute_mesh_file commandArgs = Json.obj( - "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "organization_id" -> organization._id, + "dataset_name" -> dataset.name, + "dataset_id" -> dataset._id, + "dataset_directory_name" -> dataset.directoryName, "layer_name" -> layerName, "mag" -> mag, "agglomerate_view" -> agglomerateView @@ -170,22 +171,21 @@ class JobController @Inject()( } yield Ok(js) } - def runComputeSegmentIndexFileJob(organizationId: String, datasetName: String, layerName: String, - ): Action[AnyContent] = + def runComputeSegmentIndexFileJob(datasetId: String, layerName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { - organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( + datasetIdValidated <- 
ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> Messages( "organization.notFound", - organizationId) + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.segmentIndexFile.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( - "dataset.notFound", - datasetName) ~> NOT_FOUND _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.compute_segment_index_file commandArgs = Json.obj( - "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "organization_id" -> dataset._organization, + "dataset_name" -> dataset.name, + "dataset_directory_name" -> dataset.directoryName, "segmentation_layer_name" -> layerName, ) job <- jobService.submitJob(command, commandArgs, request.identity, dataset._dataStore) ?~> "job.couldNotRunSegmentIndexFile" @@ -193,26 +193,23 @@ class JobController @Inject()( } yield Ok(js) } - def runInferNucleiJob(organizationId: String, - datasetName: String, - layerName: String, - newDatasetName: String): Action[AnyContent] = + def runInferNucleiJob(datasetId: String, layerName: String, newDatasetName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> Messages( "organization.notFound", - organizationId) + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.inferNuclei.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( - "dataset.notFound", - datasetName) ~> NOT_FOUND _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.infer_nuclei commandArgs = Json.obj( - "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "organization_id" -> dataset._organization, + "dataset_name" -> dataset.name, + "dataset_directory_name" -> dataset.directoryName, "layer_name" -> layerName, "new_dataset_name" -> newDatasetName ) @@ -222,27 +219,27 @@ class JobController @Inject()( } } - def runInferNeuronsJob(organizationId: String, - datasetName: String, + def runInferNeuronsJob(datasetId: String, layerName: String, bbox: String, newDatasetName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization) ?~> Messages("organization.notFound", + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.inferNeurons.notAllowed.organization" ~> 
FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( - "dataset.notFound", - datasetName) ~> NOT_FOUND _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(layerName) multiUser <- multiUserDAO.findOne(request.identity._multiUser) _ <- Fox.runIf(!multiUser.isSuperUser)(jobService.assertBoundingBoxLimits(bbox, None)) command = JobCommand.infer_neurons commandArgs = Json.obj( - "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "organization_id" -> organization._id, + "dataset_name" -> dataset.name, + "dataset_directory_name" -> dataset.directoryName, "new_dataset_name" -> newDatasetName, "layer_name" -> layerName, "bbox" -> bbox, @@ -253,19 +250,18 @@ class JobController @Inject()( } } - def runInferMitochondriaJob(organizationId: String, - datasetName: String, + def runInferMitochondriaJob(datasetId: String, layerName: String, bbox: String, newDatasetName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization) ?~> Messages("organization.notFound", + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.inferMitochondria.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( - "dataset.notFound", - datasetName) ~> NOT_FOUND _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(layerName) multiUser <- multiUserDAO.findOne(request.identity._multiUser) @@ -273,8 +269,9 @@ class JobController @Inject()( _ <- Fox.runIf(!multiUser.isSuperUser)(jobService.assertBoundingBoxLimits(bbox, None)) command = JobCommand.infer_mitochondria commandArgs = Json.obj( - "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "organization_id" -> dataset._organization, + "dataset_name" -> dataset.name, + "dataset_directory_name" -> dataset.directoryName, "new_dataset_name" -> newDatasetName, "layer_name" -> layerName, "bbox" -> bbox, @@ -285,26 +282,26 @@ class JobController @Inject()( } } - def runAlignSectionsJob(organizationId: String, - datasetName: String, + def runAlignSectionsJob(datasetId: String, layerName: String, newDatasetName: String, annotationId: Option[String] = None): Action[AnyContent] = sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization) ?~> Messages("organization.notFound", + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.alignSections.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( - "dataset.notFound", - datasetName) ~> 
NOT_FOUND _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(layerName) _ <- Fox.runOptional(annotationId)(ObjectId.fromString) command = JobCommand.align_sections commandArgs = Json.obj( - "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "organization_id" -> organization._id, + "dataset_name" -> dataset.name, + "dataset_directory_name" -> dataset.directoryName, "new_dataset_name" -> newDatasetName, "layer_name" -> layerName, "annotation_id" -> annotationId @@ -315,8 +312,7 @@ class JobController @Inject()( } } - def runExportTiffJob(organizationId: String, - datasetName: String, + def runExportTiffJob(datasetId: String, bbox: String, additionalCoordinates: Option[String], layerName: Option[String], @@ -327,9 +323,12 @@ class JobController @Inject()( sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> Messages( - "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> Messages( + "organization.notFound", + dataset._organization) + _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.meshFile.notAllowed.organization" ~> FORBIDDEN _ <- Fox.runOptional(layerName)(datasetService.assertValidLayerNameLax) _ <- Fox.runOptional(annotationLayerName)(datasetService.assertValidLayerNameLax) _ <- jobService.assertBoundingBoxLimits(bbox, mag) @@ -349,12 +348,13 @@ class JobController @Inject()( ndBoundingBox = NDBoundingBox(threeDBBox, additionalAxesOfNdBBox.getOrElse(Seq.empty), axisOrder) command = JobCommand.export_tiff exportFileName = if (asOmeTiff) - s"${formatDateForFilename(new Date())}__${datasetName}__${annotationLayerName.map(_ => "volume").getOrElse(layerName.getOrElse(""))}.ome.tif" + s"${formatDateForFilename(new Date())}__${dataset.name}__${annotationLayerName.map(_ => "volume").getOrElse(layerName.getOrElse(""))}.ome.tif" else - s"${formatDateForFilename(new Date())}__${datasetName}__${annotationLayerName.map(_ => "volume").getOrElse(layerName.getOrElse(""))}.zip" + s"${formatDateForFilename(new Date())}__${dataset.name}__${annotationLayerName.map(_ => "volume").getOrElse(layerName.getOrElse(""))}.zip" commandArgs = Json.obj( - "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "dataset_directory_name" -> dataset.directoryName, + "organization_id" -> organization._id, + "dataset_name" -> dataset.name, "nd_bbox" -> ndBoundingBox.toWkLibsDict, "export_file_name" -> exportFileName, "layer_name" -> layerName, @@ -368,8 +368,7 @@ class JobController @Inject()( } } - def runMaterializeVolumeAnnotationJob(organizationId: String, - datasetName: String, + def runMaterializeVolumeAnnotationJob(datasetId: String, fallbackLayerName: String, annotationId: String, annotationType: String, @@ -380,20 +379,20 @@ class JobController @Inject()( sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> Messages("dataset.notFound", datasetId) ~> 
NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> Messages( "organization.notFound", - organizationId) + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.materializeVolumeAnnotation.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( - "dataset.notFound", - datasetName) ~> NOT_FOUND _ <- datasetService.assertValidLayerNameLax(fallbackLayerName) command = JobCommand.materialize_volume_annotation _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(outputSegmentationLayerName) commandArgs = Json.obj( - "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "organization_id" -> organization._id, + "dataset_name" -> dataset.name, + "dataset_directory_name" -> dataset.directoryName, "fallback_layer_name" -> fallbackLayerName, "annotation_id" -> annotationId, "output_segmentation_layer_name" -> outputSegmentationLayerName, @@ -408,20 +407,21 @@ class JobController @Inject()( } } - def runFindLargestSegmentIdJob(organizationId: String, datasetName: String, layerName: String): Action[AnyContent] = + def runFindLargestSegmentIdJob(datasetId: String, layerName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization) ?~> Messages("organization.notFound", + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.findLargestSegmentId.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( - "dataset.notFound", - datasetName) ~> NOT_FOUND _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.find_largest_segment_id commandArgs = Json.obj( - "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "organization_id" -> organization._id, + "dataset_name" -> dataset.name, + "dataset_directory_name" -> dataset.directoryName, "layer_name" -> layerName ) job <- jobService.submitJob(command, commandArgs, request.identity, dataset._dataStore) ?~> "job.couldNotRunFindLargestSegmentId" @@ -430,16 +430,16 @@ class JobController @Inject()( } } - def runRenderAnimationJob(organizationId: String, datasetName: String): Action[AnimationJobOptions] = + def runRenderAnimationJob(datasetId: String): Action[AnimationJobOptions] = sil.SecuredAction.async(validateJson[AnimationJobOptions]) { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) - userOrganization <- organizationDAO.findOne(request.identity._organization) + datasetIdValidated <- ObjectId.fromString(datasetId) + dataset <- datasetDAO.findOne(datasetIdValidated) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization) ?~> Messages("organization.notFound", + dataset._organization) _ <- bool2Fox(request.identity._organization == 
organization._id) ?~> "job.renderAnimation.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( - "dataset.notFound", - datasetName) ~> NOT_FOUND + userOrganization <- organizationDAO.findOne(request.identity._organization) animationJobOptions = request.body _ <- Fox.runIf(userOrganization.pricingPlan == PricingPlan.Basic) { bool2Fox(animationJobOptions.includeWatermark) ?~> "job.renderAnimation.mustIncludeWatermark" @@ -449,11 +449,12 @@ class JobController @Inject()( } layerName = animationJobOptions.layerName _ <- datasetService.assertValidLayerNameLax(layerName) - exportFileName = s"webknossos_animation_${formatDateForFilename(new Date())}__${datasetName}__$layerName.mp4" + exportFileName = s"webknossos_animation_${formatDateForFilename(new Date())}__${dataset.name}__$layerName.mp4" command = JobCommand.render_animation commandArgs = Json.obj( - "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "organization_id" -> organization._id, + "dataset_name" -> dataset.name, + "dataset_directory_name" -> dataset.directoryName, "export_file_name" -> exportFileName, "layer_name" -> animationJobOptions.layerName, "bounding_box" -> animationJobOptions.boundingBox.toLiteral, diff --git a/app/controllers/LegacyApiController.scala b/app/controllers/LegacyApiController.scala index 418e8285bac..a5f29aba38a 100644 --- a/app/controllers/LegacyApiController.scala +++ b/app/controllers/LegacyApiController.scala @@ -6,7 +6,7 @@ import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.models.VoxelSize import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerType} import com.scalableminds.webknossos.tracingstore.tracings.volume.MagRestrictions -import models.dataset.DatasetService +import models.dataset.{DatasetDAO, DatasetService} import models.organization.OrganizationDAO import javax.inject.Inject @@ -18,7 +18,7 @@ import play.api.http.HttpEntity import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, PlayBodyParsers, Result} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext @@ -38,6 +38,7 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, organizationDAO: OrganizationDAO, datasetService: DatasetService, taskDAO: TaskDAO, + datasetDAO: DatasetDAO, taskService: TaskService, sil: Silhouette[WkEnv])(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller { @@ -92,7 +93,8 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, def readDatasetV6(organizationName: String, datasetName: String, sharingToken: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => for { - result <- datasetController.read(organizationName, datasetName, sharingToken)(request) + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationName) + result <- datasetController.read(dataset._id.toString, sharingToken)(request) adaptedResult <- replaceInResult(replaceVoxelSize)(result) } yield adaptedResult } @@ -103,10 +105,44 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, organization <- organizationDAO.findOne(organizationName) // the old organizationName is now the organization id _ <- bool2Fox(organization._id == request.identity._organization) ~> FORBIDDEN _ <- 
datasetService.assertValidDatasetName(datasetName) - _ <- datasetService.assertNewDatasetName(datasetName, organization._id) ?~> "dataset.name.alreadyTaken" } yield Ok } + def updateDatasetV8(organizationId: String, datasetName: String): Action[JsValue] = + sil.SecuredAction.async(parse.json) { implicit request => + for { + _ <- Fox.successful(logVersioned(request)) + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) + result <- datasetController.update(dataset._id.toString)(request) + } yield result + } + + def getDatasetSharingTokenV8(organizationId: String, datasetName: String): Action[AnyContent] = + sil.SecuredAction.async { implicit request => + for { + _ <- Fox.successful(logVersioned(request)) + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) + sharingToken <- datasetController.getSharingToken(dataset._id.toString)(request) + } yield sharingToken + } + + def updateDatasetTeamsV8(organizationId: String, datasetName: String): Action[List[ObjectId]] = + sil.SecuredAction.async(validateJson[List[ObjectId]]) { implicit request => + for { + _ <- Fox.successful(logVersioned(request)) + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) + result <- datasetController.updateTeams(dataset._id.toString)(request) + } yield result + } + + def readDatasetV8(organizationId: String, datasetName: String, sharingToken: Option[String]): Action[AnyContent] = + sil.UserAwareAction.async { implicit request => + for { + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) + result <- datasetController.read(dataset._id.toString, sharingToken)(request) + } yield result + } + /* to provide v4 - replace new annotation layers by old tracing ids (changed in v5) */ @@ -234,7 +270,8 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, sil.SecuredAction.async(validateJson[LegacyCreateExplorationalParameters]) { implicit request => for { _ <- Fox.successful(logVersioned(request)) - result <- annotationController.createExplorational(organizationName, datasetName)( + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationName) + result <- annotationController.createExplorational(dataset._id.toString)( request.withBody(replaceCreateExplorationalParameters(request))) adaptedResult <- replaceInResult(replaceAnnotationLayers)(result) } yield adaptedResult diff --git a/app/controllers/MaintenanceController.scala b/app/controllers/MaintenanceController.scala index b1d6fb78150..cbe7ee820c9 100644 --- a/app/controllers/MaintenanceController.scala +++ b/app/controllers/MaintenanceController.scala @@ -7,7 +7,7 @@ import models.user.UserService import play.api.libs.json.{JsObject, Json, OFormat} import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import utils.sql.{SQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/controllers/MeshController.scala b/app/controllers/MeshController.scala index 66c81715698..3eddfb41ba7 100644 --- a/app/controllers/MeshController.scala +++ b/app/controllers/MeshController.scala @@ -6,7 +6,7 @@ import models.annotation.AnnotationDAO import models.mesh.{MeshDAO, MeshInfo, MeshInfoParameters, MeshService} import play.api.mvc.{Action, AnyContent, PlayBodyParsers, RawBuffer} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import javax.inject.Inject import 
scala.concurrent.ExecutionContext diff --git a/app/controllers/ProjectController.scala b/app/controllers/ProjectController.scala index 74897a7f327..c96f27d642f 100644 --- a/app/controllers/ProjectController.scala +++ b/app/controllers/ProjectController.scala @@ -13,7 +13,7 @@ import play.api.i18n.Messages import play.api.libs.json.{JsValue, Json} import play.api.mvc.{Action, AnyContent} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/controllers/PublicationController.scala b/app/controllers/PublicationController.scala index 2bd629a4855..049d2cc4f6a 100755 --- a/app/controllers/PublicationController.scala +++ b/app/controllers/PublicationController.scala @@ -8,7 +8,7 @@ import models.dataset.{PublicationDAO, PublicationService} import play.api.libs.json._ import play.api.mvc.{Action, AnyContent} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/controllers/ReportController.scala b/app/controllers/ReportController.scala index a75f886464e..43a89df57cb 100644 --- a/app/controllers/ReportController.scala +++ b/app/controllers/ReportController.scala @@ -8,7 +8,7 @@ import models.user.{User, UserDAO, UserService} import play.api.libs.json.{Json, OFormat} import play.api.mvc.{Action, AnyContent} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import utils.sql.{SimpleSQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/controllers/ScriptController.scala b/app/controllers/ScriptController.scala index c0175ac8420..91f7821900c 100644 --- a/app/controllers/ScriptController.scala +++ b/app/controllers/ScriptController.scala @@ -10,7 +10,7 @@ import play.api.libs.json.Reads._ import play.api.libs.json._ import play.api.mvc.{Action, AnyContent} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/controllers/ShortLinkController.scala b/app/controllers/ShortLinkController.scala index 5c0194cc818..37d2900768f 100644 --- a/app/controllers/ShortLinkController.scala +++ b/app/controllers/ShortLinkController.scala @@ -1,12 +1,13 @@ package controllers +import com.scalableminds.util.objectid.ObjectId import play.silhouette.api.Silhouette import com.scalableminds.util.tools.FoxImplicits import models.shortlinks.{ShortLink, ShortLinkDAO} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.{RandomIDGenerator, WkEnv} -import utils.{ObjectId, WkConf} +import utils.WkConf import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/controllers/TaskController.scala b/app/controllers/TaskController.scala index d5e48d8919a..53cdccc6ef2 100755 --- a/app/controllers/TaskController.scala +++ b/app/controllers/TaskController.scala @@ -9,7 +9,7 @@ import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import javax.inject.Inject -import models.annotation.{AnnotationUploadService, _} +import models.annotation._ import models.annotation.nml.NmlResults.TracingBoxContainer import models.project.ProjectDAO import models.task._ @@ -19,7 +19,7 @@ import play.api.i18n.Messages import play.api.libs.json._ 
import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext @@ -47,10 +47,11 @@ class TaskController @Inject()(taskCreationService: TaskCreationService, implicit request => for { _ <- taskCreationService.assertBatchLimit(request.body.length, request.body.map(_.taskTypeId)) - taskParameters <- taskCreationService.createTracingsFromBaseAnnotations(request.body, + taskParameters <- taskCreationService.fillMissingDatasetIds(request.body, request.identity._organization) + taskParameters <- taskCreationService.createTracingsFromBaseAnnotations(taskParameters, request.identity._organization) - skeletonBaseOpts: List[Option[SkeletonTracing]] <- taskCreationService.createTaskSkeletonTracingBases( - taskParameters) + skeletonBaseOpts: List[Option[SkeletonTracing]] <- taskCreationService + .createTaskSkeletonTracingBases(taskParameters, request.identity._organization) volumeBaseOpts: List[Option[(VolumeTracing, Option[File])]] <- taskCreationService .createTaskVolumeTracingBases(taskParameters, request.identity._organization) paramsWithTracings = taskParameters.lazyZip(skeletonBaseOpts).lazyZip(volumeBaseOpts).map { @@ -86,13 +87,12 @@ class TaskController @Inject()(taskCreationService: TaskCreationService, project <- projectDAO .findOneByNameAndOrganization(params.projectName, request.identity._organization) ?~> "project.notFound" ~> NOT_FOUND _ <- Fox.assertTrue(userService.isTeamManagerOrAdminOf(request.identity, project._team)) - extractedFiles = nmlService.extractFromFiles(inputFiles.map(f => (f.ref.path.toFile, f.filename)), - useZipName = false, - isTaskUpload = true) + extractedFiles <- nmlService.extractFromFiles(inputFiles.map(f => (f.ref.path.toFile, f.filename)), + SharedParsingParameters(useZipName = false, isTaskUpload = true)) extractedTracingBoxesRaw: List[TracingBoxContainer] = extractedFiles.toBoxes - extractedTracingBoxes: List[TracingBoxContainer] <- taskCreationService - .addVolumeFallbackBoundingBoxes(extractedTracingBoxesRaw, request.identity._organization) - fullParams: List[Box[TaskParameters]] = taskCreationService.buildFullParamsFromFiles(params, + extractedTracingBoxes: List[TracingBoxContainer] <- taskCreationService.addVolumeFallbackBoundingBoxes( + extractedTracingBoxesRaw) + fullParams: List[Box[TaskParametersWithDatasetId]] = taskCreationService.buildFullParamsFromFiles(params, extractedTracingBoxes) (skeletonBases, volumeBases) <- taskCreationService.fillInMissingTracings( extractedTracingBoxes.map(_.skeleton), diff --git a/app/controllers/TaskTypeController.scala b/app/controllers/TaskTypeController.scala index 1f2e2831d3e..02cb856d80b 100755 --- a/app/controllers/TaskTypeController.scala +++ b/app/controllers/TaskTypeController.scala @@ -11,7 +11,7 @@ import play.api.i18n.Messages import play.api.libs.functional.syntax._ import play.api.libs.json.Reads._ import play.api.libs.json._ -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import javax.inject.Inject import play.api.mvc.{Action, AnyContent} import security.WkEnv diff --git a/app/controllers/TeamController.scala b/app/controllers/TeamController.scala index 38d5ad24a5d..47e22bec899 100755 --- a/app/controllers/TeamController.scala +++ b/app/controllers/TeamController.scala @@ -9,7 +9,7 @@ import play.api.i18n.Messages import play.api.libs.json._ import play.api.mvc.{Action, AnyContent} import security.WkEnv -import utils.ObjectId +import 
com.scalableminds.util.objectid.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/controllers/TimeController.scala b/app/controllers/TimeController.scala index 2499cf0df98..95216464d5a 100644 --- a/app/controllers/TimeController.scala +++ b/app/controllers/TimeController.scala @@ -13,7 +13,7 @@ import net.liftweb.common.Box import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext import scala.concurrent.duration.Duration diff --git a/app/controllers/UserController.scala b/app/controllers/UserController.scala index 6fbd048f64c..a3625ecaaf7 100755 --- a/app/controllers/UserController.scala +++ b/app/controllers/UserController.scala @@ -13,7 +13,7 @@ import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.functional.syntax._ import play.api.libs.json._ import play.api.mvc._ -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import javax.inject.Inject import models.user.Theme.Theme diff --git a/app/controllers/UserTokenController.scala b/app/controllers/UserTokenController.scala index 83dd2c2175b..52e0b24207b 100644 --- a/app/controllers/UserTokenController.scala +++ b/app/controllers/UserTokenController.scala @@ -2,6 +2,7 @@ package controllers import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.services.AccessMode.AccessMode @@ -23,7 +24,7 @@ import net.liftweb.common.{Box, Full} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers, Result} import security.{RandomIDGenerator, URLSharing, WkEnv, WkSilhouetteEnvironment} -import utils.{ObjectId, WkConf} +import utils.WkConf import scala.concurrent.ExecutionContext @@ -97,9 +98,9 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, case AccessResourceType.datasource => handleDataSourceAccess(accessRequest.resourceId, accessRequest.mode, userBox)(sharingTokenAccessCtx) case AccessResourceType.tracing => - handleTracingAccess(accessRequest.resourceId.name, accessRequest.mode, userBox, token) + handleTracingAccess(accessRequest.resourceId.directoryName, accessRequest.mode, userBox, token) case AccessResourceType.jobExport => - handleJobExportAccess(accessRequest.resourceId.name, accessRequest.mode, userBox) + handleJobExportAccess(accessRequest.resourceId.directoryName, accessRequest.mode, userBox) case _ => Fox.successful(UserAccessAnswer(granted = false, Some("Invalid access token."))) } @@ -113,7 +114,7 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, def tryRead: Fox[UserAccessAnswer] = for { - dataSourceBox <- datasetDAO.findOneByNameAndOrganization(dataSourceId.name, dataSourceId.team).futureBox + dataSourceBox <- datasetDAO.findOneByDataSourceId(dataSourceId).futureBox } yield dataSourceBox match { case Full(_) => UserAccessAnswer(granted = true) @@ -122,7 +123,7 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, def tryWrite: Fox[UserAccessAnswer] = for { - dataset <- datasetDAO.findOneByNameAndOrganization(dataSourceId.name, dataSourceId.team) ?~> "datasource.notFound" + dataset <- datasetDAO.findOneByDataSourceId(dataSourceId) ?~> "datasource.notFound" user <- 
userBox.toFox ?~> "auth.token.noUser" isAllowed <- datasetService.isEditableBy(dataset, Some(user)) } yield UserAccessAnswer(isAllowed) @@ -132,9 +133,9 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, case Full(user) => for { // if dataSourceId is empty, the request asks if the user may administrate in *any* (i.e. their own) organization - relevantOrganization <- if (dataSourceId.team.isEmpty) + relevantOrganization <- if (dataSourceId.organizationId.isEmpty) Fox.successful(user._organization) - else organizationDAO.findOne(dataSourceId.team).map(_._id) + else organizationDAO.findOne(dataSourceId.organizationId).map(_._id) isTeamManagerOrAdmin <- userService.isTeamManagerOrAdminOfOrg(user, relevantOrganization) } yield UserAccessAnswer(isTeamManagerOrAdmin || user.isDatasetManager) case _ => Fox.successful(UserAccessAnswer(granted = false, Some("invalid access token"))) @@ -143,7 +144,7 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, def tryDelete: Fox[UserAccessAnswer] = for { _ <- bool2Fox(conf.Features.allowDeleteDatasets) ?~> "dataset.delete.disabled" - dataset <- datasetDAO.findOneByNameAndOrganization(dataSourceId.name, dataSourceId.team)(GlobalAccessContext) ?~> "datasource.notFound" + dataset <- datasetDAO.findOneByDataSourceId(dataSourceId)(GlobalAccessContext) ?~> "datasource.notFound" user <- userBox.toFox ?~> "auth.token.noUser" } yield UserAccessAnswer(user._organization == dataset._organization && user.isAdmin) diff --git a/app/controllers/VoxelyticsController.scala b/app/controllers/VoxelyticsController.scala index 0400c317771..6ee4b6d6220 100644 --- a/app/controllers/VoxelyticsController.scala +++ b/app/controllers/VoxelyticsController.scala @@ -1,5 +1,6 @@ package controllers +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import models.organization.OrganizationDAO @@ -10,7 +11,7 @@ import play.api.mvc._ import play.silhouette.api.Silhouette import play.silhouette.api.actions.SecuredRequest import security.WkEnv -import utils.{ObjectId, WkConf} +import utils.WkConf import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index 085465c3207..fd8d35859b3 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -1,6 +1,7 @@ package controllers import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, GlobalAccessContext} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.controllers.JobExportProperties import com.scalableminds.webknossos.datastore.models.UnfinishedUpload @@ -28,7 +29,7 @@ import play.api.libs.json.{JsError, JsSuccess, JsValue, Json} import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.{WebknossosBearerTokenAuthenticatorService, WkSilhouetteEnvironment} import telemetry.SlackNotificationService -import utils.{ObjectId, WkConf} +import utils.WkConf import scala.concurrent.{ExecutionContext, Future} @@ -72,16 +73,20 @@ class WKRemoteDataStoreController @Inject()( bool2Fox(usedStorageBytes <= includedStorage)) ?~> "dataset.upload.storageExceeded" ~> FORBIDDEN _ <- bool2Fox(organization._id == user._organization) ?~> "notAllowed" ~> FORBIDDEN _ <- datasetService.assertValidDatasetName(uploadInfo.name) - _ <- 
datasetService.assertNewDatasetName(uploadInfo.name, organization._id) ?~> "dataset.name.alreadyTaken" _ <- bool2Fox(dataStore.onlyAllowedOrganization.forall(_ == organization._id)) ?~> "dataset.upload.Datastore.restricted" folderId <- ObjectId.fromString(uploadInfo.folderId.getOrElse(organization._rootFolder.toString)) ?~> "dataset.upload.folderId.invalid" _ <- folderDAO.assertUpdateAccess(folderId)(AuthorizedAccessContext(user)) ?~> "folder.noWriteAccess" - _ <- Fox.serialCombined(uploadInfo.layersToLink.getOrElse(List.empty))(l => validateLayerToLink(l, user)) ?~> "dataset.upload.invalidLinkedLayers" + layersToLinkWithDatasetId <- Fox.serialCombined(uploadInfo.layersToLink.getOrElse(List.empty))(l => + validateLayerToLink(l, user)) ?~> "dataset.upload.invalidLinkedLayers" dataset <- datasetService.createPreliminaryDataset(uploadInfo.name, uploadInfo.organization, dataStore) ?~> "dataset.name.alreadyTaken" _ <- datasetDAO.updateFolder(dataset._id, folderId)(GlobalAccessContext) _ <- datasetService.addInitialTeams(dataset, uploadInfo.initialTeams, user)(AuthorizedAccessContext(user)) _ <- datasetService.addUploader(dataset, user._id)(AuthorizedAccessContext(user)) - } yield Ok + // Update newDatasetId and directoryName according to the newly created dataset. + updatedInfo = uploadInfo.copy(newDatasetId = dataset._id.toString, + directoryName = dataset.directoryName, + layersToLink = Some(layersToLinkWithDatasetId)) + } yield Ok(Json.toJson(updatedInfo)) } } @@ -104,6 +109,7 @@ class WKRemoteDataStoreController @Inject()( case (d, teamIds) => new UnfinishedUpload("", d.dataSourceId, + d.name, d.folderId.toString, d.created, None, // Filled by datastore. @@ -113,8 +119,9 @@ class WKRemoteDataStoreController @Inject()( } } - private def validateLayerToLink(layerIdentifier: LinkedLayerIdentifier, - requestingUser: User)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Unit] = + private def validateLayerToLink(layerIdentifier: LinkedLayerIdentifier, requestingUser: User)( + implicit ec: ExecutionContext, + m: MessagesProvider): Fox[LinkedLayerIdentifier] = for { organization <- organizationDAO.findOne(layerIdentifier.getOrganizationId)(GlobalAccessContext) ?~> Messages( "organization.notFound", @@ -123,12 +130,12 @@ class WKRemoteDataStoreController @Inject()( AuthorizedAccessContext(requestingUser)) ?~> Messages("dataset.notFound", layerIdentifier.dataSetName) isTeamManagerOrAdmin <- userService.isTeamManagerOrAdminOfOrg(requestingUser, dataset._organization) _ <- Fox.bool2Fox(isTeamManagerOrAdmin || requestingUser.isDatasetManager || dataset.isPublic) ?~> "dataset.upload.linkRestricted" - } yield () + } yield layerIdentifier.copy(datasetDirectoryName = Some(dataset.directoryName)) def reportDatasetUpload(name: String, key: String, token: String, - datasetName: String, + datasetDirectoryName: String, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean): Action[AnyContent] = @@ -136,22 +143,21 @@ class WKRemoteDataStoreController @Inject()( dataStoreService.validateAccess(name, key) { dataStore => for { user <- bearerTokenService.userForToken(token) - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, user._organization)(GlobalAccessContext) ?~> Messages( - "dataset.notFound", - datasetName) ~> NOT_FOUND + dataset <- datasetDAO.findOneByDirectoryNameAndOrganization(datasetDirectoryName, user._organization)( + GlobalAccessContext) ?~> Messages("dataset.notFound", datasetDirectoryName) ~> NOT_FOUND _ <- Fox.runIf(!needsConversion && 
!viaAddRoute)(usedStorageService.refreshStorageReportForDataset(dataset)) - _ <- Fox.runIf(!needsConversion)(logUploadToSlack(user, datasetName, viaAddRoute)) + _ <- Fox.runIf(!needsConversion)(logUploadToSlack(user, dataset._id, viaAddRoute)) _ = analyticsService.track(UploadDatasetEvent(user, dataset, dataStore, datasetSizeBytes)) _ = if (!needsConversion) mailchimpClient.tagUser(user, MailchimpTag.HasUploadedOwnDataset) - } yield Ok + } yield Ok(Json.obj("id" -> dataset._id)) } } - private def logUploadToSlack(user: User, datasetName: String, viaAddRoute: Boolean): Fox[Unit] = + private def logUploadToSlack(user: User, datasetId: ObjectId, viaAddRoute: Boolean): Fox[Unit] = for { organization <- organizationDAO.findOne(user._organization)(GlobalAccessContext) multiUser <- multiUserDAO.findOne(user._multiUser)(GlobalAccessContext) - resultLink = s"${conf.Http.uri}/datasets/${organization._id}/$datasetName" + resultLink = s"${conf.Http.uri}/datasets/$datasetId" addLabel = if (viaAddRoute) "(via explore+add)" else "(upload without conversion)" superUserLabel = if (multiUser.isSuperUser) " (for superuser)" else "" _ = slackNotificationService.info(s"Dataset added $addLabel$superUserLabel", @@ -216,9 +222,7 @@ class WKRemoteDataStoreController @Inject()( dataStoreService.validateAccess(name, key) { _ => for { datasourceId <- request.body.validate[DataSourceId].asOpt.toFox ?~> "dataStore.upload.invalid" - existingDataset = datasetDAO - .findOneByNameAndOrganization(datasourceId.name, datasourceId.team)(GlobalAccessContext) - .futureBox + existingDataset = datasetDAO.findOneByDataSourceId(datasourceId)(GlobalAccessContext).futureBox _ <- existingDataset.flatMap { case Full(dataset) => diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 99b14f82c45..26d5c7d5f53 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -109,24 +109,24 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore annotation <- annotationInformationProvider.annotationForTracing(tracingId) ?~> s"No annotation for tracing $tracingId" dataset <- datasetDAO.findOne(annotation._dataset) organization <- organizationDAO.findOne(dataset._organization) - } yield Ok(Json.toJson(DataSourceId(dataset.name, organization._id))) + } yield Ok(Json.toJson(DataSourceId(dataset.directoryName, organization._id))) } } def dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], - datasetName: String): Action[AnyContent] = + datasetDirectory: String): Action[AnyContent] = Action.async { implicit request => tracingStoreService.validateAccess(name, key) { _ => implicit val ctx: DBAccessContext = GlobalAccessContext for { organizationIdWithFallback <- Fox.fillOption(organizationId) { - datasetDAO.getOrganizationIdForDataset(datasetName)(GlobalAccessContext) - } ?~> Messages("dataset.noAccess", datasetName) ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationIdWithFallback) ?~> Messages( + datasetDAO.getOrganizationIdForDataset(datasetDirectory)(GlobalAccessContext) + } ?~> Messages("dataset.noAccess", datasetDirectory) ~> FORBIDDEN + dataset <- datasetDAO.findOneByDirectoryNameAndOrganization(datasetDirectory, organizationIdWithFallback) ?~> Messages( "dataset.noAccess", - datasetName) ~> FORBIDDEN + datasetDirectory) ~> FORBIDDEN dataStore <- datasetService.dataStoreFor(dataset) } yield 
Ok(Json.toJson(dataStore.url)) } diff --git a/app/controllers/WKRemoteWorkerController.scala b/app/controllers/WKRemoteWorkerController.scala index 8df45323cba..b9edd6bce67 100644 --- a/app/controllers/WKRemoteWorkerController.scala +++ b/app/controllers/WKRemoteWorkerController.scala @@ -1,6 +1,7 @@ package controllers import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox import models.aimodels.AiInferenceDAO import models.dataset.DatasetDAO @@ -12,7 +13,7 @@ import models.voxelytics.VoxelyticsDAO import net.liftweb.common.{Empty, Failure, Full} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} -import utils.{ObjectId, WkConf} +import utils.WkConf import scala.concurrent.ExecutionContext @@ -109,7 +110,7 @@ class WKRemoteWorkerController @Inject()(jobDAO: JobDAO, _ <- workerDAO.findOneByKey(key) ?~> "jobs.worker.notFound" jobIdParsed <- ObjectId.fromString(id) organizationId <- jobDAO.organizationIdForJobId(jobIdParsed) ?~> "job.notFound" - dataset <- datasetDAO.findOneByNameAndOrganization(request.body, organizationId) + dataset <- datasetDAO.findOneByDirectoryNameAndOrganization(request.body, organizationId) aiInference <- aiInferenceDAO.findOneByJobId(jobIdParsed) ?~> "aiInference.notFound" _ <- aiInferenceDAO.updateDataset(aiInference._id, dataset._id) } yield Ok diff --git a/app/models/aimodels/AiInference.scala b/app/models/aimodels/AiInference.scala index d7595b68e54..d40f846db7b 100644 --- a/app/models/aimodels/AiInference.scala +++ b/app/models/aimodels/AiInference.scala @@ -10,7 +10,7 @@ import models.job.{JobDAO, JobService} import models.user.{User, UserDAO, UserService} import play.api.libs.json.{JsObject, Json} import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import utils.sql.{SQLDAO, SqlClient, SqlToken} import javax.inject.Inject diff --git a/app/models/aimodels/AiModel.scala b/app/models/aimodels/AiModel.scala index 5857f85e63d..a58f5dbba8b 100644 --- a/app/models/aimodels/AiModel.scala +++ b/app/models/aimodels/AiModel.scala @@ -13,7 +13,7 @@ import slick.dbio.{DBIO, Effect, NoStream} import slick.jdbc.PostgresProfile.api._ import slick.lifted.Rep import slick.sql.SqlAction -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import utils.sql.{SQLDAO, SqlClient, SqlToken} import javax.inject.Inject diff --git a/app/models/analytics/AnalyticsDAO.scala b/app/models/analytics/AnalyticsDAO.scala index 4fa65f8a3d8..b7ea0ee2a6d 100644 --- a/app/models/analytics/AnalyticsDAO.scala +++ b/app/models/analytics/AnalyticsDAO.scala @@ -1,7 +1,7 @@ package models.analytics import com.scalableminds.util.tools.Fox -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import utils.sql.{SimpleSQLDAO, SqlClient, SqlToken} import javax.inject.Inject diff --git a/app/models/analytics/AnalyticsEvent.scala b/app/models/analytics/AnalyticsEvent.scala index cab884c9e6a..8871850cc70 100644 --- a/app/models/analytics/AnalyticsEvent.scala +++ b/app/models/analytics/AnalyticsEvent.scala @@ -8,7 +8,7 @@ import models.job.JobCommand.JobCommand import models.organization.Organization import models.user.User import play.api.libs.json._ -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/analytics/AnalyticsService.scala b/app/models/analytics/AnalyticsService.scala index 
86b2cec45c1..5df8c4a2cf8 100644 --- a/app/models/analytics/AnalyticsService.scala +++ b/app/models/analytics/AnalyticsService.scala @@ -1,6 +1,7 @@ package models.analytics import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.{bool2Fox, box2Fox} import com.scalableminds.webknossos.datastore.rpc.RPC @@ -9,7 +10,7 @@ import models.user.{MultiUserDAO, UserDAO} import net.liftweb.common.Box.tryo import play.api.http.Status.UNAUTHORIZED import play.api.libs.json._ -import utils.{ObjectId, WkConf} +import utils.WkConf import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/Annotation.scala b/app/models/annotation/Annotation.scala index 887bf7439c9..227c5de7881 100755 --- a/app/models/annotation/Annotation.scala +++ b/app/models/annotation/Annotation.scala @@ -15,7 +15,7 @@ import slick.jdbc.GetResult import slick.jdbc.TransactionIsolation.Serializable import slick.lifted.Rep import slick.sql.SqlAction -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import utils.sql.{SQLDAO, SimpleSQLDAO, SqlClient, SqlToken} import javax.inject.Inject diff --git a/app/models/annotation/AnnotationIdentifier.scala b/app/models/annotation/AnnotationIdentifier.scala index 3b555e4fa9b..91cb054d2cd 100644 --- a/app/models/annotation/AnnotationIdentifier.scala +++ b/app/models/annotation/AnnotationIdentifier.scala @@ -2,7 +2,7 @@ package models.annotation import com.scalableminds.util.tools.{Fox, FoxImplicits} import models.annotation.AnnotationType.AnnotationType -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/AnnotationInformationProvider.scala b/app/models/annotation/AnnotationInformationProvider.scala index 61a6561ffde..9d7e4dd35f9 100755 --- a/app/models/annotation/AnnotationInformationProvider.scala +++ b/app/models/annotation/AnnotationInformationProvider.scala @@ -8,7 +8,7 @@ import models.annotation.AnnotationType.AnnotationType import models.annotation.handler.AnnotationInformationHandlerSelector import models.user.User import net.liftweb.common.Full -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/AnnotationMerger.scala b/app/models/annotation/AnnotationMerger.scala index 2c251f01462..48aedf4a9da 100644 --- a/app/models/annotation/AnnotationMerger.scala +++ b/app/models/annotation/AnnotationMerger.scala @@ -13,7 +13,7 @@ import javax.inject.Inject import models.annotation.AnnotationType.AnnotationType import models.dataset.DatasetDAO import models.user.User -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/AnnotationMutexService.scala b/app/models/annotation/AnnotationMutexService.scala index 6495b1537cc..35d26f681f1 100644 --- a/app/models/annotation/AnnotationMutexService.scala +++ b/app/models/annotation/AnnotationMutexService.scala @@ -2,6 +2,7 @@ package models.annotation import org.apache.pekko.actor.ActorSystem import com.scalableminds.util.accesscontext.GlobalAccessContext +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.helpers.IntervalScheduler @@ -11,7 
+12,7 @@ import models.user.{UserDAO, UserService} import net.liftweb.common.Full import play.api.inject.ApplicationLifecycle import play.api.libs.json.{JsObject, Json} -import utils.{ObjectId, WkConf} +import utils.WkConf import utils.sql.{SimpleSQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/models/annotation/AnnotationPrivateLink.scala b/app/models/annotation/AnnotationPrivateLink.scala index 5370f7add64..62347d307d2 100644 --- a/app/models/annotation/AnnotationPrivateLink.scala +++ b/app/models/annotation/AnnotationPrivateLink.scala @@ -7,7 +7,7 @@ import com.scalableminds.webknossos.schema.Tables._ import play.api.libs.json.{JsValue, Json, OFormat} import security.RandomIDGenerator import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import utils.sql.{SQLDAO, SqlClient, SqlToken} import javax.inject.Inject diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 05020dc622e..67630db4d13 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -5,6 +5,7 @@ import org.apache.pekko.stream.Materializer import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, DBAccessContext, GlobalAccessContext} import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.io.{NamedStream, ZipIO} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{BoxImplicits, Fox, FoxImplicits, TextUtils} import com.scalableminds.webknossos.datastore.SkeletonTracing._ @@ -56,7 +57,7 @@ import net.liftweb.common.{Box, Full} import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.Files.{TemporaryFile, TemporaryFileCreator} import play.api.libs.json.{JsNull, JsObject, JsValue, Json} -import utils.{ObjectId, WkConf} +import utils.WkConf import java.io.{BufferedOutputStream, File, FileOutputStream} import javax.inject.Inject @@ -73,7 +74,8 @@ case class DownloadAnnotation(skeletonTracingIdOpt: Option[String], user: User, taskOpt: Option[Task], organizationId: String, - datasetName: String) + datasetName: String, + datasetId: ObjectId) // Used to pass duplicate properties when creating a new tracing to avoid masking them. // Uses the proto-generated geometry classes, hence the full qualifiers. 
@@ -156,7 +158,7 @@ class AnnotationService @Inject()( remoteDatastoreClient = new WKRemoteDataStoreClient(datasetDataStore, rpc) fallbackLayerHasSegmentIndex <- fallbackLayer match { case Some(layer) => - remoteDatastoreClient.hasSegmentIndexFile(datasetOrganizationId, dataSource.id.name, layer.name) + remoteDatastoreClient.hasSegmentIndexFile(datasetOrganizationId, dataSource.id.directoryName, layer.name) case None => Fox.successful(false) } } yield @@ -164,7 +166,7 @@ class AnnotationService @Inject()( None, boundingBoxToProto(boundingBox.getOrElse(dataSource.boundingBox)), System.currentTimeMillis(), - dataSource.id.name, + dataSource.id.directoryName, vec3IntToProto(startPosition.getOrElse(dataSource.center)), vec3DoubleToProto(startRotation.getOrElse(vec3DoubleFromProto(VolumeTracingDefaults.editRotation))), elementClassToProto( @@ -287,7 +289,7 @@ class AnnotationService @Inject()( dataStore, fallbackLayer, magRestrictions = annotationLayerParameters.magRestrictions.getOrElse(MagRestrictions.empty), - mappingName = annotationLayerParameters.mappingName + mappingName = annotationLayerParameters.mappingName, ) volumeTracingAdapted = oldPrecedenceLayerProperties.map { p => volumeTracing.copy( @@ -391,7 +393,7 @@ class AnnotationService @Inject()( dataset <- datasetDAO.findOne(datasetId) ?~> "dataset.noAccessById" dataSource <- datasetService.dataSourceFor(dataset) datasetOrganization <- organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> "organization.notFound" - usableDataSource <- dataSource.toUsable ?~> Messages("dataset.notImported", dataSource.id.name) + usableDataSource <- dataSource.toUsable ?~> Messages("dataset.notImported", dataSource.id.directoryName) annotationLayers <- createTracingsForExplorational(dataset, usableDataSource, annotationLayerParameters, @@ -523,10 +525,11 @@ class AnnotationService @Inject()( } yield result } - def createSkeletonTracingBase(datasetName: String, - boundingBox: Option[BoundingBox], - startPosition: Vec3Int, - startRotation: Vec3Double): SkeletonTracing = { + def createSkeletonTracingBase( + datasetId: ObjectId, + boundingBox: Option[BoundingBox], + startPosition: Vec3Int, + startRotation: Vec3Double)(implicit ctx: DBAccessContext): Fox[SkeletonTracing] = { val initialNode = NodeDefaults.createInstance.withId(1).withPosition(startPosition).withRotation(startRotation) val initialTree = Tree( 1, @@ -538,30 +541,30 @@ class AnnotationService @Inject()( "", System.currentTimeMillis() ) - SkeletonTracingDefaults.createInstance.copy( - datasetName = datasetName, - boundingBox = boundingBox.flatMap { box => - if (box.isEmpty) None else Some(box) - }, - editPosition = startPosition, - editRotation = startRotation, - activeNodeId = Some(1), - trees = Seq(initialTree) - ) + for { + dataset <- datasetDAO.findOne(datasetId) + } yield + SkeletonTracingDefaults.createInstance.copy( + datasetName = dataset.name, + boundingBox = boundingBox.flatMap { box => + if (box.isEmpty) None else Some(box) + }, + editPosition = startPosition, + editRotation = startRotation, + activeNodeId = Some(1), + trees = Seq(initialTree) + ) } def createVolumeTracingBase( - datasetName: String, - organizationId: String, + datasetId: ObjectId, boundingBox: Option[BoundingBox], startPosition: Vec3Int, startRotation: Vec3Double, volumeShowFallbackLayer: Boolean, magRestrictions: MagRestrictions)(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[VolumeTracing] = for { - organization <- organizationDAO.findOne(organizationId) - dataset <- 
datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> Messages("dataset.notFound", - datasetName) + dataset <- datasetDAO.findOne(datasetId) ?~> Messages("dataset.notFound", datasetId) dataSource <- datasetService.dataSourceFor(dataset).flatMap(_.toUsable) dataStore <- dataStoreDAO.findOneByName(dataset._dataStore.trim) fallbackLayer = if (volumeShowFallbackLayer) { @@ -574,7 +577,7 @@ class AnnotationService @Inject()( volumeTracing <- createVolumeTracing( dataSource, - organization._id, + dataset._organization, dataStore, fallbackLayer = fallbackLayer, boundingBox = boundingBox.flatMap { box => @@ -663,7 +666,8 @@ class AnnotationService @Inject()( user, taskOpt, organizationId, - datasetName) => + datasetName, + datasetId) => for { fetchedAnnotationLayersForAnnotation <- FetchedAnnotationLayer.layersFromTracings(skeletonTracingIdOpt, volumeTracingIdOpt, @@ -678,6 +682,7 @@ class AnnotationService @Inject()( organizationId, conf.Http.uri, datasetName, + datasetId, Some(user), taskOpt, skipVolumeData, @@ -714,7 +719,8 @@ class AnnotationService @Inject()( user, taskOpt, organizationId, - dataset.name) + dataset.name, + dataset._id) def getSkeletonTracings(datasetId: ObjectId, tracingIds: List[Option[String]]): Fox[List[Option[SkeletonTracing]]] = for { @@ -908,6 +914,7 @@ class AnnotationService @Inject()( "stats" -> Json.obj(), // included for legacy parsers "restrictions" -> restrictionsJs, "annotationLayers" -> Json.toJson(annotation.annotationLayers), + "datasetId" -> dataset._id, "dataSetName" -> dataset.name, "organization" -> organization._id, "dataStore" -> dataStoreJs, @@ -953,7 +960,7 @@ class AnnotationService @Inject()( annotationSource = AnnotationSource( id = annotation.id, annotationLayers = annotation.annotationLayers, - datasetName = dataset.name, + datasetDirectoryName = dataset.directoryName, organizationId = organization._id, dataStoreUrl = dataStore.publicUrl, tracingStoreUrl = tracingStore.publicUrl, diff --git a/app/models/annotation/AnnotationUploadService.scala b/app/models/annotation/AnnotationUploadService.scala index bceaee9d59d..bf4814957a9 100644 --- a/app/models/annotation/AnnotationUploadService.scala +++ b/app/models/annotation/AnnotationUploadService.scala @@ -1,8 +1,11 @@ package models.annotation +import com.scalableminds.util.accesscontext.DBAccessContext + import java.io.{File, FileInputStream, InputStream} import java.nio.file.{Files, Path, StandardCopyOption} import com.scalableminds.util.io.ZipIO +import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, TreeGroup} import com.scalableminds.webknossos.datastore.VolumeTracing.{SegmentGroup, VolumeTracing} import com.typesafe.scalalogging.LazyLogging @@ -10,24 +13,29 @@ import files.TempFileService import javax.inject.Inject import models.annotation.nml.NmlResults._ -import models.annotation.nml.{NmlParser, NmlResults} +import models.annotation.nml.{NmlParseSuccessWithoutFile, NmlParser, NmlResults} import net.liftweb.common.{Box, Empty, Failure, Full} import net.liftweb.common.Box.tryo import play.api.i18n.MessagesProvider +import scala.concurrent.ExecutionContext + case class UploadedVolumeLayer(tracing: VolumeTracing, dataZipLocation: String, name: Option[String]) { def getDataZipFrom(otherFiles: Map[String, File]): Option[File] = otherFiles.get(dataZipLocation) } -class AnnotationUploadService @Inject()(tempFileService: TempFileService) extends LazyLogging { +case class SharedParsingParameters(useZipName: Boolean, + 
overwritingDatasetId: Option[String] = None, + isTaskUpload: Boolean = false) - private def extractFromNmlFile(file: File, - name: String, - overwritingDatasetName: Option[String], - overwritingOrganizationId: Option[String], - isTaskUpload: Boolean)(implicit m: MessagesProvider): NmlParseResult = - extractFromNml(new FileInputStream(file), name, overwritingDatasetName, overwritingOrganizationId, isTaskUpload) +class AnnotationUploadService @Inject()(tempFileService: TempFileService, nmlParser: NmlParser) extends LazyLogging { + + private def extractFromNmlFile(file: File, name: String, sharedParsingParameters: SharedParsingParameters)( + implicit m: MessagesProvider, + ec: ExecutionContext, + ctx: DBAccessContext): Fox[NmlParseResult] = + extractFromNml(new FileInputStream(file), name, sharedParsingParameters) private def formatChain(chain: Box[Failure]): String = chain match { case Full(failure) => @@ -37,44 +45,45 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService) extend private def extractFromNml(inputStream: InputStream, name: String, - overwritingDatasetName: Option[String], - overwritingOrganizationId: Option[String], - isTaskUpload: Boolean, - basePath: Option[String] = None)(implicit m: MessagesProvider): NmlParseResult = - NmlParser.parse(name, inputStream, overwritingDatasetName, overwritingOrganizationId, isTaskUpload, basePath) match { - case Full((skeletonTracing, uploadedVolumeLayers, description, wkUrl)) => - NmlParseSuccess(name, skeletonTracing, uploadedVolumeLayers, description, wkUrl) + sharedParsingParameters: SharedParsingParameters, + basePath: Option[String] = None)(implicit m: MessagesProvider, + ec: ExecutionContext, + ctx: DBAccessContext): Fox[NmlParseResult] = { + val parserOutput = + nmlParser.parse( + name, + inputStream, + sharedParsingParameters.overwritingDatasetId, + sharedParsingParameters.isTaskUpload, + basePath + ) + parserOutput.futureBox.map { + case Full(NmlParseSuccessWithoutFile(skeletonTracingOpt, uploadedVolumeLayers, datasetId, description, wkUrl)) => + NmlParseSuccess(name, skeletonTracingOpt, uploadedVolumeLayers, datasetId, description, wkUrl) case Failure(msg, _, chain) => NmlParseFailure(name, msg + chain.map(_ => formatChain(chain)).getOrElse("")) case Empty => NmlParseEmpty(name) } + } - private def extractFromZip(file: File, - zipFileName: Option[String], - useZipName: Boolean, - overwritingDatasetName: Option[String], - overwritingOrganizationId: Option[String], - isTaskUpload: Boolean)(implicit m: MessagesProvider): MultiNmlParseResult = { + private def extractFromZip(file: File, zipFileName: Option[String], sharedParsingParameters: SharedParsingParameters, + )(implicit m: MessagesProvider, ec: ExecutionContext, ctx: DBAccessContext): Fox[MultiNmlParseResult] = { val name = zipFileName getOrElse file.getName var otherFiles = Map.empty[String, File] - var parseResults = List.empty[NmlParseResult] + var pendingResults = List.empty[Fox[NmlParseResult]] ZipIO.withUnziped(file) { (filename, inputStream) => if (filename.toString.endsWith(".nml")) { - val result = - extractFromNml(inputStream, - filename.toString, - overwritingDatasetName, - overwritingOrganizationId, - isTaskUpload, - Some(file.getPath)) - parseResults ::= (if (useZipName) result.withName(name) else result) + val parsedResult = for { + result <- extractFromNml(inputStream, filename.toString, sharedParsingParameters, Some(file.getPath)) + } yield if (sharedParsingParameters.useZipName) result.withName(name) else result + pendingResults ::= 
parsedResult } else { val tempFile: Path = tempFileService.create(file.getPath.replaceAll("/", "_") + filename.toString) Files.copy(inputStream, tempFile, StandardCopyOption.REPLACE_EXISTING) otherFiles += (file.getPath + filename.toString -> tempFile.toFile) } } - MultiNmlParseResult(parseResults, otherFiles) + Fox.combined(pendingResults).map(parsedResults => MultiNmlParseResult(parsedResults, otherFiles)) } def wrapOrPrefixGroups(parseResults: List[NmlParseResult], @@ -94,8 +103,13 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService) extend if (parseResults.length > 1) { parseResults.map { - case NmlParseSuccess(name, Some(skeletonTracing), uploadedVolumeLayers, description, wkUrl) => - NmlParseSuccess(name, Some(renameTrees(name, skeletonTracing)), uploadedVolumeLayers, description, wkUrl) + case NmlParseSuccess(name, Some(skeletonTracing), uploadedVolumeLayers, datasetId, description, wkUrl) => + NmlParseSuccess(name, + Some(renameTrees(name, skeletonTracing)), + uploadedVolumeLayers, + datasetId, + description, + wkUrl) case r => r } } else { @@ -131,68 +145,59 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService) extend volumeLayers.map(v => v.copy(tracing = wrapSegmentsInGroup(name, v.tracing))) parseResults.map { - case NmlParseSuccess(name, Some(skeletonTracing), uploadedVolumeLayers, description, wkUrl) => + case NmlParseSuccess(name, Some(skeletonTracing), uploadedVolumeLayers, datasetId, description, wkUrl) => NmlParseSuccess(name, Some(wrapTreesInGroup(name, skeletonTracing)), wrapVolumeLayers(name, uploadedVolumeLayers), + datasetId, description, wkUrl) case r => r } } - def extractFromFiles(files: Seq[(File, String)], - useZipName: Boolean, - overwritingDatasetName: Option[String] = None, - overwritingOrganizationId: Option[String] = None, - isTaskUpload: Boolean = false)(implicit m: MessagesProvider): MultiNmlParseResult = - files.foldLeft(NmlResults.MultiNmlParseResult()) { - case (acc, (file, name)) => - if (name.endsWith(".zip")) + def extractFromFiles(files: Seq[(File, String)], sharedParams: SharedParsingParameters)( + implicit m: MessagesProvider, + ec: ExecutionContext, + ctx: DBAccessContext): Fox[MultiNmlParseResult] = + Fox.foldLeft(files.iterator, NmlResults.MultiNmlParseResult()) { + case (collectedResults, (file, name)) => + if (name.endsWith(".zip")) { tryo(new java.util.zip.ZipFile(file)).map(ZipIO.forallZipEntries(_)(_.getName.endsWith(".zip"))) match { case Full(allZips) => - if (allZips) - acc.combineWith( - extractFromFiles( - extractFromZip(file, - Some(name), - useZipName, - overwritingDatasetName, - overwritingOrganizationId, - isTaskUpload).otherFiles.toSeq.map(tuple => (tuple._2, tuple._1)), - useZipName, - overwritingDatasetName, - overwritingOrganizationId, - isTaskUpload - )) - else - acc.combineWith( - extractFromFile(file, - name, - useZipName, - overwritingDatasetName, - overwritingOrganizationId, - isTaskUpload)) - case _ => acc - } else - acc.combineWith( - extractFromFile(file, name, useZipName, overwritingDatasetName, overwritingOrganizationId, isTaskUpload)) + if (allZips) { + for { + parsedZipResult <- extractFromZip(file, Some(name), sharedParams) + otherFiles = parsedZipResult.otherFiles.toSeq.map(tuple => (tuple._2, tuple._1)) + parsedFileResults <- extractFromFiles(otherFiles, sharedParams) + } yield collectedResults.combineWith(parsedFileResults) + } else { + for { + parsedFile <- extractFromFile(file, name, sharedParams) + } yield collectedResults.combineWith(parsedFile) + } + case _ 
=> Fox.successful(collectedResults) + } + } else { + for { + parsedFromFile <- extractFromFile(file, name, sharedParams) + } yield collectedResults.combineWith(parsedFromFile) + + } } - private def extractFromFile(file: File, - fileName: String, - useZipName: Boolean, - overwritingDatasetName: Option[String], - overwritingOrganizationId: Option[String], - isTaskUpload: Boolean)(implicit m: MessagesProvider): MultiNmlParseResult = + private def extractFromFile(file: File, fileName: String, sharedParsingParameters: SharedParsingParameters)( + implicit m: MessagesProvider, + ec: ExecutionContext, + ctx: DBAccessContext): Fox[MultiNmlParseResult] = if (fileName.endsWith(".zip")) { logger.trace("Extracting from Zip file") - extractFromZip(file, Some(fileName), useZipName, overwritingDatasetName, overwritingOrganizationId, isTaskUpload) + extractFromZip(file, Some(fileName), sharedParsingParameters) } else { logger.trace("Extracting from Nml file") - val parseResult = - extractFromNmlFile(file, fileName, overwritingDatasetName, overwritingOrganizationId, isTaskUpload) - MultiNmlParseResult(List(parseResult), Map.empty) + for { + parseResult <- extractFromNmlFile(file, fileName, sharedParsingParameters) + } yield MultiNmlParseResult(List(parseResult), Map.empty) } } diff --git a/app/models/annotation/handler/AnnotationInformationHandler.scala b/app/models/annotation/handler/AnnotationInformationHandler.scala index 405e8e8173c..428142ceae5 100755 --- a/app/models/annotation/handler/AnnotationInformationHandler.scala +++ b/app/models/annotation/handler/AnnotationInformationHandler.scala @@ -6,7 +6,7 @@ import javax.inject.Inject import models.annotation.AnnotationType.AnnotationType import models.annotation._ import models.user.User -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.annotation.{nowarn, tailrec} import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/handler/ProjectInformationHandler.scala b/app/models/annotation/handler/ProjectInformationHandler.scala index 5a54e31d04e..4d38c9c1758 100755 --- a/app/models/annotation/handler/ProjectInformationHandler.scala +++ b/app/models/annotation/handler/ProjectInformationHandler.scala @@ -7,7 +7,7 @@ import models.annotation._ import models.project.ProjectDAO import models.user.{User, UserService} -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/handler/SavedTracingInformationHandler.scala b/app/models/annotation/handler/SavedTracingInformationHandler.scala index 1629290e9f7..90f2ff89826 100755 --- a/app/models/annotation/handler/SavedTracingInformationHandler.scala +++ b/app/models/annotation/handler/SavedTracingInformationHandler.scala @@ -8,7 +8,7 @@ import javax.inject.Inject import models.annotation._ import models.dataset.DatasetDAO import models.user.{User, UserService} -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/handler/TaskInformationHandler.scala b/app/models/annotation/handler/TaskInformationHandler.scala index e57b5955c29..1ba9f90ba7f 100755 --- a/app/models/annotation/handler/TaskInformationHandler.scala +++ b/app/models/annotation/handler/TaskInformationHandler.scala @@ -8,7 +8,7 @@ import models.task.TaskDAO import models.user.{User, UserService} import models.annotation.AnnotationState._ import models.project.ProjectDAO -import utils.ObjectId +import 
com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/handler/TaskTypeInformationHandler.scala b/app/models/annotation/handler/TaskTypeInformationHandler.scala index d70427921aa..f51a3b22f55 100755 --- a/app/models/annotation/handler/TaskTypeInformationHandler.scala +++ b/app/models/annotation/handler/TaskTypeInformationHandler.scala @@ -7,7 +7,7 @@ import models.annotation._ import models.task.{TaskDAO, TaskTypeDAO} import models.user.{User, UserService} import models.annotation.AnnotationState._ -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/nml/NmlParser.scala b/app/models/annotation/nml/NmlParser.scala index 47061403075..b1d84d5c647 100755 --- a/app/models/annotation/nml/NmlParser.scala +++ b/app/models/annotation/nml/NmlParser.scala @@ -1,7 +1,10 @@ package models.annotation.nml +import com.scalableminds.util.accesscontext.DBAccessContext import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.ExtendedTypes.{ExtendedDouble, ExtendedString} +import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.JsonHelper.bool2Box import com.scalableminds.webknossos.datastore.SkeletonTracing._ import com.scalableminds.webknossos.datastore.MetadataEntry.MetadataEntryProto @@ -21,15 +24,18 @@ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.Tree import com.scalableminds.webknossos.tracingstore.tracings.skeleton.{MultiComponentTreeSplitter, TreeValidator} import com.typesafe.scalalogging.LazyLogging import models.annotation.UploadedVolumeLayer +import models.dataset.DatasetDAO import net.liftweb.common.Box._ import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.i18n.{Messages, MessagesProvider} import java.io.InputStream +import javax.inject.Inject import scala.collection.{immutable, mutable} +import scala.concurrent.ExecutionContext import scala.xml.{Attribute, NodeSeq, XML, Node => XMLNode} -object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGenerator { +class NmlParser @Inject()(datasetDAO: DatasetDAO) extends LazyLogging with ProtoGeometryImplicits with ColorGenerator { private val DEFAULT_TIME = 0L private val DEFAULT_VIEWPORT = 0 @@ -41,12 +47,12 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener def parse(name: String, nmlInputStream: InputStream, - overwritingDatasetName: Option[String], - overwritingOrganizationId: Option[String], + overwritingDatasetId: Option[String], isTaskUpload: Boolean, - basePath: Option[String] = None)( - implicit m: MessagesProvider): Box[(Option[SkeletonTracing], List[UploadedVolumeLayer], String, Option[String])] = - try { + basePath: Option[String] = None)(implicit m: MessagesProvider, + ec: ExecutionContext, + ctx: DBAccessContext): Fox[NmlParseSuccessWithoutFile] = { + val foxInABox = try { val data = XML.load(nmlInputStream) for { parameters <- (data \ "parameters").headOption ?~ Messages("nml.parameters.notFound") @@ -63,9 +69,10 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener treeGroupsAfterSplit = treesAndGroupsAfterSplitting._2 _ <- TreeValidator.validateTrees(treesSplit, treeGroupsAfterSplit, branchPoints, comments) additionalAxisProtos <- parseAdditionalAxes(parameters \ "additionalAxes") - datasetName = 
overwritingDatasetName.getOrElse(parseDatasetName(parameters \ "experiment")) - organizationId = if (overwritingDatasetName.isDefined) overwritingOrganizationId - else parseOrganizationId(parameters \ "experiment") + datasetName = parseDatasetName(parameters \ "experiment") + datasetIdOpt = if (overwritingDatasetId.isDefined) overwritingDatasetId + else parseDatasetId(parameters \ "experiment") + organizationId = parseOrganizationId(parameters \ "experiment") } yield { val description = parseDescription(parameters \ "experiment") val wkUrl = parseWkUrl(parameters \ "experiment") @@ -84,14 +91,16 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener logger.debug(s"Parsed NML file. Trees: ${treesSplit.size}, Volumes: ${volumes.size}") - val volumeLayers: List[UploadedVolumeLayer] = - volumes.toList.map { v => + for { + datasetIdValidatedOpt <- Fox.runOptional(datasetIdOpt)(ObjectId.fromString) + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetIdValidatedOpt, datasetName, organizationId) + volumeLayers: List[UploadedVolumeLayer] = volumes.toList.map { v => UploadedVolumeLayer( VolumeTracing( activeSegmentId = None, boundingBox = boundingBoxToProto(taskBoundingBox.getOrElse(BoundingBox.empty)), // Note: this property may be adapted later in adaptPropertiesToFallbackLayer createdTimestamp = timestamp, - datasetName = datasetName, + datasetName = dataset.name, editPosition = editPosition, editRotation = editRotation, elementClass = ElementClass.uint32, // Note: this property may be adapted later in adaptPropertiesToFallbackLayer @@ -101,7 +110,7 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener zoomLevel = zoomLevel, userBoundingBox = None, userBoundingBoxes = userBoundingBoxes, - organizationId = organizationId, + organizationId = Some(organizationId), segments = v.segments, mappingName = v.mappingName, mappingIsLocked = v.mappingIsLocked, @@ -114,13 +123,12 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener v.name, ) } - - val skeletonTracingOpt: Option[SkeletonTracing] = - if (treesSplit.isEmpty && userBoundingBoxes.isEmpty) None + skeletonTracingOpt: Option[SkeletonTracing] = if (treesSplit.isEmpty && userBoundingBoxes.isEmpty) + None else Some( SkeletonTracing( - datasetName, + dataset.name, treesSplit, timestamp, taskBoundingBox, @@ -132,13 +140,12 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener None, treeGroupsAfterSplit, userBoundingBoxes, - organizationId, + Some(organizationId), editPositionAdditionalCoordinates, additionalAxes = additionalAxisProtos ) ) - - (skeletonTracingOpt, volumeLayers, description, wkUrl) + } yield NmlParseSuccessWithoutFile(skeletonTracingOpt, volumeLayers, dataset._id, description, wkUrl) } } catch { case e: org.xml.sax.SAXParseException if e.getMessage.startsWith("Premature end of file") => @@ -153,6 +160,16 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener logger.error(s"Failed to parse NML $name due to " + e) Failure(s"Failed to parse NML '$name': " + e.toString) } + foxInABox match { + case Full(value) => value + case Failure(message, cause, _chain) => + logger.error(s"Failed to parse NML $name due to " + cause) + Failure(s"Failed to parse NML '$name': " + message) + case Empty => + logger.error(s"Failed to parse NML $name. 
Parser returned empty") + Failure(s"Failed to parse NML '$name': Parser returned empty") + } + } private def extractTreeGroups(treeGroupContainerNodes: NodeSeq)( implicit m: MessagesProvider): Box[List[TreeGroup]] = { @@ -330,14 +347,17 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener private def parseDatasetName(nodes: NodeSeq): String = nodes.headOption.map(node => getSingleAttribute(node, "name")).getOrElse("") + private def parseDatasetId(nodes: NodeSeq) = + nodes.headOption.flatMap(node => getSingleAttributeOpt(node, "datasetId")) + private def parseDescription(nodes: NodeSeq): String = nodes.headOption.map(node => getSingleAttribute(node, "description")).getOrElse(DEFAULT_DESCRIPTION) private def parseWkUrl(nodes: NodeSeq): Option[String] = nodes.headOption.map(node => getSingleAttribute(node, "wkUrl")) - private def parseOrganizationId(nodes: NodeSeq): Option[String] = - nodes.headOption.flatMap(node => getSingleAttributeOpt(node, "organization")) + private def parseOrganizationId(nodes: NodeSeq): String = + nodes.headOption.map(node => getSingleAttribute(node, "organization")).getOrElse("") private def parseActiveNode(nodes: NodeSeq): Option[Int] = nodes.headOption.flatMap(node => getSingleAttribute(node, "id").toIntOpt) diff --git a/app/models/annotation/nml/NmlResults.scala b/app/models/annotation/nml/NmlResults.scala index a89b6ffac40..55649604abf 100644 --- a/app/models/annotation/nml/NmlResults.scala +++ b/app/models/annotation/nml/NmlResults.scala @@ -1,13 +1,19 @@ package models.annotation.nml -import java.io.File - +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing + +import java.io.File import com.typesafe.scalalogging.LazyLogging import models.annotation.UploadedVolumeLayer import net.liftweb.common.{Box, Empty, Failure, Full} +case class NmlParseSuccessWithoutFile(skeletonTracing: Option[SkeletonTracing], + volumeLayers: List[UploadedVolumeLayer], + datasetId: ObjectId, + description: String, + wkUrl: Option[String]) + object NmlResults extends LazyLogging { sealed trait NmlParseResult { @@ -31,8 +37,9 @@ object NmlResults extends LazyLogging { } case class NmlParseSuccess(fileName: String, - skeletonTracing: Option[SkeletonTracing], + skeletonTracingOpt: Option[SkeletonTracing], volumeLayers: List[UploadedVolumeLayer], + datasetId: ObjectId, _description: String, _wkUrl: Option[String]) extends NmlParseResult { @@ -72,7 +79,7 @@ object NmlResults extends LazyLogging { val successBox = parseResult.toSuccessBox val skeletonBox = successBox match { case Full(success) => - success.skeletonTracing match { + success.skeletonTracingOpt match { case Some(skeleton) => Full(skeleton) case None => Empty } @@ -82,8 +89,8 @@ object NmlResults extends LazyLogging { val volumeBox = successBox match { case Full(success) if success.volumeLayers.length <= 1 => success.volumeLayers.headOption match { - case Some(UploadedVolumeLayer(tracing, dataZipLocation, _)) => - Full((tracing, otherFiles.get(dataZipLocation))) + case Some(volumeLayer) => + Full((volumeLayer, otherFiles.get(volumeLayer.dataZipLocation))) case None => Empty } case Full(success) if success.volumeLayers.length > 1 => @@ -91,13 +98,18 @@ object NmlResults extends LazyLogging { case f: Failure => f case _ => Failure("") } - TracingBoxContainer(successBox.map(_.fileName), successBox.map(_.description), skeletonBox, volumeBox) + 
TracingBoxContainer(successBox.map(_.fileName), + successBox.map(_.description), + skeletonBox, + volumeBox, + successBox.map(_.datasetId)) } } case class TracingBoxContainer(fileName: Box[String], description: Box[Option[String]], skeleton: Box[SkeletonTracing], - volume: Box[(VolumeTracing, Option[File])]) + volume: Box[(UploadedVolumeLayer, Option[File])], + datasetId: Box[ObjectId]) } diff --git a/app/models/annotation/nml/NmlWriter.scala b/app/models/annotation/nml/NmlWriter.scala index ef8ddd14437..3ea35670c17 100644 --- a/app/models/annotation/nml/NmlWriter.scala +++ b/app/models/annotation/nml/NmlWriter.scala @@ -1,6 +1,7 @@ package models.annotation.nml import com.scalableminds.util.io.NamedFunctionStream +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.util.xml.Xml @@ -21,6 +22,7 @@ import javax.xml.stream.{XMLOutputFactory, XMLStreamWriter} import scala.concurrent.ExecutionContext case class NmlParameters( + datasetId: ObjectId, datasetName: String, organizationId: String, description: Option[String], @@ -48,6 +50,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { organizationId: String, wkUrl: String, datasetName: String, + datasetId: ObjectId, annotationOwner: Option[User], annotationTask: Option[Task], skipVolumeData: Boolean = false, @@ -59,17 +62,20 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { new IndentingXMLStreamWriter(outputService.createXMLStreamWriter(os)) for { - nml <- toNmlWithImplicitWriter(annotationLayers, - annotation, - scale, - volumeFilename, - organizationId, - wkUrl, - datasetName, - annotationOwner, - annotationTask, - skipVolumeData, - volumeDataZipFormat) + nml <- toNmlWithImplicitWriter( + annotationLayers, + annotation, + scale, + volumeFilename, + organizationId, + wkUrl, + datasetName, + datasetId, + annotationOwner, + annotationTask, + skipVolumeData, + volumeDataZipFormat + ) } yield nml } ) @@ -82,6 +88,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { organizationId: String, wkUrl: String, datasetName: String, + datasetId: ObjectId, annotationOwner: Option[User], annotationTask: Option[Task], skipVolumeData: Boolean, @@ -100,6 +107,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { organizationId, wkUrl, datasetName, + datasetId, voxelSize) _ = writeParameters(parameters) _ = annotationLayers.filter(_.typ == AnnotationLayerType.Skeleton).map(_.tracing).foreach { @@ -127,12 +135,14 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { organizationId: String, wkUrl: String, datasetName: String, + datasetId: ObjectId, voxelSize: Option[VoxelSize]): Fox[NmlParameters] = for { parameterSourceAnnotationLayer <- selectLayerWithPrecedence(skeletonLayers, volumeLayers) nmlParameters = parameterSourceAnnotationLayer.tracing match { case Left(s) => NmlParameters( + datasetId, datasetName, organizationId, annotation.map(_.description), @@ -150,6 +160,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { ) case Right(v) => NmlParameters( + datasetId, datasetName, organizationId, annotation.map(_.description), @@ -182,6 +193,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { Xml.withinElementSync("experiment") { writer.writeAttribute("name", parameters.datasetName) writer.writeAttribute("organization", 
parameters.organizationId) + writer.writeAttribute("datasetId", parameters.datasetId.toString) parameters.description.foreach(writer.writeAttribute("description", _)) writer.writeAttribute("wkUrl", parameters.wkUrl) } diff --git a/app/models/configuration/DatasetConfigurationService.scala b/app/models/configuration/DatasetConfigurationService.scala index 304ab039b6b..85bd80d905b 100644 --- a/app/models/configuration/DatasetConfigurationService.scala +++ b/app/models/configuration/DatasetConfigurationService.scala @@ -1,6 +1,7 @@ package models.configuration import com.scalableminds.util.accesscontext.DBAccessContext +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.models.datasource.DataLayerLike import com.scalableminds.webknossos.datastore.models.datasource.DatasetViewConfiguration.DatasetViewConfiguration @@ -19,26 +20,21 @@ class DatasetConfigurationService @Inject()(datasetService: DatasetService, datasetDAO: DatasetDAO, thumbnailCachingService: ThumbnailCachingService, datasetDataLayerDAO: DatasetLayerDAO)(implicit ec: ExecutionContext) { - def getDatasetViewConfigurationForUserAndDataset( - requestedVolumeIds: List[String], - user: User, - datasetName: String, - organizationId: String)(implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = + def getDatasetViewConfigurationForUserAndDataset(requestedVolumeIds: List[String], user: User, datasetId: ObjectId)( + implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) + dataset <- datasetDAO.findOne(datasetId) datasetViewConfiguration <- userDatasetConfigurationDAO.findOneForUserAndDataset(user._id, dataset._id) datasetLayers <- datasetService.allLayersFor(dataset) layerConfigurations <- getLayerConfigurations(datasetLayers, requestedVolumeIds, dataset, Some(user)) } yield buildCompleteDatasetConfiguration(datasetViewConfiguration, layerConfigurations) - def getDatasetViewConfigurationForDataset( - requestedVolumeIds: List[String], - datasetName: String, - organizationId: String)(implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = + def getDatasetViewConfigurationForDataset(requestedVolumeIds: List[String], datasetId: ObjectId)( + implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) + dataset <- datasetDAO.findOne(datasetId) datasetViewConfiguration = getDatasetViewConfigurationFromDefaultAndAdmin(dataset) @@ -52,10 +48,10 @@ class DatasetConfigurationService @Inject()(datasetService: DatasetService, defaultVC ++ adminVC } - def getCompleteAdminViewConfiguration(datasetName: String, organizationId: String)( + def getCompleteAdminViewConfiguration(datasetId: ObjectId)( implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) + dataset <- datasetDAO.findOne(datasetId) datasetViewConfiguration = getDatasetViewConfigurationFromDefaultAndAdmin(dataset) datasetLayers <- datasetService.allLayersFor(dataset) layerConfigurations = getAllLayerAdminViewConfigForDataset(datasetLayers).view.mapValues(Json.toJson(_)).toMap diff --git a/app/models/dataset/DataStore.scala b/app/models/dataset/DataStore.scala index 9f14f10ac7b..e4450c3cf15 100644 --- a/app/models/dataset/DataStore.scala +++ b/app/models/dataset/DataStore.scala @@ -1,6 +1,7 @@ package models.dataset 
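// Illustrative sketch, not part of the diff: with the NmlWriter change above, the NML <experiment>
// element now carries a datasetId attribute next to the dataset name and organization (attribute
// values below are made up):
//   <experiment name="my_dataset" organization="sample_org" datasetId="66f2b5d0e7acab1a2b3c4d5e" wkUrl="https://wk.example"/>
// On upload the id is preferred; name plus organization remain as a fallback for legacy files
// (see DatasetDAO.findOneByIdOrNameAndOrganization further below). A toy version of that rule:
def datasetLookupKey(datasetIdAttr: Option[String], nameAttr: String, organizationAttr: String): String =
  datasetIdAttr match {
    case Some(id) => s"by dataset id $id"                                        // new-style NML
    case None     => s"by name '$nameAttr' in organization '$organizationAttr'"  // legacy NML, oldest match wins
  }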
import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.schema.Tables._ import models.job.JobService @@ -12,7 +13,7 @@ import play.api.mvc.{Result, Results} import slick.jdbc.PostgresProfile.api._ import slick.lifted.Rep import utils.sql.{SQLDAO, SqlClient, SqlToken} -import utils.{ObjectId, WkConf} +import utils.WkConf import scala.concurrent.{ExecutionContext, Future} diff --git a/app/models/dataset/Dataset.scala b/app/models/dataset/Dataset.scala index 1e25d2fef91..64136053acd 100755 --- a/app/models/dataset/Dataset.scala +++ b/app/models/dataset/Dataset.scala @@ -2,6 +2,7 @@ package models.dataset import com.scalableminds.util.accesscontext.DBAccessContext import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} import com.scalableminds.webknossos.datastore.models.{LengthUnit, VoxelSize} @@ -25,6 +26,7 @@ import controllers.DatasetUpdateParameters import javax.inject.Inject import models.organization.OrganizationDAO +import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.json._ import play.utils.UriEncoding import slick.jdbc.PostgresProfile.api._ @@ -32,7 +34,6 @@ import slick.jdbc.TransactionIsolation.Serializable import slick.lifted.Rep import slick.sql.SqlAction import utils.sql.{SQLDAO, SimpleSQLDAO, SqlClient, SqlToken} -import utils.ObjectId import scala.concurrent.ExecutionContext @@ -46,7 +47,7 @@ case class Dataset(_id: ObjectId, defaultViewConfiguration: Option[DatasetViewConfiguration] = None, adminViewConfiguration: Option[DatasetViewConfiguration] = None, description: Option[String] = None, - displayName: Option[String] = None, + directoryName: String, isPublic: Boolean, isUsable: Boolean, name: String, @@ -71,7 +72,7 @@ case class DatasetCompactInfo( owningOrganization: String, folderId: ObjectId, isActive: Boolean, - displayName: String, + directoryName: String, created: Instant, isEditable: Boolean, lastUsedByUser: Instant, @@ -81,7 +82,7 @@ case class DatasetCompactInfo( colorLayerNames: List[String], segmentationLayerNames: List[String], ) { - def dataSourceId = new DataSourceId(name, owningOrganization) + def dataSourceId = new DataSourceId(directoryName, owningOrganization) } object DatasetCompactInfo { @@ -132,7 +133,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA defaultViewConfigurationOpt, adminViewConfigurationOpt, r.description, - r.displayname, + r.directoryname, r.ispublic, r.isusable, r.name, @@ -252,7 +253,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA o._id, d._folder, d.isUsable, - d.displayName, + d.directoryName, d.created, COALESCE( ( @@ -318,7 +319,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA owningOrganization = row._3, folderId = row._4, isActive = row._5, - displayName = row._6, + directoryName = row._6, created = row._7, isEditable = row._8, lastUsedByUser = row._9, @@ -400,24 +401,68 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA r <- rList.headOption } yield r + def findOneByDirectoryNameAndOrganization(directoryName: String, organizationId: String)( + implicit ctx: DBAccessContext): Fox[Dataset] = + for { + accessQuery <- readAccessQuery 
+ r <- run(q"""SELECT $columns + FROM $existingCollectionName + WHERE directoryName = $directoryName + AND _organization = $organizationId + AND $accessQuery + LIMIT 1""".as[DatasetsRow]) + parsed <- parseFirst(r, s"$organizationId/$directoryName") + } yield parsed + + def findOneByDataSourceId(dataSourceId: DataSourceId)(implicit ctx: DBAccessContext): Fox[Dataset] = + findOneByDirectoryNameAndOrganization(dataSourceId.directoryName, dataSourceId.organizationId) + + def doesDatasetDirectoryExistInOrganization(directoryName: String, organizationId: String)( + implicit ctx: DBAccessContext): Fox[Boolean] = + for { + accessQuery <- readAccessQuery + r <- run(q"""SELECT EXISTS(SELECT 1 + FROM $existingCollectionName + WHERE directoryName = $directoryName + AND _organization = $organizationId + AND $accessQuery + LIMIT 1)""".as[Boolean]) + exists <- r.headOption + } yield exists + + // Datasets are looked up by name and directoryName, as datasets from before dataset renaming was possible + // should have their directory name equal to their name during the time the link was created. This heuristic should + // have the best expected outcome as it expect to find the dataset by directoryName and it to be the oldest. In case + // someone renamed a dataset and created the link with a tool that uses the outdated dataset identification, the dataset should still be found. def findOneByNameAndOrganization(name: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[Dataset] = for { accessQuery <- readAccessQuery r <- run(q"""SELECT $columns FROM $existingCollectionName - WHERE name = $name + WHERE (directoryName = $name OR name = $name) AND _organization = $organizationId - AND $accessQuery""".as[DatasetsRow]) + AND $accessQuery + ORDER BY created ASC + LIMIT 1""".as[DatasetsRow]) parsed <- parseFirst(r, s"$organizationId/$name") } yield parsed - def findAllByNamesAndOrganization(names: List[String], organizationId: String)( + def findOneByIdOrNameAndOrganization(datasetIdOpt: Option[ObjectId], datasetName: String, organizationId: String)( + implicit ctx: DBAccessContext, + m: MessagesProvider): Fox[Dataset] = + datasetIdOpt + .map(datasetId => findOne(datasetId)) + .getOrElse(findOneByNameAndOrganization(datasetName, organizationId)) ?~> Messages( + "dataset.notFoundByIdOrName", + datasetIdOpt.map(_.toString).getOrElse(datasetName)) + + def findAllByDirectoryNamesAndOrganization(directoryNames: List[String], organizationId: String)( implicit ctx: DBAccessContext): Fox[List[Dataset]] = for { accessQuery <- readAccessQuery r <- run(q"""SELECT $columns FROM $existingCollectionName - WHERE name IN ${SqlToken.tupleFromList(names)} + WHERE directoryName IN ${SqlToken.tupleFromList(directoryNames)} AND _organization = $organizationId AND $accessQuery""".as[DatasetsRow]).map(_.toList) parsed <- parseAll(r) @@ -442,7 +487,8 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA FROM $existingCollectionName WHERE name = $datasetName AND $accessQuery - ORDER BY created ASC""".as[String]) + ORDER BY created ASC + LIMIT 1""".as[String]) r <- rList.headOption.toFox } yield r @@ -453,32 +499,30 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA r <- rList.headOption.toFox } yield r - def getSharingTokenByName(name: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[Option[String]] = + def getSharingTokenById(datasetId: ObjectId)(implicit ctx: DBAccessContext): Fox[Option[String]] = for { accessQuery <- readAccessQuery rList <- 
run(q"""SELECT sharingToken FROM webknossos.datasets_ - WHERE name = $name - AND _organization = $organizationId + WHERE _id = $datasetId AND $accessQuery""".as[Option[String]]) r <- rList.headOption.toFox } yield r - def updateSharingTokenByName(name: String, organizationId: String, sharingToken: Option[String])( + def updateSharingTokenById(datasetId: ObjectId, sharingToken: Option[String])( implicit ctx: DBAccessContext): Fox[Unit] = for { - accessQuery <- readAccessQuery + accessQuery <- readAccessQuery // Read access is enough here, we want to allow anyone who can see this data to create url sharing links. _ <- run(q"""UPDATE webknossos.datasets SET sharingToken = $sharingToken - WHERE name = $name - AND _organization = $organizationId + WHERE _id = $datasetId AND $accessQuery""".asUpdate) } yield () def updatePartial(datasetId: ObjectId, params: DatasetUpdateParameters)(implicit ctx: DBAccessContext): Fox[Unit] = { val setQueries = List( params.description.map(d => q"description = $d"), - params.displayName.map(v => q"displayName = $v"), + params.name.map(v => q"name = $v"), params.sortingKey.map(v => q"sortingKey = $v"), params.isPublic.map(v => q"isPublic = $v"), params.tags.map(v => q"tags = $v"), @@ -502,7 +546,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA def updateFields(datasetId: ObjectId, description: Option[String], - displayName: Option[String], + name: Option[String], sortingKey: Instant, isPublic: Boolean, tags: List[String], @@ -510,7 +554,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA folderId: ObjectId)(implicit ctx: DBAccessContext): Fox[Unit] = { val updateParameters = new DatasetUpdateParameters( description = Some(description), - displayName = Some(displayName), + name = Some(name), sortingKey = Some(sortingKey), isPublic = Some(isPublic), tags = Some(tags), @@ -559,7 +603,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA _id, _dataStore, _organization, _publication, _uploader, _folder, inboxSourceHash, defaultViewConfiguration, adminViewConfiguration, - description, displayName, isPublic, isUsable, + description, directoryName, isPublic, isUsable, name, voxelSizeFactor, voxelSizeUnit, status, sharingToken, sortingKey, metadata, tags, created, isDeleted @@ -568,7 +612,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA ${d._id}, ${d._dataStore}, ${d._organization}, ${d._publication}, ${d._uploader}, ${d._folder}, ${d.inboxSourceHash}, $defaultViewConfiguration, $adminViewConfiguration, - ${d.description}, ${d.displayName}, ${d.isPublic}, ${d.isUsable}, + ${d.description}, ${d.directoryName}, ${d.isPublic}, ${d.isUsable}, ${d.name}, ${d.voxelSize.map(_.factor)}, ${d.voxelSize.map(_.unit)}, ${d.status.take(1024)}, ${d.sharingToken}, ${d.sortingKey}, ${d.metadata}, ${d.tags}, ${d.created}, ${d.isDeleted} @@ -576,13 +620,13 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA } yield () } - def updateDataSourceByNameAndOrganization(id: ObjectId, - dataStoreName: String, - inboxSourceHash: Int, - source: InboxDataSource, - isUsable: Boolean)(implicit ctx: DBAccessContext): Fox[Unit] = + def updateDataSourceByDatasetId(id: ObjectId, + dataStoreName: String, + inboxSourceHash: Int, + source: InboxDataSource, + isUsable: Boolean)(implicit ctx: DBAccessContext): Fox[Unit] = for { - organization <- organizationDAO.findOne(source.id.team) + organization <- 
organizationDAO.findOne(source.id.organizationId) defaultViewConfiguration: Option[JsValue] = source.defaultViewConfiguration.map(Json.toJson(_)) _ <- run(q"""UPDATE webknossos.datasets SET diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index a2a0c704e5f..fe92c3be2ea 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -1,6 +1,7 @@ package models.dataset import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.datasource.inbox.{ @@ -8,8 +9,8 @@ import com.scalableminds.webknossos.datastore.models.datasource.inbox.{ InboxDataSourceLike => InboxDataSource } import com.scalableminds.webknossos.datastore.models.datasource.{ - DataSourceId, GenericDataSource, + DataSourceId, DataLayerLike => DataLayer } import com.scalableminds.webknossos.datastore.rpc.RPC @@ -21,7 +22,7 @@ import models.user.{User, UserService} import net.liftweb.common.{Box, Full} import play.api.libs.json.{JsObject, Json} import security.RandomIDGenerator -import utils.{ObjectId, WkConf} +import utils.WkConf import javax.inject.Inject import scala.concurrent.duration._ @@ -62,12 +63,9 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, _ <- bool2Fox(!name.startsWith(".")) ?~> "dataset.layer.name.invalid.startsWithDot" } yield () - def assertNewDatasetName(name: String, organizationId: String): Fox[Unit] = - datasetDAO.findOneByNameAndOrganization(name, organizationId)(GlobalAccessContext).reverse - def createPreliminaryDataset(datasetName: String, organizationId: String, dataStore: DataStore): Fox[Dataset] = { val unreportedDatasource = UnusableDataSource(DataSourceId(datasetName, organizationId), notYetUploadedStatus) - createDataset(dataStore, organizationId, unreportedDatasource) + createDataset(dataStore, organizationId, datasetName, unreportedDatasource) } def getAllUnfinishedDatasetUploadsOfUser(userId: ObjectId, organizationId: String)( @@ -85,6 +83,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, private def createDataset( dataStore: DataStore, owningOrganization: String, + datasetName: String, dataSource: InboxDataSource, publication: Option[ObjectId] = None ): Fox[Dataset] = { @@ -103,6 +102,10 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, for { organization <- organizationDAO.findOne(owningOrganization) organizationRootFolder <- folderDAO.findOne(organization._rootFolder) + datasetDirectoryName <- datasetDAO + .doesDatasetDirectoryExistInOrganization(datasetName, organization._id) + .map(if (_) s"${datasetName}-${newId.toString}" else datasetName) + newDataSource = dataSource.withUpdatedId(dataSource.id.copy(directoryName = datasetDirectoryName)) dataset = Dataset( newId, dataStore.name, @@ -111,21 +114,21 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, None, organizationRootFolder._id, dataSourceHash, - dataSource.defaultViewConfiguration, + newDataSource.defaultViewConfiguration, adminViewConfiguration = None, description = None, - displayName = None, + directoryName = datasetDirectoryName, isPublic = false, - isUsable = dataSource.isUsable, - name = dataSource.id.name, - voxelSize = dataSource.voxelSizeOpt, + isUsable = newDataSource.isUsable, + name = datasetName, + voxelSize = newDataSource.voxelSizeOpt, 
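// Illustrative sketch, not part of the diff: createDataset above derives the on-disk directory name
// from the requested dataset name. If that directory already exists in the organization, the new
// dataset's ObjectId is appended, so directory names stay unique while display names may repeat.
def deriveDirectoryName(requestedName: String, newDatasetId: String, directoryExists: Boolean): String =
  if (directoryExists) s"$requestedName-$newDatasetId" else requestedName

// e.g. deriveDirectoryName("cortex", "66f2b5d0e7acab1a2b3c4d5e", directoryExists = true)
//      returns "cortex-66f2b5d0e7acab1a2b3c4d5e"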
sharingToken = None, - status = dataSource.statusOpt.getOrElse(""), + status = newDataSource.statusOpt.getOrElse(""), logoUrl = None, metadata = metadata ) _ <- datasetDAO.insertOne(dataset) - _ <- datasetDataLayerDAO.updateLayers(newId, dataSource) + _ <- datasetDataLayerDAO.updateLayers(newId, newDataSource) _ <- teamDAO.updateAllowedTeamsForDataset(newId, List()) } yield dataset } @@ -133,7 +136,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, def updateDataSources(dataStore: DataStore, dataSources: List[InboxDataSource])( implicit ctx: DBAccessContext): Fox[List[ObjectId]] = { - val groupedByOrga = dataSources.groupBy(_.id.team).toList + val groupedByOrga = dataSources.groupBy(_.id.organizationId).toList Fox .serialCombined(groupedByOrga) { orgaTuple: (String, List[InboxDataSource]) => organizationDAO @@ -146,10 +149,11 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, Fox.successful(List.empty) case Full(organization) => for { - foundDatasets <- datasetDAO.findAllByNamesAndOrganization(orgaTuple._2.map(_.id.name), organization._id) - foundDatasetsByName = foundDatasets.groupBy(_.name) + foundDatasets <- datasetDAO.findAllByDirectoryNamesAndOrganization(orgaTuple._2.map(_.id.directoryName), + organization._id) + foundDatasetsByDirectoryName = foundDatasets.groupBy(_.directoryName) existingIds <- Fox.serialCombined(orgaTuple._2)(dataSource => - updateDataSource(dataStore, dataSource, foundDatasetsByName)) + updateDataSource(dataStore, dataSource, foundDatasetsByDirectoryName)) } yield existingIds.flatten case _ => logger.info( @@ -164,16 +168,17 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, private def updateDataSource( dataStore: DataStore, dataSource: InboxDataSource, - foundDatasets: Map[String, List[Dataset]] + foundDatasetsByDirectoryName: Map[String, List[Dataset]] )(implicit ctx: DBAccessContext): Fox[Option[ObjectId]] = { - val foundDatasetOpt = foundDatasets.get(dataSource.id.name).flatMap(_.headOption) + val foundDatasetOpt = foundDatasetsByDirectoryName.get(dataSource.id.directoryName).flatMap(_.headOption) foundDatasetOpt match { case Some(foundDataset) if foundDataset._dataStore == dataStore.name => updateKnownDataSource(foundDataset, dataSource, dataStore).toFox.map(Some(_)) case Some(foundDataset) => // This only returns None for Datasets that are present on a normal Datastore but also got reported from a scratch Datastore updateDataSourceDifferentDataStore(foundDataset, dataSource, dataStore) case _ => - insertNewDataset(dataSource, dataStore).toFox.map(Some(_)) + insertNewDataset(dataSource, dataSource.id.directoryName, dataStore).toFox + .map(Some(_)) // TODO: Discuss how to better handle this case } } @@ -184,11 +189,11 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, else for { _ <- thumbnailCachingService.removeFromCache(foundDataset._id) - _ <- datasetDAO.updateDataSourceByNameAndOrganization(foundDataset._id, - dataStore.name, - dataSource.hashCode, - dataSource, - dataSource.isUsable) + _ <- datasetDAO.updateDataSourceByDatasetId(foundDataset._id, + dataStore.name, + dataSource.hashCode, + dataSource, + dataSource.isUsable) } yield foundDataset._id private def updateDataSourceDifferentDataStore( @@ -201,26 +206,26 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, } yield { if (originalDataStore.isScratch && !dataStore.isScratch || isUnreported(foundDataset)) { logger.info( - s"Replacing dataset ${foundDataset.name} (status: ${foundDataset.status}) from datastore 
${originalDataStore.name} by the one from ${dataStore.name}" + s"Replacing dataset ${foundDataset.name} (with id ${foundDataset._id} and status: ${foundDataset.status}) from datastore ${originalDataStore.name} by the one from ${dataStore.name}" ) for { _ <- thumbnailCachingService.removeFromCache(foundDataset._id) - _ <- datasetDAO.updateDataSourceByNameAndOrganization(foundDataset._id, - dataStore.name, - dataSource.hashCode, - dataSource, - dataSource.isUsable)(GlobalAccessContext) + _ <- datasetDAO.updateDataSourceByDatasetId(foundDataset._id, + dataStore.name, + dataSource.hashCode, + dataSource, + dataSource.isUsable)(GlobalAccessContext) } yield Some(foundDataset._id) } else { logger.info( - s"Dataset ${foundDataset.name}, as reported from ${dataStore.name} is already present from datastore ${originalDataStore.name} and will not be replaced.") + s"Dataset ${foundDataset.name}, as reported from ${dataStore.name}, is already present as id ${foundDataset._id} from datastore ${originalDataStore.name} and will not be replaced.") Fox.successful(None) } }).flatten.futureBox - private def insertNewDataset(dataSource: InboxDataSource, dataStore: DataStore) = + private def insertNewDataset(dataSource: InboxDataSource, datasetName: String, dataStore: DataStore) = publicationForFirstDataset.flatMap { publicationId: Option[ObjectId] => - createDataset(dataStore, dataSource.id.team, dataSource, publicationId).map(_._id) + createDataset(dataStore, dataSource.id.organizationId, datasetName, dataSource, publicationId).map(_._id) }.futureBox private def publicationForFirstDataset: Fox[Option[ObjectId]] = @@ -236,17 +241,17 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, def deactivateUnreportedDataSources(existingDatasetIds: List[ObjectId], dataStore: DataStore): Fox[Unit] = datasetDAO.deactivateUnreported(existingDatasetIds, dataStore.name, unreportedStatus, inactiveStatusList) - def getSharingToken(datasetName: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[String] = { + def getSharingToken(datasetId: ObjectId)(implicit ctx: DBAccessContext): Fox[String] = { - def createAndSaveSharingToken(datasetName: String)(implicit ctx: DBAccessContext): Fox[String] = + def createAndSaveSharingToken(datasetId: ObjectId)(implicit ctx: DBAccessContext): Fox[String] = for { tokenValue <- new RandomIDGenerator().generate - _ <- datasetDAO.updateSharingTokenByName(datasetName, organizationId, Some(tokenValue)) + _ <- datasetDAO.updateSharingTokenById(datasetId, Some(tokenValue)) } yield tokenValue - datasetDAO.getSharingTokenByName(datasetName, organizationId).flatMap { + datasetDAO.getSharingTokenById(datasetId).flatMap { case Some(oldToken) => Fox.successful(oldToken) - case None => createAndSaveSharingToken(datasetName) + case None => createAndSaveSharingToken(datasetId) } } @@ -256,7 +261,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> "organization.notFound" } dataLayers <- datasetDataLayerDAO.findAllForDataset(dataset._id) - dataSourceId = DataSourceId(dataset.name, organization._id) + dataSourceId = DataSourceId(dataset.directoryName, organization._id) } yield { if (dataset.isUsable) for { @@ -358,6 +363,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, organizationDAO.getUsedStorageForDataset(dataset._id)) } yield { Json.obj( + "id" -> dataset._id, "name" -> dataset.name, "dataSource" -> dataSource, "dataStore" -> dataStoreJs, @@ -367,7 +373,7 @@ class 
DatasetService @Inject()(organizationDAO: OrganizationDAO, "isActive" -> dataset.isUsable, "isPublic" -> dataset.isPublic, "description" -> dataset.description, - "displayName" -> dataset.displayName, + "directoryName" -> dataset.directoryName, "created" -> dataset.created, "isEditable" -> isEditable, "lastUsedByUser" -> lastUsedByUser, diff --git a/app/models/dataset/Publication.scala b/app/models/dataset/Publication.scala index ee42cc4a9cf..e15153d3c14 100644 --- a/app/models/dataset/Publication.scala +++ b/app/models/dataset/Publication.scala @@ -9,7 +9,7 @@ import play.api.http.Status.NOT_FOUND import play.api.libs.json.Format.GenericFormat import play.api.libs.json.{JsObject, Json} import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import utils.sql.{SQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/models/dataset/ThumbnailService.scala b/app/models/dataset/ThumbnailService.scala index ff4e4e5ecc3..992fa688088 100644 --- a/app/models/dataset/ThumbnailService.scala +++ b/app/models/dataset/ThumbnailService.scala @@ -14,8 +14,8 @@ import models.configuration.DatasetConfigurationService import net.liftweb.common.Full import play.api.http.Status.NOT_FOUND import play.api.i18n.{Messages, MessagesProvider} +import com.scalableminds.util.objectid.ObjectId import play.api.libs.json.{JsArray, JsObject} -import utils.ObjectId import utils.sql.{SimpleSQLDAO, SqlClient} import javax.inject.Inject @@ -36,8 +36,7 @@ class ThumbnailService @Inject()(datasetService: DatasetService, private val MaxThumbnailHeight = 4000 def getThumbnailWithCache( - organizationId: String, - datasetName: String, + datasetIdValidated: ObjectId, layerName: String, w: Option[Int], h: Option[Int], @@ -45,35 +44,28 @@ class ThumbnailService @Inject()(datasetService: DatasetService, val width = com.scalableminds.util.tools.Math.clamp(w.getOrElse(DefaultThumbnailWidth), 1, MaxThumbnailWidth) val height = com.scalableminds.util.tools.Math.clamp(h.getOrElse(DefaultThumbnailHeight), 1, MaxThumbnailHeight) for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(GlobalAccessContext) + dataset <- datasetDAO.findOne(datasetIdValidated)(GlobalAccessContext) image <- thumbnailCachingService.getOrLoad( dataset._id, layerName, width, height, mappingName, - _ => - getThumbnail(organizationId, datasetName, layerName, width, height, mappingName)(ec, GlobalAccessContext, mp) + _ => getThumbnail(dataset, layerName, width, height, mappingName)(ec, GlobalAccessContext, mp) ) } yield image } - private def getThumbnail(organizationId: String, - datasetName: String, - layerName: String, - width: Int, - height: Int, - mappingName: Option[String])(implicit ec: ExecutionContext, - ctx: DBAccessContext, - mp: MessagesProvider): Fox[Array[Byte]] = + private def getThumbnail(dataset: Dataset, layerName: String, width: Int, height: Int, mappingName: Option[String])( + implicit ec: ExecutionContext, + ctx: DBAccessContext, + mp: MessagesProvider): Fox[Array[Byte]] = for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) dataSource <- datasetService.dataSourceFor(dataset) ?~> "dataSource.notFound" ~> NOT_FOUND usableDataSource <- dataSource.toUsable.toFox ?~> "dataset.notImported" layer <- usableDataSource.dataLayers.find(_.name == layerName) ?~> Messages("dataLayer.notFound", layerName) ~> NOT_FOUND - viewConfiguration <- datasetConfigurationService.getDatasetViewConfigurationForDataset(List.empty, - datasetName, - 
organizationId)(ctx) + viewConfiguration <- datasetConfigurationService.getDatasetViewConfigurationForDataset(List.empty, dataset._id)( + ctx) (mag1BoundingBox, mag, intensityRangeOpt, colorSettingsOpt, mapping) = selectParameters(viewConfiguration, usableDataSource, layerName, @@ -82,8 +74,7 @@ class ThumbnailService @Inject()(datasetService: DatasetService, height, mappingName) client <- datasetService.clientFor(dataset) - image <- client.getDataLayerThumbnail(organizationId, - dataset, + image <- client.getDataLayerThumbnail(dataset, layerName, mag1BoundingBox, mag, @@ -169,7 +160,7 @@ class ThumbnailService @Inject()(datasetService: DatasetService, case class ThumbnailColorSettings(color: Color, isInverted: Boolean) -class ThumbnailCachingService @Inject()(datasetDAO: DatasetDAO, thumbnailDAO: ThumbnailDAO) { +class ThumbnailCachingService @Inject()(thumbnailDAO: ThumbnailDAO) { private val ThumbnailCacheDuration = 10 days // First cache is in memory, then in postgres. @@ -197,12 +188,6 @@ class ThumbnailCachingService @Inject()(datasetDAO: DatasetDAO, thumbnailDAO: Th } yield fromDbOrNew ) - def removeFromCache(organizationId: String, datasetName: String): Fox[Unit] = - for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(GlobalAccessContext) - _ <- removeFromCache(dataset._id) - } yield () - def removeFromCache(datasetId: ObjectId): Fox[Unit] = { inMemoryThumbnailCache.clear(keyTuple => keyTuple._1 == datasetId) thumbnailDAO.removeAllForDataset(datasetId) diff --git a/app/models/dataset/WKRemoteDataStoreClient.scala b/app/models/dataset/WKRemoteDataStoreClient.scala index 4f21d737eec..d9f20c0b477 100644 --- a/app/models/dataset/WKRemoteDataStoreClient.scala +++ b/app/models/dataset/WKRemoteDataStoreClient.scala @@ -16,7 +16,7 @@ import com.typesafe.scalalogging.LazyLogging import controllers.RpcTokenHolder import play.api.libs.json.JsObject import play.utils.UriEncoding -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationInt @@ -26,8 +26,7 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin private lazy val hasSegmentIndexFileCache: AlfuCache[(String, String, String), Boolean] = AlfuCache(timeToLive = 1 minute) - def getDataLayerThumbnail(organizationId: String, - dataset: Dataset, + def getDataLayerThumbnail(dataset: Dataset, dataLayerName: String, mag1BoundingBox: BoundingBox, mag: Vec3Int, @@ -35,8 +34,9 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin intensityRangeOpt: Option[(Double, Double)], colorSettingsOpt: Option[ThumbnailColorSettings]): Fox[Array[Byte]] = { val targetMagBoundingBox = mag1BoundingBox / mag - logger.debug(s"Thumbnail called for: $organizationId/${dataset.name}, Layer: $dataLayerName") - rpc(s"${dataStore.url}/data/datasets/${urlEncode(organizationId)}/${dataset.urlEncodedName}/layers/$dataLayerName/thumbnail.jpg") + logger.debug(s"Thumbnail called for: ${dataset._id}, organization: ${dataset._organization}, directoryName: ${dataset.directoryName}, Layer: $dataLayerName") + rpc( + s"${dataStore.url}/data/datasets/${urlEncode(dataset._organization)}/${urlEncode(dataset.directoryName)}/layers/$dataLayerName/thumbnail.jpg") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("mag" -> mag.toMagLiteral()) .addQueryString("x" -> mag1BoundingBox.topLeft.x.toString) @@ -52,8 +52,7 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: 
RPC) extends LazyLoggin .getWithBytesResponse } - def getLayerData(organizationId: String, - dataset: Dataset, + def getLayerData(dataset: Dataset, layerName: String, mag1BoundingBox: BoundingBox, mag: Vec3Int, @@ -61,15 +60,14 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin val targetMagBoundingBox = mag1BoundingBox / mag logger.debug(s"Fetching raw data. Mag $mag, mag1 bbox: $mag1BoundingBox, target-mag bbox: $targetMagBoundingBox") rpc( - s"${dataStore.url}/data/datasets/${urlEncode(organizationId)}/${dataset.urlEncodedName}/layers/$layerName/readData") + s"${dataStore.url}/data/datasets/${urlEncode(dataset._organization)}/${urlEncode(dataset.directoryName)}/layers/$layerName/readData") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .postJsonWithBytesResponse( RawCuboidRequest(mag1BoundingBox.topLeft, targetMagBoundingBox.size, mag, additionalCoordinates)) } - def findPositionWithData(organizationId: String, dataset: Dataset, dataLayerName: String): Fox[JsObject] = - rpc( - s"${dataStore.url}/data/datasets/${urlEncode(organizationId)}/${dataset.urlEncodedName}/layers/$dataLayerName/findData") + def findPositionWithData(dataset: Dataset, dataLayerName: String): Fox[JsObject] = + rpc(s"${dataStore.url}/data/datasets/${dataset._id}/layers/$dataLayerName/findData") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .getWithJsonResponse[JsObject] diff --git a/app/models/dataset/credential/CredentialDAO.scala b/app/models/dataset/credential/CredentialDAO.scala index bb74ca7b842..0ad7d5746d5 100644 --- a/app/models/dataset/credential/CredentialDAO.scala +++ b/app/models/dataset/credential/CredentialDAO.scala @@ -11,7 +11,7 @@ import com.scalableminds.webknossos.schema.Tables.{Credentials, CredentialsRow} import net.liftweb.common.Box.tryo import play.api.libs.json.Json import utils.sql.{SecuredSQLDAO, SqlClient, SqlToken} -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/models/dataset/credential/CredentialService.scala b/app/models/dataset/credential/CredentialService.scala index 9fdacdadbdd..46e3f082fcc 100644 --- a/app/models/dataset/credential/CredentialService.scala +++ b/app/models/dataset/credential/CredentialService.scala @@ -1,5 +1,6 @@ package models.dataset.credential +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.storage.{ DataVaultCredential, @@ -10,7 +11,6 @@ import com.scalableminds.webknossos.datastore.storage.{ } import net.liftweb.common.Box.tryo import play.api.libs.json.Json -import utils.ObjectId import java.net.URI import javax.inject.Inject diff --git a/app/models/dataset/explore/WKExploreRemoteLayerService.scala b/app/models/dataset/explore/WKExploreRemoteLayerService.scala index 1b809e183d7..08872ca8805 100644 --- a/app/models/dataset/explore/WKExploreRemoteLayerService.scala +++ b/app/models/dataset/explore/WKExploreRemoteLayerService.scala @@ -19,7 +19,7 @@ import models.user.User import net.liftweb.common.Box.tryo import play.api.libs.json.{Json, OFormat} import security.WkSilhouetteEnvironment -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import java.net.URI import javax.inject.Inject @@ -111,7 +111,6 @@ class WKExploreRemoteLayerService @Inject()(credentialService: CredentialService organization <- organizationDAO.findOne(user._organization) dataStore <- dataStoreDAO.findOneWithUploadsAllowed 
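// Illustrative sketch, not part of the diff: the datastore routes used by WKRemoteDataStoreClient
// above, rebuilt from the organization id plus the dataset's directoryName (thumbnail, readData) or
// from the dataset id (findData). URL-encoding and query parameters (token, mag, bounding box) are elided.
def thumbnailRoute(dataStoreUrl: String, organizationId: String, directoryName: String, layerName: String): String =
  s"$dataStoreUrl/data/datasets/$organizationId/$directoryName/layers/$layerName/thumbnail.jpg"

def readDataRoute(dataStoreUrl: String, organizationId: String, directoryName: String, layerName: String): String =
  s"$dataStoreUrl/data/datasets/$organizationId/$directoryName/layers/$layerName/readData"

def findDataRoute(dataStoreUrl: String, datasetId: String, layerName: String): String =
  s"$dataStoreUrl/data/datasets/$datasetId/layers/$layerName/findData"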
_ <- datasetService.assertValidDatasetName(datasetName) - _ <- datasetService.assertNewDatasetName(datasetName, organization._id) ?~> "dataset.name.alreadyTaken" client = new WKRemoteDataStoreClient(dataStore, rpc) userToken <- bearerTokenService.createAndInitDataStoreTokenForUser(user) _ <- client.addDataSource(organization._id, datasetName, dataSource, folderId, userToken) diff --git a/app/models/folder/Folder.scala b/app/models/folder/Folder.scala index 8ac15596ef3..3770935c54e 100644 --- a/app/models/folder/Folder.scala +++ b/app/models/folder/Folder.scala @@ -13,7 +13,7 @@ import slick.jdbc.PostgresProfile.api._ import slick.lifted.Rep import slick.sql.SqlAction import utils.sql.{SQLDAO, SqlClient, SqlToken} -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import javax.inject.Inject import scala.annotation.tailrec diff --git a/app/models/job/Job.scala b/app/models/job/Job.scala index 4fe3fe6ac3b..d083890c44d 100644 --- a/app/models/job/Job.scala +++ b/app/models/job/Job.scala @@ -11,7 +11,7 @@ import slick.jdbc.PostgresProfile.api._ import slick.jdbc.TransactionIsolation.Serializable import slick.lifted.Rep import utils.sql.{SQLDAO, SqlClient, SqlToken} -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext @@ -51,6 +51,8 @@ case class Job( def datasetName: Option[String] = argAsStringOpt("dataset_name") + def datasetId: Option[String] = argAsStringOpt("dataset_id") + private def argAsStringOpt(key: String) = (commandArgs \ key).toOption.flatMap(_.asOpt[String]) def resultLink(organizationId: String): Option[String] = @@ -58,15 +60,16 @@ case class Job( else { command match { case JobCommand.convert_to_wkw | JobCommand.compute_mesh_file => - datasetName.map { dsName => - s"/datasets/$organizationId/$dsName/view" - } + datasetId.map { datasetId => + val datasetNameMaybe = datasetName.map(name => s"$name-").getOrElse("") + Some(s"/datasets/$datasetNameMaybe$datasetId/view") + }.getOrElse(datasetName.map(name => s"datasets/$organizationId/$name/view")) case JobCommand.export_tiff | JobCommand.render_animation => Some(s"/api/jobs/${this._id}/export") case JobCommand.infer_nuclei | JobCommand.infer_neurons | JobCommand.materialize_volume_annotation | JobCommand.infer_with_model | JobCommand.infer_mitochondria | JobCommand.align_sections => - returnValue.map { resultDatasetName => - s"/datasets/$organizationId/$resultDatasetName/view" + returnValue.map { resultDatasetDirectoryName => // TODO: Adjust worker + s"/datasets/$organizationId/$resultDatasetDirectoryName/view" } case _ => None } diff --git a/app/models/job/JobService.scala b/app/models/job/JobService.scala index 0b595516a94..5384ac256f1 100644 --- a/app/models/job/JobService.scala +++ b/app/models/job/JobService.scala @@ -3,6 +3,7 @@ package models.job import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.mvc.Formatter +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging import mail.{DefaultMails, MailchimpClient, MailchimpTag, Send} @@ -15,7 +16,7 @@ import org.apache.pekko.actor.ActorSystem import play.api.libs.json.{JsObject, Json} import security.WkSilhouetteEnvironment import telemetry.SlackNotificationService -import utils.{ObjectId, WkConf} +import utils.WkConf import javax.inject.Inject import 
scala.concurrent.ExecutionContext @@ -151,9 +152,10 @@ class JobService @Inject()(wkConf: WkConf, logger.info(s"WKW conversion job ${job._id} failed. Deleting dataset from the database, freeing the name...") val commandArgs = job.commandArgs.value for { - datasetName <- commandArgs.get("dataset_name").map(_.as[String]).toFox + datasetDirectoryName <- commandArgs.get("dataset_directory_name").map(_.as[String]).toFox organizationId <- commandArgs.get("organization_name").map(_.as[String]).toFox - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(GlobalAccessContext) + dataset <- datasetDAO.findOneByDirectoryNameAndOrganization(datasetDirectoryName, organizationId)( + GlobalAccessContext) _ <- datasetDAO.deleteDataset(dataset._id) } yield () } else Fox.successful(()) diff --git a/app/models/job/Worker.scala b/app/models/job/Worker.scala index 6600a38832f..1a3591a8594 100644 --- a/app/models/job/Worker.scala +++ b/app/models/job/Worker.scala @@ -3,6 +3,7 @@ package models.job import org.apache.pekko.actor.ActorSystem import com.scalableminds.util.accesscontext.GlobalAccessContext import com.scalableminds.util.mvc.Formatter +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.helpers.IntervalScheduler @@ -15,7 +16,7 @@ import play.api.libs.json.{JsObject, Json} import slick.lifted.Rep import telemetry.SlackNotificationService import utils.sql.{SQLDAO, SqlClient} -import utils.{ObjectId, WkConf} +import utils.WkConf import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/models/mesh/Mesh.scala b/app/models/mesh/Mesh.scala index 37c123b5726..b3bd840dd7b 100644 --- a/app/models/mesh/Mesh.scala +++ b/app/models/mesh/Mesh.scala @@ -12,7 +12,7 @@ import play.api.libs.json.Json._ import play.api.libs.json._ import slick.lifted.Rep import utils.sql.{SQLDAO, SqlClient, SqlToken} -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/organization/Organization.scala b/app/models/organization/Organization.scala index 185e18f0135..f7896604abf 100644 --- a/app/models/organization/Organization.scala +++ b/app/models/organization/Organization.scala @@ -8,7 +8,7 @@ import com.scalableminds.webknossos.schema.Tables._ import models.team.PricingPlan import models.team.PricingPlan.PricingPlan import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import utils.sql.{SQLDAO, SqlClient, SqlToken} import javax.inject.Inject @@ -88,7 +88,7 @@ class OrganizationDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionCont for { accessQuery <- readAccessQuery r <- run( - q"select $columns from $existingCollectionName where _id = $organizationId and $accessQuery" + q"SELECT $columns FROM $existingCollectionName WHERE _id = $organizationId AND $accessQuery" .as[OrganizationsRow]) parsed <- parseFirst(r, organizationId) } yield parsed @@ -129,6 +129,15 @@ class OrganizationDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionCont r <- rList.headOption.toFox } yield r + def findOrganizationIdForDataset(datasetId: ObjectId)(implicit ctx: DBAccessContext): Fox[String] = + for { + accessQuery <- readAccessQuery + rList <- run(q"""SELECT o._id FROM webknossos.organizations_ o + JOIN webknossos.datasets_ d ON o._id = d._organization + WHERE d._id = $datasetId WHERE $accessQuery""".as[String]) + r <- rList.headOption.toFox + 
} yield r + def updateFields(organizationId: String, name: String, newUserMailingList: String)( implicit ctx: DBAccessContext): Fox[Unit] = for { diff --git a/app/models/organization/OrganizationService.scala b/app/models/organization/OrganizationService.scala index 6e105e18c2c..b9d36e0e181 100644 --- a/app/models/organization/OrganizationService.scala +++ b/app/models/organization/OrganizationService.scala @@ -1,6 +1,7 @@ package models.organization import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} import com.scalableminds.webknossos.datastore.rpc.RPC import com.typesafe.scalalogging.LazyLogging @@ -11,7 +12,7 @@ import models.folder.{Folder, FolderDAO, FolderService} import models.team.{PricingPlan, Team, TeamDAO} import models.user.{Invite, MultiUserDAO, User, UserDAO, UserService} import play.api.libs.json.{JsArray, JsObject, Json} -import utils.{ObjectId, WkConf} +import utils.WkConf import scala.concurrent.{ExecutionContext, Future} diff --git a/app/models/project/Project.scala b/app/models/project/Project.scala index ce5b1890e23..3579e76dc32 100755 --- a/app/models/project/Project.scala +++ b/app/models/project/Project.scala @@ -13,7 +13,7 @@ import net.liftweb.common.Full import play.api.libs.functional.syntax._ import play.api.libs.json._ import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import utils.sql.{SQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/models/shortlinks/ShortLink.scala b/app/models/shortlinks/ShortLink.scala index eb4f0d94f38..c4a3c7b52ca 100644 --- a/app/models/shortlinks/ShortLink.scala +++ b/app/models/shortlinks/ShortLink.scala @@ -7,7 +7,7 @@ import play.api.libs.json.{Json, OFormat} import slick.jdbc.PostgresProfile.api._ import slick.lifted.Rep import utils.sql.{SqlClient, SQLDAO} -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/models/task/Script.scala b/app/models/task/Script.scala index 0b689eb2f17..59e2bf50805 100644 --- a/app/models/task/Script.scala +++ b/app/models/task/Script.scala @@ -7,7 +7,7 @@ import com.scalableminds.webknossos.schema.Tables._ import models.user.{UserDAO, UserService} import play.api.libs.json._ import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import utils.sql.{SQLDAO, SqlClient, SqlToken} import javax.inject.Inject diff --git a/app/models/task/Task.scala b/app/models/task/Task.scala index dabd1ca434f..fa814bc29f5 100755 --- a/app/models/task/Task.scala +++ b/app/models/task/Task.scala @@ -2,6 +2,7 @@ package models.task import com.scalableminds.util.accesscontext.DBAccessContext import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.schema.Tables._ @@ -12,7 +13,6 @@ import models.user.Experience import slick.jdbc.PostgresProfile.api._ import slick.jdbc.TransactionIsolation.Serializable import utils.sql.{SQLDAO, SqlClient, SqlToken} -import utils.ObjectId import scala.concurrent.ExecutionContext import scala.concurrent.duration.FiniteDuration diff --git a/app/models/task/TaskCreationParameters.scala b/app/models/task/TaskCreationParameters.scala index e9594cecb2b..72709c92efd 100644 --- 
a/app/models/task/TaskCreationParameters.scala +++ b/app/models/task/TaskCreationParameters.scala @@ -1,9 +1,25 @@ package models.task -import com.scalableminds.util.geometry.{BoundingBox, Vec3Int, Vec3Double} +import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} +import com.scalableminds.util.objectid.ObjectId import models.user.Experience import play.api.libs.json.{Format, Json} +trait TaskParametersTrait[T] { + val taskTypeId: String + val neededExperience: Experience + val pendingInstances: Int + val projectName: String + val scriptId: Option[String] + val boundingBox: Option[BoundingBox] + val datasetId: T + val editPosition: Vec3Int + val editRotation: Vec3Double + val creationInfo: Option[String] + val description: Option[String] + val baseAnnotation: Option[BaseAnnotation] +} + case class TaskParameters(taskTypeId: String, neededExperience: Experience, pendingInstances: Int, @@ -11,16 +27,49 @@ case class TaskParameters(taskTypeId: String, scriptId: Option[String], boundingBox: Option[BoundingBox], dataSet: String, + datasetId: Option[ObjectId], editPosition: Vec3Int, editRotation: Vec3Double, creationInfo: Option[String], description: Option[String], baseAnnotation: Option[BaseAnnotation]) + extends TaskParametersTrait[Option[ObjectId]] object TaskParameters { implicit val taskParametersFormat: Format[TaskParameters] = Json.format[TaskParameters] } +case class TaskParametersWithDatasetId(taskTypeId: String, + neededExperience: Experience, + pendingInstances: Int, + projectName: String, + scriptId: Option[String], + boundingBox: Option[BoundingBox], + datasetId: ObjectId, + editPosition: Vec3Int, + editRotation: Vec3Double, + creationInfo: Option[String], + description: Option[String], + baseAnnotation: Option[BaseAnnotation]) + extends TaskParametersTrait[ObjectId] + +object TaskParametersWithDatasetId { + def fromTaskParameters(t: TaskParameters, datasetId: ObjectId) = new TaskParametersWithDatasetId( + t.taskTypeId, + t.neededExperience, + t.pendingInstances, + t.projectName, + t.scriptId, + t.boundingBox, + datasetId, + t.editPosition, + t.editRotation, + t.creationInfo, + t.description, + t.baseAnnotation + ) +} + case class NmlTaskParameters(taskTypeId: String, neededExperience: Experience, pendingInstances: Int, diff --git a/app/models/task/TaskCreationService.scala b/app/models/task/TaskCreationService.scala index 865316eef8b..2086126b150 100644 --- a/app/models/task/TaskCreationService.scala +++ b/app/models/task/TaskCreationService.scala @@ -1,5 +1,7 @@ package models.task +import collections.SequenceUtils + import java.io.File import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} @@ -21,7 +23,9 @@ import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.json.{JsObject, Json} import telemetry.SlackNotificationService -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId +import models.organization.OrganizationDAO +import play.api.http.Status.FORBIDDEN import scala.concurrent.ExecutionContext @@ -29,6 +33,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, taskTypeDAO: TaskTypeDAO, annotationService: AnnotationService, taskDAO: TaskDAO, + organizationDAO: OrganizationDAO, taskService: TaskService, userService: UserService, teamDAO: TeamDAO, @@ -54,12 +59,22 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, _ <- 
bool2Fox(batchSize <= batchLimit) ?~> Messages("task.create.limitExceeded", batchLimit) } yield () + def fillMissingDatasetIds(taskParametersList: List[TaskParameters], organizationId: String)( + implicit ctx: DBAccessContext, + m: MessagesProvider): Fox[List[TaskParametersWithDatasetId]] = + Fox.serialCombined(taskParametersList)( + params => + for { + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(params.datasetId, params.dataSet, organizationId) + } yield TaskParametersWithDatasetId.fromTaskParameters(params, dataset._id) + ) + // Used in create (without files) in case of base annotation // Note that the requested task’s tracingType is always fulfilled here, // either by duplicating the base annotation’s tracings or creating new tracings - def createTracingsFromBaseAnnotations(taskParametersList: List[TaskParameters], organizationId: String)( + def createTracingsFromBaseAnnotations(taskParametersList: List[TaskParametersWithDatasetId], organizationId: String)( implicit ctx: DBAccessContext, - m: MessagesProvider): Fox[List[TaskParameters]] = + m: MessagesProvider): Fox[List[TaskParametersWithDatasetId]] = Fox.serialCombined(taskParametersList)( params => Fox @@ -69,12 +84,13 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, // Used in create (without files) in case of base annotation private def createTracingsFromBaseAnnotation( baseAnnotation: BaseAnnotation, - taskParameters: TaskParameters, + taskParameters: TaskParametersWithDatasetId, organizationId: String)(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[BaseAnnotation] = for { taskTypeIdValidated <- ObjectId.fromString(taskParameters.taskTypeId) ?~> "taskType.id.invalid" taskType <- taskTypeDAO.findOne(taskTypeIdValidated) ?~> "taskType.notFound" - dataset <- datasetDAO.findOneByNameAndOrganization(taskParameters.dataSet, organizationId) + dataset <- datasetDAO.findOne(taskParameters.datasetId) + _ <- bool2Fox(dataset._organization == organizationId) ?~> "dataset" baseAnnotationIdValidated <- ObjectId.fromString(baseAnnotation.baseId) annotation <- resolveBaseAnnotationId(baseAnnotationIdValidated) tracingStoreClient <- tracingStoreService.clientFor(dataset) @@ -82,11 +98,8 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, duplicateOrCreateSkeletonBase(annotation, taskParameters, tracingStoreClient).map(Some(_)) else Fox.successful(None) newVolumeId <- if (taskType.tracingType == TracingType.volume || taskType.tracingType == TracingType.hybrid) - duplicateOrCreateVolumeBase(annotation, - taskParameters, - tracingStoreClient, - organizationId, - taskType.settings.magRestrictions).map(Some(_)) + duplicateOrCreateVolumeBase(annotation, taskParameters, tracingStoreClient, taskType.settings.magRestrictions) + .map(Some(_)) else Fox.successful(None) } yield BaseAnnotation(baseAnnotationIdValidated.id, newSkeletonId, newVolumeId) @@ -118,9 +131,10 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, }).flatten // Used in create (without files) in case of base annotation - private def duplicateOrCreateSkeletonBase(baseAnnotation: Annotation, - params: TaskParameters, - tracingStoreClient: WKRemoteTracingStoreClient): Fox[String] = + private def duplicateOrCreateSkeletonBase( + baseAnnotation: Annotation, + params: TaskParametersWithDatasetId, + tracingStoreClient: WKRemoteTracingStoreClient)(implicit ctx: DBAccessContext): Fox[String] = for { baseSkeletonTracingIdOpt <- baseAnnotation.skeletonTracingId newTracingId <- baseSkeletonTracingIdOpt @@ -131,21 
+145,21 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, editRotation = Some(params.editRotation), boundingBox = params.boundingBox)) .getOrElse( - tracingStoreClient.saveSkeletonTracing( - annotationService.createSkeletonTracingBase( - params.dataSet, + annotationService + .createSkeletonTracingBase( + params.datasetId, params.boundingBox, params.editPosition, params.editRotation - ))) + ) + .flatMap(tracingStoreClient.saveSkeletonTracing)) } yield newTracingId // Used in create (without files) in case of base annotation private def duplicateOrCreateVolumeBase( baseAnnotation: Annotation, - params: TaskParameters, + params: TaskParametersWithDatasetId, tracingStoreClient: WKRemoteTracingStoreClient, - organizationId: String, magRestrictions: MagRestrictions)(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[String] = for { volumeTracingOpt <- baseAnnotation.volumeTracingId @@ -154,8 +168,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, .getOrElse( annotationService .createVolumeTracingBase( - params.dataSet, - organizationId, + params.datasetId, params.boundingBox, params.editPosition, params.editRotation, @@ -166,26 +179,28 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, } yield newVolumeTracingId // Used in create (without files). If base annotations were used, this does nothing. - def createTaskSkeletonTracingBases(paramsList: List[TaskParameters])( + def createTaskSkeletonTracingBases(paramsList: List[TaskParametersWithDatasetId], organizationId: String)( implicit ctx: DBAccessContext): Fox[List[Option[SkeletonTracing]]] = Fox.serialCombined(paramsList) { params => for { taskTypeIdValidated <- ObjectId.fromString(params.taskTypeId) ?~> "taskType.id.invalid" taskType <- taskTypeDAO.findOne(taskTypeIdValidated) ?~> "taskType.notFound" - skeletonTracingOpt = if ((taskType.tracingType == TracingType.skeleton || taskType.tracingType == TracingType.hybrid) && params.baseAnnotation.isEmpty) { - Some( - annotationService.createSkeletonTracingBase( - params.dataSet, + skeletonTracingOpt <- if ((taskType.tracingType == TracingType.skeleton || taskType.tracingType == TracingType.hybrid) && params.baseAnnotation.isEmpty) { + + annotationService + .createSkeletonTracingBase( + params.datasetId, params.boundingBox, params.editPosition, params.editRotation - )) - } else None + ) + .map(Some(_)) + } else Fox.successful(None) } yield skeletonTracingOpt } // Used in create (without files). If base annotations were used, this does nothing. 
- def createTaskVolumeTracingBases(paramsList: List[TaskParameters], organizationId: String)( + def createTaskVolumeTracingBases(paramsList: List[TaskParametersWithDatasetId], organizationId: String)( implicit ctx: DBAccessContext, m: MessagesProvider): Fox[List[Option[(VolumeTracing, Option[File])]]] = Fox.serialCombined(paramsList) { params => @@ -195,8 +210,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, volumeTracingOpt <- if ((taskType.tracingType == TracingType.volume || taskType.tracingType == TracingType.hybrid) && params.baseAnnotation.isEmpty) { annotationService .createVolumeTracingBase( - params.dataSet, - organizationId, + params.datasetId, params.boundingBox, params.editPosition, params.editRotation, @@ -209,59 +223,65 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, } def buildFullParamsFromFiles(params: NmlTaskParameters, extractedTracingBoxes: List[TracingBoxContainer])( - implicit m: MessagesProvider): List[Box[TaskParameters]] = + implicit m: MessagesProvider): List[Box[TaskParametersWithDatasetId]] = extractedTracingBoxes.map { boxContainer => buildFullParamsFromFilesForSingleTask(params, boxContainer.skeleton, boxContainer.volume.map(_._1), + boxContainer.datasetId, boxContainer.fileName, boxContainer.description) } // Used in createFromFiles. For all volume tracings that have an empty bounding box, reset it to the dataset bounding box - def addVolumeFallbackBoundingBoxes(tracingBoxes: List[TracingBoxContainer], - organizationId: String): Fox[List[TracingBoxContainer]] = + def addVolumeFallbackBoundingBoxes(tracingBoxes: List[TracingBoxContainer]): Fox[List[TracingBoxContainer]] = Fox.serialCombined(tracingBoxes) { tracingBox: TracingBoxContainer => - tracingBox.volume match { - case Full(v) => - for { volumeAdapted <- addVolumeFallbackBoundingBox(v._1, organizationId) } yield + tracingBox match { + case TracingBoxContainer(_, _, _, Full(v), Full(datasetId)) => + for { volumeAdapted <- addVolumeFallbackBoundingBox(v._1, datasetId) } yield tracingBox.copy(volume = Full(volumeAdapted, v._2)) case _ => Fox.successful(tracingBox) } } // Used in createFromFiles. Called once per requested task if volume tracing is passed - private def addVolumeFallbackBoundingBox(volume: VolumeTracing, organizationId: String): Fox[VolumeTracing] = - if (volume.boundingBox.isEmpty) { + private def addVolumeFallbackBoundingBox(volume: UploadedVolumeLayer, datasetId: ObjectId): Fox[UploadedVolumeLayer] = + if (volume.tracing.boundingBox.isEmpty) { for { - dataset <- datasetDAO.findOneByNameAndOrganization(volume.datasetName, organizationId)(GlobalAccessContext) + dataset <- datasetDAO.findOne(datasetId)(GlobalAccessContext) dataSource <- datasetService.dataSourceFor(dataset).flatMap(_.toUsable) - } yield volume.copy(boundingBox = dataSource.boundingBox) + } yield volume.copy(tracing = volume.tracing.copy(boundingBox = dataSource.boundingBox)) } else Fox.successful(volume) // Used in createFromFiles. 
Called once per requested task private def buildFullParamsFromFilesForSingleTask( nmlFormParams: NmlTaskParameters, skeletonTracing: Box[SkeletonTracing], - volumeTracing: Box[VolumeTracing], + uploadedVolumeLayer: Box[UploadedVolumeLayer], + datasetIdBox: Box[ObjectId], fileName: Box[String], - description: Box[Option[String]])(implicit m: MessagesProvider): Box[TaskParameters] = { - val paramBox: Box[(Option[BoundingBox], String, Vec3Int, Vec3Double)] = skeletonTracing match { - case Full(tracing) => Full((tracing.boundingBox, tracing.datasetName, tracing.editPosition, tracing.editRotation)) - case f: Failure => f - case Empty => - volumeTracing match { - case Full(tracing) => - Full((Some(tracing.boundingBox), tracing.datasetName, tracing.editPosition, tracing.editRotation)) - case f: Failure => f - case Empty => Failure(Messages("task.create.needsEitherSkeletonOrVolume")) - } - } + description: Box[Option[String]])(implicit m: MessagesProvider): Box[TaskParametersWithDatasetId] = { + val paramBox: Box[(Option[BoundingBox], ObjectId, Vec3Int, Vec3Double)] = + (skeletonTracing, datasetIdBox) match { + case (Full(tracing), Full(datasetId)) => + Full((tracing.boundingBox, datasetId, tracing.editPosition, tracing.editRotation)) + case (f: Failure, _) => f + case (_, f: Failure) => f + case (_, Empty) => Failure(Messages("Could not find dataset for task creation.")) + case (Empty, _) => + (uploadedVolumeLayer, datasetIdBox) match { + case (Full(layer), Full(datasetId)) => + Full((Some(layer.tracing.boundingBox), datasetId, layer.tracing.editPosition, layer.tracing.editRotation)) + case (f: Failure, _) => f + case (_, f: Failure) => f + case _ => Failure(Messages("task.create.needsEitherSkeletonOrVolume")) + } + } paramBox map { params => val parsedNmlTracingBoundingBox = params._1.map(b => BoundingBox(b.topLeft, b.width, b.height, b.depth)) val bbox = if (nmlFormParams.boundingBox.isDefined) nmlFormParams.boundingBox else parsedNmlTracingBoundingBox - TaskParameters( + TaskParametersWithDatasetId( nmlFormParams.taskTypeId, nmlFormParams.neededExperience, nmlFormParams.pendingInstances, @@ -280,8 +300,8 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, // used in createFromFiles route def fillInMissingTracings(skeletons: List[Box[SkeletonTracing]], - volumes: List[Box[(VolumeTracing, Option[File])]], - fullParams: List[Box[TaskParameters]], + volumes: List[Box[(UploadedVolumeLayer, Option[File])]], + fullParams: List[Box[TaskParametersWithDatasetId]], taskType: TaskType, organizationId: String)( implicit ctx: DBAccessContext, @@ -306,7 +326,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, case (skeletonTracingBox, volumeTracingBox) => skeletonTracingBox match { case Full(_) => (Empty, Failure(Messages("taskType.mismatch", "volume", "skeleton"))) - case _ => (Empty, volumeTracingBox) + case _ => (Empty, volumeTracingBox.map(box => (box._1.tracing, box._2))) } } .unzip) @@ -316,28 +336,30 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, case (paramBox, skeleton, volume) => paramBox match { case Full(params) => - val skeletonBox = Full( - skeleton.openOr( - annotationService.createSkeletonTracingBase(params.dataSet, - params.boundingBox, - params.editPosition, - params.editRotation))) + val skeletonFox = + skeleton + .map(s => Fox.successful(s)) + .openOr( + annotationService.createSkeletonTracingBase(params.datasetId, + params.boundingBox, + params.editPosition, + params.editRotation)) val volumeFox = volume - 
.map(Fox.successful(_)) - .openOr( - annotationService - .createVolumeTracingBase( - params.dataSet, - organizationId, - params.boundingBox, - params.editPosition, - params.editRotation, - volumeShowFallbackLayer = false, - magRestrictions = taskType.settings.magRestrictions - ) - .map(v => (v, None))) - - volumeFox.map(v => (skeletonBox, Full(v))) + .map(v => Fox.successful(v._1.tracing, v._2)) + .openOr(annotationService + .createVolumeTracingBase( + params.datasetId, + params.boundingBox, + params.editPosition, + params.editRotation, + volumeShowFallbackLayer = false, + magRestrictions = taskType.settings.magRestrictions + ) + .map(v => (v, None))) + for { + skeleton <- skeletonFox + volume <- volumeFox + } yield (Full(skeleton), Full(volume)) case f: Failure => Fox.failure(f.msg, Empty, f.chain) case _ => Fox.failure("") } @@ -346,10 +368,10 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, // used in createFromFiles route @SuppressWarnings(Array("OptionGet")) //We suppress this warning because we check the option beforehand - def combineParamsWithTracings(fullParams: List[Box[TaskParameters]], + def combineParamsWithTracings(fullParams: List[Box[TaskParametersWithDatasetId]], skeletonBases: List[Box[SkeletonTracing]], volumeBases: List[Box[(VolumeTracing, Option[File])]]) - : List[Box[(TaskParameters, Option[SkeletonTracing], Option[(VolumeTracing, Option[File])])]] = + : List[Box[(TaskParametersWithDatasetId, Option[SkeletonTracing], Option[(VolumeTracing, Option[File])])]] = fullParams.lazyZip(skeletonBases).lazyZip(volumeBases).map { case (paramBox, skeletonBox, volumeBox) => paramBox match { @@ -376,7 +398,8 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, // and on to create task and annotation objects // Both createFromFiles and create use this method def createTasks( - requestedTasks: List[Box[(TaskParameters, Option[SkeletonTracing], Option[(VolumeTracing, Option[File])])]], + requestedTasks: List[ + Box[(TaskParametersWithDatasetId, Option[SkeletonTracing], Option[(VolumeTracing, Option[File])])]], requestingUser: User)(implicit mp: MessagesProvider, ctx: DBAccessContext): Fox[TaskCreationResult] = { val fullTasks = requestedTasks.flatten if (fullTasks.isEmpty) { @@ -385,11 +408,10 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, } else { for { _ <- assertEachHasEitherSkeletonOrVolume(fullTasks) ?~> "task.create.needsEitherSkeletonOrVolume" - firstDatasetName <- fullTasks.headOption.map(_._1.dataSet).toFox - _ <- assertAllOnSameDataset(fullTasks, firstDatasetName) - dataset <- datasetDAO.findOneByNameAndOrganization(firstDatasetName, requestingUser._organization) ?~> Messages( - "dataset.notFound", - firstDatasetName) + firstDatasetId <- fullTasks.headOption.map(_._1.datasetId).toFox + _ <- assertAllOnSameDataset(fullTasks) + dataset <- datasetDAO.findOne(firstDatasetId) ?~> Messages("dataset.notFound", firstDatasetId) + _ <- bool2Fox(dataset._organization == requestingUser._organization) ~> FORBIDDEN _ = if (fullTasks.exists(task => task._1.baseAnnotation.isDefined)) slackNotificationService.noticeBaseAnnotationTaskCreation(fullTasks.map(_._1.taskTypeId).distinct, fullTasks.count(_._1.baseAnnotation.isDefined)) @@ -440,30 +462,24 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, } private def assertEachHasEitherSkeletonOrVolume( - requestedTasks: List[(TaskParameters, Option[SkeletonTracing], Option[(VolumeTracing, Option[File])])]) - : Fox[Unit] = + requestedTasks: List[ + 
(TaskParametersWithDatasetId, Option[SkeletonTracing], Option[(VolumeTracing, Option[File])])]): Fox[Unit] = bool2Fox( requestedTasks.forall(tuple => tuple._1.baseAnnotation.isDefined || tuple._2.isDefined || tuple._3.isDefined)) private def assertAllOnSameDataset( - requestedTasks: List[(TaskParameters, Option[SkeletonTracing], Option[(VolumeTracing, Option[File])])], - firstDatasetName: String)(implicit mp: MessagesProvider): Fox[String] = { + requestedTasks: List[ + (TaskParametersWithDatasetId, Option[SkeletonTracing], Option[(VolumeTracing, Option[File])])])( + implicit mp: MessagesProvider): Fox[Unit] = { @scala.annotation.tailrec - def allOnSameDatasetIter( - requestedTasksRest: List[(TaskParameters, Option[SkeletonTracing], Option[(VolumeTracing, Option[File])])], - datasetName: String): Boolean = - requestedTasksRest match { - case List() => true - case head :: tail => head._1.dataSet == datasetName && allOnSameDatasetIter(tail, datasetName) - } - - if (allOnSameDatasetIter(requestedTasks, firstDatasetName)) - Fox.successful(firstDatasetName) + val areAllIdsEqual = SequenceUtils.findUniqueElement(requestedTasks.map(_._1.datasetId)) + if (areAllIdsEqual.isDefined) + Fox.successful(()) else Fox.failure(Messages("task.notOnSameDataset")) } - private def mergeTracingIds(list: List[(Box[TaskParameters], Box[Option[String]])], + private def mergeTracingIds(list: List[(Box[TaskParametersWithDatasetId], Box[Option[String]])], isSkeletonId: Boolean): List[Box[Option[String]]] = list.map { case (paramBox, idBox) => @@ -475,12 +491,13 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, } private def saveVolumeTracingIfPresent( - requestedTaskBox: Box[(TaskParameters, Option[SkeletonTracing], Option[(VolumeTracing, Option[File])])], + requestedTaskBox: Box[ + (TaskParametersWithDatasetId, Option[SkeletonTracing], Option[(VolumeTracing, Option[File])])], tracingStoreClient: WKRemoteTracingStoreClient)(implicit ctx: DBAccessContext): Fox[Option[String]] = requestedTaskBox.map { tuple => (tuple._1, tuple._3) } match { - case Full((params: TaskParameters, Some((tracing, initialFile)))) => + case Full((params: TaskParametersWithDatasetId, Some((tracing, initialFile)))) => for { taskTypeIdValidated <- ObjectId.fromString(params.taskTypeId) ?~> "taskType.id.invalid" taskType <- taskTypeDAO.findOne(taskTypeIdValidated) ?~> "taskType.notFound" @@ -492,8 +509,9 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, case _ => Fox.successful(None) } - private def warnIfTeamHasNoAccess(requestedTasks: List[TaskParameters], dataset: Dataset, requestingUser: User)( - implicit ctx: DBAccessContext): Fox[List[String]] = { + private def warnIfTeamHasNoAccess(requestedTasks: List[TaskParametersWithDatasetId], + dataset: Dataset, + requestingUser: User)(implicit ctx: DBAccessContext): Fox[List[String]] = { val projectNames = requestedTasks.map(_.projectName).distinct for { projects: List[Project] <- Fox.serialCombined(projectNames)( @@ -526,7 +544,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, case _ => Fox.successful(()) } - private def createTaskWithoutAnnotationBase(paramBox: Box[TaskParameters], + private def createTaskWithoutAnnotationBase(paramBox: Box[TaskParametersWithDatasetId], skeletonTracingIdBox: Box[Option[String]], volumeTracingIdBox: Box[Option[String]], requestingUser: User)(implicit ctx: DBAccessContext): Fox[Task] = diff --git a/app/models/task/TaskService.scala b/app/models/task/TaskService.scala index 48a9d8852a7..94f6ee5b482 
100644 --- a/app/models/task/TaskService.scala +++ b/app/models/task/TaskService.scala @@ -1,7 +1,9 @@ package models.task import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits} + import javax.inject.Inject import models.annotation.{Annotation, AnnotationDAO, AnnotationType} import models.dataset.DatasetDAO @@ -10,7 +12,7 @@ import models.team.TeamDAO import models.user.{User, UserService} import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.json.{JsObject, Json} -import utils.{ObjectId, WkConf} +import utils.WkConf import scala.concurrent.ExecutionContext @@ -45,6 +47,8 @@ class TaskService @Inject()(conf: WkConf, "team" -> team.name, "type" -> taskTypeJs, "dataSet" -> dataset.name, + "datasetName" -> dataset.name, + "datasetId" -> dataset._id, // Only used for csv serialization in frontend. "neededExperience" -> task.neededExperience, "created" -> task.created, "status" -> status, diff --git a/app/models/task/TaskType.scala b/app/models/task/TaskType.scala index 87c56dfedd5..93366f2cfc6 100755 --- a/app/models/task/TaskType.scala +++ b/app/models/task/TaskType.scala @@ -1,6 +1,7 @@ package models.task import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.schema.Tables._ @@ -11,7 +12,6 @@ import models.annotation.{AnnotationSettings, TracingMode} import models.team.TeamDAO import play.api.libs.json._ import slick.lifted.Rep -import utils.ObjectId import utils.sql.{EnumerationArrayValue, SQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/models/team/Team.scala b/app/models/team/Team.scala index 377515d3b64..a488f83b769 100755 --- a/app/models/team/Team.scala +++ b/app/models/team/Team.scala @@ -16,7 +16,7 @@ import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.json._ import slick.lifted.Rep import utils.sql.{SQLDAO, SqlClient, SqlToken} -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/team/TeamMembership.scala b/app/models/team/TeamMembership.scala index 0d61ee5b1e5..32d2122ae9c 100755 --- a/app/models/team/TeamMembership.scala +++ b/app/models/team/TeamMembership.scala @@ -1,11 +1,12 @@ package models.team import com.scalableminds.util.accesscontext.DBAccessContext +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox + import javax.inject.Inject import play.api.libs.functional.syntax._ import play.api.libs.json._ -import utils.ObjectId case class TeamMembership(teamId: ObjectId, isTeamManager: Boolean) diff --git a/app/models/user/EmailVerificationKey.scala b/app/models/user/EmailVerificationKey.scala index 03a087f51d5..3b8b2956cda 100644 --- a/app/models/user/EmailVerificationKey.scala +++ b/app/models/user/EmailVerificationKey.scala @@ -1,11 +1,11 @@ package models.user +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.schema.Tables import com.scalableminds.webknossos.schema.Tables.{Emailverificationkeys, EmailverificationkeysRow} import slick.lifted.{Rep, TableQuery} -import utils.ObjectId import utils.sql.{SQLDAO, SqlClient} import javax.inject.Inject diff --git 
a/app/models/user/EmailVerificationService.scala b/app/models/user/EmailVerificationService.scala index cd04217185c..e55af8d3ca8 100644 --- a/app/models/user/EmailVerificationService.scala +++ b/app/models/user/EmailVerificationService.scala @@ -2,12 +2,13 @@ package models.user import org.apache.pekko.actor.ActorSystem import com.scalableminds.util.accesscontext.DBAccessContext +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.typesafe.scalalogging.LazyLogging import mail.{DefaultMails, Send} import security.RandomIDGenerator -import utils.{ObjectId, WkConf} +import utils.WkConf import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/models/user/Invite.scala b/app/models/user/Invite.scala index 84ebda8ff4d..ddbc9557ecd 100644 --- a/app/models/user/Invite.scala +++ b/app/models/user/Invite.scala @@ -2,6 +2,7 @@ package models.user import org.apache.pekko.actor.ActorSystem import com.scalableminds.util.accesscontext.DBAccessContext +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.schema.Tables._ @@ -13,8 +14,8 @@ import models.organization.OrganizationDAO import security.RandomIDGenerator import slick.jdbc.PostgresProfile.api._ import slick.lifted.Rep -import utils.sql.{SqlClient, SQLDAO} -import utils.{ObjectId, WkConf} +import utils.sql.{SQLDAO, SqlClient} +import utils.WkConf import scala.concurrent.{ExecutionContext, Future} diff --git a/app/models/user/MultiUser.scala b/app/models/user/MultiUser.scala index a1da71d02f7..55f98691b34 100644 --- a/app/models/user/MultiUser.scala +++ b/app/models/user/MultiUser.scala @@ -10,7 +10,7 @@ import models.user.Theme.Theme import play.api.libs.json.Format.GenericFormat import play.api.libs.json.{JsObject, Json} import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import utils.sql.{SQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/models/user/User.scala b/app/models/user/User.scala index cc2c951716e..96c9a15f061 100644 --- a/app/models/user/User.scala +++ b/app/models/user/User.scala @@ -17,7 +17,7 @@ import slick.jdbc.PostgresProfile.api._ import slick.jdbc.TransactionIsolation.Serializable import slick.lifted.Rep import utils.sql.{SQLDAO, SimpleSQLDAO, SqlClient, SqlToken} -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/user/UserService.scala b/app/models/user/UserService.scala index a864bfaaa47..be07b9d6115 100755 --- a/app/models/user/UserService.scala +++ b/app/models/user/UserService.scala @@ -2,6 +2,7 @@ package models.user import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.cache.AlfuCache +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.security.SCrypt import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} @@ -23,7 +24,7 @@ import play.silhouette.api.util.PasswordInfo import play.silhouette.impl.providers.CredentialsProvider import security.{PasswordHasher, TokenDAO} import utils.sql.SqlEscaping -import utils.{ObjectId, WkConf} +import utils.WkConf import javax.inject.Inject import scala.concurrent.{ExecutionContext, Future} @@ -237,14 +238,11 @@ class UserService @Inject()(conf: WkConf, def 
updateDatasetViewConfiguration( user: User, - datasetName: String, - organizationId: String, + datasetId: ObjectId, datasetConfiguration: DatasetViewConfiguration, layerConfiguration: Option[JsValue])(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[Unit] = for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(GlobalAccessContext) ?~> Messages( - "dataset.notFound", - datasetName) + dataset <- datasetDAO.findOne(datasetId)(GlobalAccessContext) ?~> Messages("dataset.notFound", datasetId) layerMap = layerConfiguration.flatMap(_.asOpt[Map[String, JsValue]]).getOrElse(Map.empty) _ <- Fox.serialCombined(layerMap.toList) { case (name, config) => diff --git a/app/models/user/time/TimeSpan.scala b/app/models/user/time/TimeSpan.scala index cad7cb003e4..90c130dc360 100644 --- a/app/models/user/time/TimeSpan.scala +++ b/app/models/user/time/TimeSpan.scala @@ -8,7 +8,7 @@ import models.annotation.AnnotationType.AnnotationType import play.api.libs.json.{JsArray, JsObject, JsValue, Json} import slick.lifted.Rep import utils.sql.{SQLDAO, SqlClient, SqlToken} -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/models/user/time/TimeSpanService.scala b/app/models/user/time/TimeSpanService.scala index 8e191f2d3c4..2fa798f5d2b 100644 --- a/app/models/user/time/TimeSpanService.scala +++ b/app/models/user/time/TimeSpanService.scala @@ -1,6 +1,7 @@ package models.user.time import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging @@ -13,7 +14,7 @@ import models.task.TaskDAO import models.user.{User, UserService} import net.liftweb.common.{Box, Full} import org.apache.pekko.actor.{ActorSelection, ActorSystem} -import utils.{ObjectId, WkConf} +import utils.WkConf import scala.collection.mutable import scala.concurrent.ExecutionContext diff --git a/app/models/voxelytics/VoxelyticsDAO.scala b/app/models/voxelytics/VoxelyticsDAO.scala index beffc0b3010..fb45d984870 100644 --- a/app/models/voxelytics/VoxelyticsDAO.scala +++ b/app/models/voxelytics/VoxelyticsDAO.scala @@ -4,7 +4,7 @@ import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import models.user.User import play.api.libs.json._ -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import utils.sql.{SimpleSQLDAO, SqlClient, SqlToken} import javax.inject.Inject diff --git a/app/models/voxelytics/VoxelyticsService.scala b/app/models/voxelytics/VoxelyticsService.scala index 1c4480836e0..083c814a3e6 100644 --- a/app/models/voxelytics/VoxelyticsService.scala +++ b/app/models/voxelytics/VoxelyticsService.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.tools.{Fox, FoxImplicits} import models.user.User import models.voxelytics.VoxelyticsRunState.VoxelyticsRunState import play.api.libs.json.{JsArray, JsObject, Json, OFormat} -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/opengraph/OpenGraphService.scala b/app/opengraph/OpenGraphService.scala index 09429e98321..e18eedd4c8b 100644 --- a/app/opengraph/OpenGraphService.scala +++ b/app/opengraph/OpenGraphService.scala @@ -4,6 +4,7 @@ import org.apache.pekko.http.scaladsl.model.Uri import 
com.google.inject.Inject import com.scalableminds.util.accesscontext.DBAccessContext import com.scalableminds.util.enumeration.ExtendedEnumeration +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.models.datasource.{Category, DataLayerLike} import models.annotation.AnnotationDAO @@ -13,7 +14,7 @@ import models.shortlinks.ShortLinkDAO import net.liftweb.common.Box.tryo import net.liftweb.common.Full import security.URLSharing -import utils.{ObjectId, WkConf} +import utils.WkConf import scala.concurrent.{ExecutionContext, Future} @@ -39,8 +40,10 @@ class OpenGraphService @Inject()(datasetDAO: DatasetDAO, // This should match the frontend-side routes, not api routes, since those are the links people send around private val shortLinkRouteRegex = "^/links/(.*)".r - private val datasetRoute1Regex = "^/datasets/([^/^#]+)/([^/^#]+)/view".r - private val datasetRoute2Regex = "^/datasets/([^/^#]+)/([^/^#]+)".r + private val datasetRoute1Regex = "^/datasets/([^/^#]+)/view".r + private val datasetRoute2Regex = "^/datasets/([^/^#]+)".r + private val datasetRoute1LegacyRegex = "^/datasets/([^/^#]+)/([^/^#]+)/view".r + private val datasetRoute2LegacyRegex = "^/datasets/([^/^#]+)/([^/^#]+)".r private val workflowRouteRegex = "^/workflows/([^/^#]+)".r private val annotationRouteRegex = "^/annotations/([^/^#]+)".r @@ -91,33 +94,58 @@ class OpenGraphService @Inject()(datasetDAO: DatasetDAO, private def detectPageType(uriPath: String) = uriPath match { - case datasetRoute1Regex(_, _) | datasetRoute2Regex(_, _) => OpenGraphPageType.dataset - case annotationRouteRegex(_) => OpenGraphPageType.annotation - case workflowRouteRegex(_) => OpenGraphPageType.workflow - case _ => OpenGraphPageType.unknown + case datasetRoute1Regex(_, _) | datasetRoute2Regex(_, _) | datasetRoute1LegacyRegex(_, _) | + datasetRoute2LegacyRegex(_, _) => + OpenGraphPageType.dataset + case annotationRouteRegex(_) => OpenGraphPageType.annotation + case workflowRouteRegex(_) => OpenGraphPageType.workflow + case _ => OpenGraphPageType.unknown } private def datasetOpenGraphTags(uriPath: String, token: Option[String])(implicit ec: ExecutionContext, ctx: DBAccessContext): Fox[OpenGraphTags] = uriPath match { - case datasetRoute1Regex(organizationId, datasetName) => - datasetOpenGraphTagsWithOrganizationId(organizationId, datasetName, token) - case datasetRoute2Regex(organizationId, datasetName) => - datasetOpenGraphTagsWithOrganizationId(organizationId, datasetName, token) + case datasetRoute1Regex(datasetIdStr) => + val validDatasetIdOpt = getDatasetIdFromURIPath(datasetIdStr) + datasetOpenGraphTagsWithOrganizationId(validDatasetIdOpt, None, None, token) + case datasetRoute2Regex(datasetIdStr) => + val validDatasetIdOpt = getDatasetIdFromURIPath(datasetIdStr) + datasetOpenGraphTagsWithOrganizationId(validDatasetIdOpt, None, None, token) + case datasetRoute1LegacyRegex(organizationId, datasetName) => + datasetOpenGraphTagsWithOrganizationId(None, Some(organizationId), Some(datasetName), token) + case datasetRoute2LegacyRegex(organizationId, datasetName) => + datasetOpenGraphTagsWithOrganizationId(None, Some(organizationId), Some(datasetName), token) case _ => Fox.failure("not a matching uri") } - private def datasetOpenGraphTagsWithOrganizationId(organizationId: String, - datasetName: String, - token: Option[String])(implicit ctx: DBAccessContext) = + private def getDatasetIdFromURIPath(datasetNameAndId: String): Option[ObjectId] = { + val idStrOpt = 
datasetNameAndId.split("-").lastOption + val idOpt = idStrOpt.flatMap(ObjectId.fromStringSync) + idOpt match { + case Some(validId) => Some(validId) + case None => None + } + } + + private def datasetOpenGraphTagsWithOrganizationId( + datasetIdOpt: Option[ObjectId], + organizationIdOpt: Option[String], + datasetNameOpt: Option[String], + token: Option[String])(implicit ec: ExecutionContext, ctx: DBAccessContext) = for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) + dataset <- (datasetIdOpt, organizationIdOpt, datasetNameOpt) match { + case (Some(datasetId), None, None) => + datasetDAO.findOne(datasetId) + case (None, Some(organizationId), Some(datasetName)) => + datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) + case _ => Fox.failure("Could not find dataset") + } layers <- datasetLayerDAO.findAllForDataset(dataset._id) layerOpt = layers.find(_.category == Category.color) organization <- organizationDAO.findOne(dataset._organization) } yield OpenGraphTags( - Some(s"${dataset.displayName.getOrElse(datasetName)} | WEBKNOSSOS"), + Some(s"${dataset.name} | WEBKNOSSOS"), Some("View this dataset in WEBKNOSSOS"), thumbnailUri(dataset, layerOpt, organization, token) ) @@ -136,8 +164,8 @@ class OpenGraphService @Inject()(datasetDAO: DatasetDAO, layerOpt = layers.find(_.category == Category.color) } yield OpenGraphTags( - Some(s"${annotation.nameOpt.orElse(dataset.displayName).getOrElse(dataset.name)} | WEBKNOSSOS"), - Some(s"View this annotation on dataset ${dataset.displayName.getOrElse(dataset.name)} in WEBKNOSSOS"), + Some(s"${annotation.nameOpt.getOrElse(dataset.name)} | WEBKNOSSOS"), + Some(s"View this annotation on dataset ${dataset.name} in WEBKNOSSOS"), thumbnailUri(dataset, layerOpt, organization, token) ) case _ => Fox.failure("not a matching uri") diff --git a/app/security/Token.scala b/app/security/Token.scala index e4153df5bd4..5ac6b1b101b 100644 --- a/app/security/Token.scala +++ b/app/security/Token.scala @@ -10,7 +10,7 @@ import com.scalableminds.webknossos.schema.Tables._ import TokenType.TokenType import slick.jdbc.PostgresProfile.api._ import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import utils.sql.{SQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/security/WebknossosBearerTokenAuthenticatorService.scala b/app/security/WebknossosBearerTokenAuthenticatorService.scala index 2c31b9a4aed..4ab34475baa 100644 --- a/app/security/WebknossosBearerTokenAuthenticatorService.scala +++ b/app/security/WebknossosBearerTokenAuthenticatorService.scala @@ -14,8 +14,9 @@ import com.scalableminds.util.accesscontext.GlobalAccessContext import com.scalableminds.util.tools.{Fox, FoxImplicits} import models.user.{User, UserService} import TokenType.TokenType +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant -import utils.{ObjectId, WkConf} +import utils.WkConf import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} diff --git a/app/utils/sql/SQLDAO.scala b/app/utils/sql/SQLDAO.scala index 2cf9d7fe40a..21930fb70e6 100644 --- a/app/utils/sql/SQLDAO.scala +++ b/app/utils/sql/SQLDAO.scala @@ -1,15 +1,14 @@ package utils.sql import com.scalableminds.util.accesscontext.DBAccessContext +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import slick.lifted.{AbstractTable, Rep, TableQuery} -import utils.ObjectId import javax.inject.Inject import 
scala.annotation.nowarn import scala.concurrent.ExecutionContext - import slick.jdbc.PostgresProfile.api._ abstract class SQLDAO[C, R, X <: AbstractTable[R]] @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionContext) diff --git a/app/utils/sql/SecuredSQLDAO.scala b/app/utils/sql/SecuredSQLDAO.scala index 3935434de65..e79bd71a5ad 100644 --- a/app/utils/sql/SecuredSQLDAO.scala +++ b/app/utils/sql/SecuredSQLDAO.scala @@ -1,11 +1,11 @@ package utils.sql import com.scalableminds.util.accesscontext.DBAccessContext +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox import models.user.User import net.liftweb.common.Full import security.{SharingTokenContainer, UserSharingTokenContainer} -import utils.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/utils/sql/SqlInterpolation.scala b/app/utils/sql/SqlInterpolation.scala index 61f72de4490..84941953e0a 100644 --- a/app/utils/sql/SqlInterpolation.scala +++ b/app/utils/sql/SqlInterpolation.scala @@ -7,7 +7,7 @@ import slick.dbio.{Effect, NoStream} import slick.jdbc._ import slick.sql.{SqlAction, SqlStreamingAction} import slick.util.DumpInfo -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import java.sql.{PreparedStatement, Types} import scala.collection.mutable diff --git a/app/utils/sql/SqlTypeImplicits.scala b/app/utils/sql/SqlTypeImplicits.scala index 569e41b334e..a77ec936d7b 100644 --- a/app/utils/sql/SqlTypeImplicits.scala +++ b/app/utils/sql/SqlTypeImplicits.scala @@ -4,7 +4,7 @@ import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.time.Instant import play.api.libs.json.JsValue import slick.jdbc.{GetResult, PositionedResult} -import utils.ObjectId +import com.scalableminds.util.objectid.ObjectId import scala.concurrent.duration.FiniteDuration diff --git a/conf/application.conf b/conf/application.conf index 07d8b5d2dd1..85db10f41cf 100644 --- a/conf/application.conf +++ b/conf/application.conf @@ -151,8 +151,8 @@ features { taskReopenAllowedInSeconds = 30 allowDeleteDatasets = true # to enable jobs for local development, use "yarn enable-jobs" to also activate it in the database - jobsEnabled = false - voxelyticsEnabled = false + jobsEnabled = true + voxelyticsEnabled = true # For new users, the dashboard will show a banner which encourages the user to check out the following dataset. # If isWkorgInstance == true, `/createExplorative/hybrid/true` is appended to the URL so that a new tracing is opened. 
# If isWkorgInstance == false, `/view` is appended to the URL so that it's opened in view mode (since the user might not diff --git a/conf/evolutions/124-decouple-dataset-directory-from-name.sql b/conf/evolutions/124-decouple-dataset-directory-from-name.sql new file mode 100644 index 00000000000..a2a6009a472 --- /dev/null +++ b/conf/evolutions/124-decouple-dataset-directory-from-name.sql @@ -0,0 +1,21 @@ +START TRANSACTION; + +do $$ begin ASSERT (select schemaVersion from webknossos.releaseInformation) = 123, 'Previous schema version mismatch'; end; $$ LANGUAGE plpgsql; + +DROP VIEW IF EXISTS webknossos.datasets_; + +UPDATE webknossos.datasets SET displayName = name WHERE displayName IS NULL; +ALTER TABLE webknossos.datasets RENAME COLUMN name TO directoryName; +ALTER TABLE webknossos.datasets RENAME COLUMN displayName TO name; +ALTER TABLE webknossos.datasets ALTER COLUMN name SET NOT NULL; + +ALTER TABLE webknossos.datasets DROP CONSTRAINT IF EXISTS datasets_name__organization_key; +ALTER TABLE webknossos.datasets ADD CONSTRAINT datasets_directoryName__organization_key UNIQUE(directoryName, _organization); +DROP INDEX webknossos.datasets_name_idx; +CREATE INDEX ON webknossos.datasets(directoryName); + +CREATE VIEW webknossos.datasets_ AS SELECT * FROM webknossos.datasets WHERE NOT isDeleted; + +UPDATE webknossos.releaseInformation SET schemaVersion = 124; + +COMMIT TRANSACTION; diff --git a/conf/evolutions/reversions/124-decouple-dataset-directory-from-name.sql b/conf/evolutions/reversions/124-decouple-dataset-directory-from-name.sql new file mode 100644 index 00000000000..1600b605479 --- /dev/null +++ b/conf/evolutions/reversions/124-decouple-dataset-directory-from-name.sql @@ -0,0 +1,20 @@ +START TRANSACTION; + +do $$ begin ASSERT (select schemaVersion from webknossos.releaseInformation) = 124, 'Previous schema version mismatch'; end; $$ LANGUAGE plpgsql; + +DROP VIEW IF EXISTS webknossos.datasets_; + +ALTER TABLE webknossos.datasets RENAME COLUMN name TO displayName; +ALTER TABLE webknossos.datasets RENAME COLUMN directoryName TO name; +ALTER TABLE webknossos.datasets ALTER COLUMN displayName DROP NOT NULL; + +ALTER TABLE webknossos.datasets DROP CONSTRAINT IF EXISTS datasets_directoryName__organization_key; +ALTER TABLE webknossos.datasets ADD CONSTRAINT datasets_name__organization_key UNIQUE(name, _organization); +DROP INDEX webknossos.datasets_directoryName_idx; +CREATE INDEX ON webknossos.datasets(name); + +CREATE VIEW webknossos.datasets_ AS SELECT * FROM webknossos.datasets WHERE NOT isDeleted; + +UPDATE webknossos.releaseInformation SET schemaVersion = 123; + +COMMIT TRANSACTION; diff --git a/conf/messages b/conf/messages index 44eff4e660d..975dce6d587 100644 --- a/conf/messages +++ b/conf/messages @@ -79,6 +79,7 @@ braintracing.exists=Great, you already have an account on braintracing.org. Plea dataset=Dataset dataset.notFound=Dataset {0} does not exist or could not be accessed +dataset.notFoundByIdOrName=Could not find dataset {0} based on id or name. dataset.notFoundConsiderLogin=Dataset {0} does not exist or could not be accessed. You may need to log in. dataset.notFoundForAnnotation=The Dataset for this annotation does not exist or could not be accessed. dataset.noAccess=Could not access dataset {0}. Does your team have access? 
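A minimal sketch of the lookup shape implied by evolution 124, assuming the post-migration schema above: directory-based lookups now filter on directoryName, while name carries the former displayName. The organization and directory values below are placeholders, not values from this patch.

-- Sketch only; assumes the schema after 124-decouple-dataset-directory-from-name.sql, placeholder values.
SELECT _id, name, directoryName
FROM webknossos.datasets_
WHERE directoryName = 'my_dataset_directory'  -- formerly the name column
  AND _organization = 'sample_organization';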
diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index 8c51aadafba..76e22722a22 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -27,7 +27,7 @@ GET /auth/token DELETE /auth/token controllers.AuthenticationController.deleteToken() GET /auth/switch controllers.AuthenticationController.switchMultiUser(to: String) POST /auth/switchOrganization/:organizationId controllers.AuthenticationController.switchOrganization(organizationId: String) -GET /auth/accessibleBySwitching controllers.AuthenticationController.accessibleBySwitching(organizationId: Option[String], datasetName: Option[String], annotationId: Option[String], workflowHash: Option[String]) +GET /auth/accessibleBySwitching controllers.AuthenticationController.accessibleBySwitching(datasetId: Option[String], annotationId: Option[String], workflowHash: Option[String]) POST /auth/sendInvites controllers.AuthenticationController.sendInvites() POST /auth/startResetPassword controllers.AuthenticationController.handleStartResetPassword() POST /auth/changePassword controllers.AuthenticationController.changePassword() @@ -43,10 +43,10 @@ POST /auth/createUserInOrganization/:organizationId # Configurations GET /user/userConfiguration controllers.ConfigurationController.read() PUT /user/userConfiguration controllers.ConfigurationController.update() -POST /datasetConfigurations/:organizationId/:datasetName controllers.ConfigurationController.readDatasetViewConfiguration(organizationId: String, datasetName: String, sharingToken: Option[String]) -PUT /datasetConfigurations/:organizationId/:datasetName controllers.ConfigurationController.updateDatasetViewConfiguration(organizationId: String, datasetName: String) -GET /datasetConfigurations/default/:organizationId/:datasetName controllers.ConfigurationController.readDatasetAdminViewConfiguration(organizationId: String, datasetName: String) -PUT /datasetConfigurations/default/:organizationId/:datasetName controllers.ConfigurationController.updateDatasetAdminViewConfiguration(organizationId: String, datasetName: String) +POST /datasetConfigurations/:datasetId controllers.ConfigurationController.readDatasetViewConfiguration(datasetId: String, sharingToken: Option[String]) +PUT /datasetConfigurations/:datasetId controllers.ConfigurationController.updateDatasetViewConfiguration(datasetId: String) +GET /datasetConfigurations/default/:datasetId controllers.ConfigurationController.readDatasetAdminViewConfiguration(datasetId: String) +PUT /datasetConfigurations/default/:datasetId controllers.ConfigurationController.updateDatasetAdminViewConfiguration(datasetId: String) # Users POST /user/tasks/request controllers.TaskController.request() @@ -73,25 +73,26 @@ GET /teams/:id/availableTasksReport GET /teams/:id/projectProgressReport controllers.ReportController.projectProgressReport(id: String) # Datasets -POST /datasets/:organizationId/:datasetName/createExplorational controllers.AnnotationController.createExplorational(organizationId: String, datasetName: String) -GET /datasets/:organizationId/:datasetName/sandbox/:typ controllers.AnnotationController.getSandbox(organizationId: String, datasetName: String, typ: String, sharingToken: Option[String]) +POST /datasets/:datasetId/createExplorational controllers.AnnotationController.createExplorational(datasetId: String) +GET /datasets/:datasetId/sandbox/:typ controllers.AnnotationController.getSandbox(datasetId: String, typ: String, sharingToken: Option[String]) GET /datasets 
controllers.DatasetController.list(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationId: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean]) POST /datasets controllers.DatasetController.create(typ: String) POST /datasets/exploreRemote controllers.DatasetController.exploreRemoteDataset() POST /datasets/exploreAndAddRemote controllers.DatasetController.exploreAndAddRemoteDataset() GET /datasets/disambiguate/:datasetName/toNew controllers.DatasetController.getOrganizationForDataset(datasetName: String) -GET /datasets/:organizationId/:datasetName/health controllers.DatasetController.health(organizationId: String, datasetName: String, sharingToken: Option[String]) -PATCH /datasets/:organizationId/:datasetName controllers.DatasetController.update(organizationId: String, datasetName: String) -PATCH /datasets/:organizationId/:datasetName/updatePartial controllers.DatasetController.updatePartial(organizationId: String, datasetName: String) -GET /datasets/:organizationId/:datasetName/accessList controllers.DatasetController.accessList(organizationId: String, datasetName: String) -GET /datasets/:organizationId/:datasetName/sharingToken controllers.DatasetController.getSharingToken(organizationId: String, datasetName: String) -DELETE /datasets/:organizationId/:datasetName/sharingToken controllers.DatasetController.deleteSharingToken(organizationId: String, datasetName: String) -PATCH /datasets/:organizationId/:datasetName/teams controllers.DatasetController.updateTeams(organizationId: String, datasetName: String) -GET /datasets/:organizationId/:datasetName/layers/:layer/thumbnail controllers.DatasetController.thumbnail(organizationId: String, datasetName: String, layer: String, w: Option[Int], h: Option[Int], mappingName: Option[String], sharingToken: Option[String]) -POST /datasets/:organizationId/:datasetName/layers/:layer/segmentAnythingMask controllers.DatasetController.segmentAnythingMask(organizationId: String, datasetName: String, layer: String, intensityMin: Option[Float], intensityMax: Option[Float]) -PUT /datasets/:organizationId/:datasetName/clearThumbnailCache controllers.DatasetController.removeFromThumbnailCache(organizationId: String, datasetName: String) -GET /datasets/:organizationId/:datasetName/isValidNewName controllers.DatasetController.isValidNewName(organizationId: String, datasetName: String) -GET /datasets/:organizationId/:datasetName controllers.DatasetController.read(organizationId: String, datasetName: String, sharingToken: Option[String]) +GET /datasets/disambiguate/:organizationId/:datasetName/toId controllers.DatasetController.getDatasetIdFromNameAndOrganization(datasetName: String, organizationId: String) +GET /datasets/:datasetId/health controllers.DatasetController.health(datasetId: String, sharingToken: Option[String]) +PATCH /datasets/:datasetId controllers.DatasetController.update(datasetId: String) +PATCH /datasets/:datasetId/updatePartial controllers.DatasetController.updatePartial(datasetId: String) +GET /datasets/:datasetId/accessList controllers.DatasetController.accessList(datasetId: String) +GET /datasets/:datasetId/sharingToken controllers.DatasetController.getSharingToken(datasetId: String) +DELETE /datasets/:datasetId/sharingToken controllers.DatasetController.deleteSharingToken(datasetId: String) +PATCH /datasets/:datasetId/teams 
controllers.DatasetController.updateTeams(datasetId: String) +GET /datasets/:datasetId/layers/:layer/thumbnail controllers.DatasetController.thumbnail(datasetId: String, layer: String, w: Option[Int], h: Option[Int], mappingName: Option[String], sharingToken: Option[String]) +POST /datasets/:datasetId/layers/:layer/segmentAnythingMask controllers.DatasetController.segmentAnythingMask(datasetId: String, layer: String, intensityMin: Option[Float], intensityMax: Option[Float]) +PUT /datasets/:datasetId/clearThumbnailCache controllers.DatasetController.removeFromThumbnailCache(datasetId: String) +GET /datasets/:datasetName/isValidNewName controllers.DatasetController.isValidNewName(datasetName: String) +GET /datasets/:datasetId controllers.DatasetController.read(datasetId: String, sharingToken: Option[String]) # Folders GET /folders/root controllers.FolderController.getRoot() @@ -109,7 +110,7 @@ PUT /datastores/:name/datasources PATCH /datastores/:name/status controllers.WKRemoteDataStoreController.statusUpdate(name: String, key: String) POST /datastores/:name/reserveUpload controllers.WKRemoteDataStoreController.reserveDatasetUpload(name: String, key: String, token: String) GET /datastores/:name/getUnfinishedUploadsForUser controllers.WKRemoteDataStoreController.getUnfinishedUploadsForUser(name: String, key: String, token: String, organizationName: String) -POST /datastores/:name/reportDatasetUpload controllers.WKRemoteDataStoreController.reportDatasetUpload(name: String, key: String, token: String, datasetName: String, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean) +POST /datastores/:name/reportDatasetUpload controllers.WKRemoteDataStoreController.reportDatasetUpload(name: String, key: String, token: String, datasetDirectoryName: String, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean) POST /datastores/:name/deleteDataset controllers.WKRemoteDataStoreController.deleteDataset(name: String, key: String) GET /datastores/:name/jobExportProperties controllers.WKRemoteDataStoreController.jobExportProperties(name: String, key: String, jobId: String) GET /datastores/:name/findCredential controllers.WKRemoteDataStoreController.findCredential(name: String, key: String, credentialId: String) @@ -125,7 +126,7 @@ POST /tracingstores/:name/validateUserAccess PUT /tracingstores/:name controllers.TracingStoreController.update(name: String) GET /tracingstores/:name/dataSource controllers.WKRemoteTracingStoreController.dataSourceForTracing(name: String, key: String, tracingId: String) GET /tracingstores/:name/dataSourceId controllers.WKRemoteTracingStoreController.dataSourceIdForTracing(name: String, key: String, tracingId: String) -GET /tracingstores/:name/dataStoreUri/:datasetName controllers.WKRemoteTracingStoreController.dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], datasetName: String) +GET /tracingstores/:name/dataStoreUri/:datasetDirectoryName controllers.WKRemoteTracingStoreController.dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], datasetDirectoryName: String) # User access tokens for datastore authentication POST /userToken/generate controllers.UserTokenController.generateTokenForDataStore() @@ -259,17 +260,17 @@ GET /time/overview GET /jobs/request controllers.WKRemoteWorkerController.requestJobs(key: String) GET /jobs controllers.JobController.list() GET /jobs/status controllers.JobController.status() -POST /jobs/run/convertToWkw/:organizationId/:datasetName 
controllers.JobController.runConvertToWkwJob(organizationId: String, datasetName: String, scale: String, unit: Option[String]) -POST /jobs/run/computeMeshFile/:organizationId/:datasetName controllers.JobController.runComputeMeshFileJob(organizationId: String, datasetName: String, layerName: String, mag: String, agglomerateView: Option[String]) -POST /jobs/run/computeSegmentIndexFile/:organizationId/:datasetName controllers.JobController.runComputeSegmentIndexFileJob(organizationId: String, datasetName: String, layerName: String) -POST /jobs/run/exportTiff/:organizationId/:datasetName controllers.JobController.runExportTiffJob(organizationId: String, datasetName: String, bbox: String, additionalCoordinates: Option[String], layerName: Option[String], mag: Option[String], annotationLayerName: Option[String], annotationId: Option[String], asOmeTiff: Boolean) -POST /jobs/run/inferNuclei/:organizationId/:datasetName controllers.JobController.runInferNucleiJob(organizationId: String, datasetName: String, layerName: String, newDatasetName: String) -POST /jobs/run/inferNeurons/:organizationId/:datasetName controllers.JobController.runInferNeuronsJob(organizationId: String, datasetName: String, layerName: String, bbox: String, newDatasetName: String) -POST /jobs/run/inferMitochondria/:organizationId/:datasetName controllers.JobController.runInferMitochondriaJob(organizationId: String, datasetName: String, layerName: String, bbox: String, newDatasetName: String) -POST /jobs/run/alignSections/:organizationId/:datasetName controllers.JobController.runAlignSectionsJob(organizationId: String, datasetName: String, layerName: String, newDatasetName: String, annotationId: Option[String]) -POST /jobs/run/materializeVolumeAnnotation/:organizationId/:datasetName controllers.JobController.runMaterializeVolumeAnnotationJob(organizationId: String, datasetName: String, fallbackLayerName: String, annotationId: String, annotationType: String, newDatasetName: String, outputSegmentationLayerName: String, mergeSegments: Boolean, volumeLayerName: Option[String]) -POST /jobs/run/findLargestSegmentId/:organizationId/:datasetName controllers.JobController.runFindLargestSegmentIdJob(organizationId: String, datasetName: String, layerName: String) -POST /jobs/run/renderAnimation/:organizationId/:datasetName controllers.JobController.runRenderAnimationJob(organizationId: String, datasetName: String) +POST /jobs/run/convertToWkw/:datasetId controllers.JobController.runConvertToWkwJob(datasetId: String, scale: String, unit: Option[String]) +POST /jobs/run/computeMeshFile/:datasetId controllers.JobController.runComputeMeshFileJob(datasetId: String, layerName: String, mag: String, agglomerateView: Option[String]) +POST /jobs/run/computeSegmentIndexFile/:datasetId controllers.JobController.runComputeSegmentIndexFileJob(datasetId: String, layerName: String) +POST /jobs/run/exportTiff/:datasetId controllers.JobController.runExportTiffJob(datasetId: String, bbox: String, additionalCoordinates: Option[String], layerName: Option[String], mag: Option[String], annotationLayerName: Option[String], annotationId: Option[String], asOmeTiff: Boolean) +POST /jobs/run/inferNuclei/:datasetId controllers.JobController.runInferNucleiJob(datasetId: String, layerName: String, newDatasetName: String) +POST /jobs/run/inferNeurons/:datasetId controllers.JobController.runInferNeuronsJob(datasetId: String, layerName: String, bbox: String, newDatasetName: String) +POST /jobs/run/inferMitochondria/:datasetId 
controllers.JobController.runInferMitochondriaJob(datasetId: String, layerName: String, bbox: String, newDatasetName: String) +POST /jobs/run/alignSections/:datasetId controllers.JobController.runAlignSectionsJob(datasetId: String, layerName: String, newDatasetName: String, annotationId: Option[String]) +POST /jobs/run/materializeVolumeAnnotation/:datasetId controllers.JobController.runMaterializeVolumeAnnotationJob(datasetId: String, fallbackLayerName: String, annotationId: String, annotationType: String, newDatasetName: String, outputSegmentationLayerName: String, mergeSegments: Boolean, volumeLayerName: Option[String]) +POST /jobs/run/findLargestSegmentId/:datasetId controllers.JobController.runFindLargestSegmentIdJob(datasetId: String, layerName: String) +POST /jobs/run/renderAnimation/:datasetId controllers.JobController.runRenderAnimationJob(datasetId: String) GET /jobs/:id controllers.JobController.get(id: String) PATCH /jobs/:id/cancel controllers.JobController.cancel(id: String) POST /jobs/:id/status controllers.WKRemoteWorkerController.updateJobStatus(key: String, id: String) diff --git a/conf/webknossos.versioned.routes b/conf/webknossos.versioned.routes index cfbc59297f1..bd790fe4ed9 100644 --- a/conf/webknossos.versioned.routes +++ b/conf/webknossos.versioned.routes @@ -4,6 +4,8 @@ # Note: keep this in sync with the reported version numbers in the utils.ApiVersioning trait # version log: + # changed in v9: Datasets are now identified by their id, not their name. The routes now need to pass a dataset's id instead of a name and organization id tuple. + # Requests to the TracingStore and DataStore need to address a dataset based on its directoryName and organization id. # changed in v8: Datasets' name was renamed to id and the displayName is now named name. # changed in v7: datasources now contain scale as object with keys factor and unit, e.g.
{"factor": [11,11,5.5], "unit": "nm"} # changed in v6: isValidName always returns Ok, with a JSON object containing possible errors and key "isValid" @@ -12,12 +14,33 @@ # new in v3: annotation info and finish request now take timestamp # new in v2: annotation json contains visibility enum instead of booleans +-> /v9/ webknossos.latest.Routes + +# v8: support changes to v9 +PATCH /v8/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) +GET /v8/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) +PATCH /v8/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String) +GET /v8/datasets/:organizationId/:datasetName controllers.LegacyApiController.readDatasetV8(organizationId: String, datasetName: String, sharingToken: Option[String]) + + -> /v8/ webknossos.latest.Routes +#v7: support changes to v9 +PATCH /v7/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) +GET /v7/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) +PATCH /v7/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String) +GET /v7/datasets/:organizationId/:datasetName controllers.LegacyApiController.readDatasetV8(organizationId: String, datasetName: String, sharingToken: Option[String]) + # v7: support changes to v8 GET /v7/datasets controllers.LegacyApiController.listDatasetsV7(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationName: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean]) + -> /v7/ webknossos.latest.Routes +# v6: support changes to v9 +PATCH /v6/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) +GET /v6/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) +PATCH /v6/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String) + # v6: support changes to v7 GET /v6/datasets controllers.LegacyApiController.listDatasetsV6(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationName: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean]) GET /v6/datasets/:organizationName/:datasetName controllers.LegacyApiController.readDatasetV6(organizationName: String, datasetName: String, sharingToken: Option[String]) @@ -25,6 +48,11 @@ GET /v6/datasets/:organizationName/:datasetName co -> /v6/ webknossos.latest.Routes +# v5: support changes to v9 +PATCH /v5/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) +GET /v5/datasets/:organizationId/:datasetName/sharingToken 
controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) +PATCH /v5/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String) + # v5: support changes to v7 GET /v5/datasets controllers.LegacyApiController.listDatasetsV6(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationName: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean]) GET /v5/datasets/:organizationName/:datasetName controllers.LegacyApiController.readDatasetV6(organizationName: String, datasetName: String, sharingToken: Option[String]) @@ -34,6 +62,10 @@ GET /v5/datasets/:organizationName/:datasetName/isValidNewName co -> /v5/ webknossos.latest.Routes +# v4: support changes to v9 +PATCH /v4/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) +GET /v4/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) +PATCH /v4/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String) # v4: support changes to v7 GET /v4/datasets controllers.LegacyApiController.listDatasetsV6(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationName: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean]) @@ -59,6 +91,10 @@ POST /v4/datasets/:organizationName/:datasetName/createExplorational co -> /v4/ webknossos.latest.Routes +# v3: support changes to v9 +PATCH /v3/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) +GET /v3/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) +PATCH /v3/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String) # v3: support changes to v7 GET /v3/datasets controllers.LegacyApiController.listDatasetsV6(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationName: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean]) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 83adbbc3dae..4a57dbc5d9f 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -2,18 +2,16 @@ import ResumableJS from "resumablejs"; import _ from "lodash"; import dayjs from "dayjs"; import type { - APIActiveUser, APIAnnotation, APIAnnotationInfo, APIAnnotationType, APIAnnotationVisibility, - APIAnnotationWithTask, APIBuildInfo, APIConnectomeFile, APIDataSource, APIDataStore, APIDataset, - APIDatasetId, + APIDataSourceId, APIFeatureToggles, APIHistogramData, APIMapping, @@ -32,7 +30,6 @@ import type { APIScript, APIScriptCreator, APIScriptUpdater, - APITask, APITaskType, 
APITeam, APITimeInterval, @@ -66,7 +63,6 @@ import type { VoxelSize, APITimeTrackingPerUser, } from "types/api_flow_types"; -import { APIAnnotationTypeEnum } from "types/api_flow_types"; import type { LOG_LEVELS, Vector2, Vector3 } from "oxalis/constants"; import Constants, { ControlModeEnum } from "oxalis/constants"; import type { @@ -80,8 +76,6 @@ import type { Mapping, NumberLike, } from "oxalis/store"; -import type { NewTask, TaskCreationResponseContainer } from "admin/task/task_create_bulk_view"; -import type { QueryObject } from "admin/task/task_search_form"; import { V3 } from "libs/mjs"; import type { Versions } from "oxalis/view/version_view"; import { enforceValidatedDatasetViewConfiguration } from "types/schemas/dataset_view_configuration_defaults"; @@ -104,6 +98,7 @@ import type BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; import type { ArbitraryObject } from "types/globals"; import { assertResponseLimit } from "./api/api_utils"; import type { AnnotationTypeFilterEnum } from "admin/statistic/project_and_annotation_type_dropdown"; +import { getDatasetIdFromNameAndOrganization } from "./api/disambiguate_legacy_routes"; export * from "./api/token"; export * from "./api/jobs"; @@ -409,112 +404,6 @@ export async function resumeProject(projectId: string): Promise { }); return transformProject(project); } -// ### Tasks -export function peekNextTasks(): Promise { - return Request.receiveJSON("/api/user/tasks/peek"); -} -export async function requestTask(): Promise { - const taskWithMessages = await Request.receiveJSON("/api/user/tasks/request", { - method: "POST", - }); - // Extract the potential messages property before returning the task to avoid - // failing e2e tests in annotations.e2e.ts - const { messages: _messages, ...task } = taskWithMessages; - return task; -} -export function getAnnotationsForTask(taskId: string): Promise> { - return Request.receiveJSON(`/api/tasks/${taskId}/annotations`); -} -export function deleteTask(taskId: string): Promise { - return Request.receiveJSON(`/api/tasks/${taskId}`, { - method: "DELETE", - }); -} - -function transformTask(task: APITask): APITask { - const tracingTime = task.tracingTime == null ? 
0 : task.tracingTime; - // convert bounding box - let boundingBoxVec6; - - if (task.boundingBox != null) { - const { topLeft, width, height, depth } = task.boundingBox; - boundingBoxVec6 = Utils.numberArrayToVector6(topLeft.concat([width, height, depth])); - } - - return { ...task, tracingTime, boundingBoxVec6 }; -} - -export async function getTasks(queryObject: QueryObject): Promise { - const responses = await Request.sendJSONReceiveJSON("/api/tasks/list", { - data: queryObject, - }); - const tasks = responses.map((response: APITask) => transformTask(response)); - assertResponseLimit(tasks); - return tasks; -} - -export function createTasks(tasks: NewTask[]): Promise { - return Request.sendJSONReceiveJSON("/api/tasks", { - data: tasks, - }); -} - -export function createTaskFromNML(task: NewTask): Promise { - return Request.sendMultipartFormReceiveJSON("/api/tasks/createFromFiles", { - data: { - nmlFiles: task.nmlFiles, - formJSON: JSON.stringify(task), - }, - }); -} - -export async function getTask(taskId: string, options: RequestOptions = {}): Promise { - const task = await Request.receiveJSON(`/api/tasks/${taskId}`, options); - return transformTask(task); -} - -export async function updateTask(taskId: string, task: NewTask): Promise { - const updatedTask = await Request.sendJSONReceiveJSON(`/api/tasks/${taskId}`, { - method: "PUT", - data: task, - }); - return transformTask(updatedTask); -} - -export function finishTask(annotationId: string): Promise { - return finishAnnotation(annotationId, APIAnnotationTypeEnum.Task); -} - -export function transferTask(annotationId: string, userId: string): Promise { - return Request.sendJSONReceiveJSON(`/api/annotations/Task/${annotationId}/transfer`, { - method: "PATCH", - data: { - userId, - }, - }); -} - -export async function transferActiveTasksOfProject( - projectId: string, - userId: string, -): Promise { - return Request.sendJSONReceiveJSON(`/api/projects/${projectId}/transferActiveTasks`, { - data: { - userId, - }, - method: "POST", - }); -} - -export async function getUsersWithActiveTasks(projectId: string): Promise> { - return Request.receiveJSON(`/api/projects/${projectId}/usersWithActiveTasks`); -} - -export async function assignTaskToUser(taskId: string, userId: string): Promise { - return Request.receiveJSON(`/api/tasks/${taskId}/assign?userId=${userId}`, { - method: "POST", - }); -} // ### Private Links @@ -779,18 +668,18 @@ export async function getAnnotationCompoundInformation( } export function getEmptySandboxAnnotationInformation( - datasetId: APIDatasetId, + datasetId: string, tracingType: TracingType, sharingToken?: string | null | undefined, options: RequestOptions = {}, ): Promise { const sharingTokenSuffix = sharingToken != null ? 
`?sharingToken=${sharingToken}` : ""; - const infoUrl = `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/sandbox/${tracingType}${sharingTokenSuffix}`; + const infoUrl = `/api/datasets/${datasetId}/sandbox/${tracingType}${sharingTokenSuffix}`; return Request.receiveJSON(infoUrl, options); } export function createExplorational( - datasetId: APIDatasetId, + datasetId: string, typ: TracingType, autoFallbackLayer: boolean, fallbackLayerName?: string | null | undefined, @@ -798,7 +687,7 @@ export function createExplorational( magRestrictions?: APIMagRestrictions | null | undefined, options: RequestOptions = {}, ): Promise { - const url = `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/createExplorational`; + const url = `/api/datasets/${datasetId}/createExplorational`; let layers: Array = []; if (typ === "skeleton") { @@ -961,13 +850,13 @@ export function getNewestVersionForTracing( export function hasSegmentIndexInDataStore( dataStoreUrl: string, - dataSetName: string, + datasetDirectoryName: string, dataLayerName: string, organizationId: string, ) { return doWithToken((token) => Request.receiveJSON( - `${dataStoreUrl}/data/datasets/${organizationId}/${dataSetName}/layers/${dataLayerName}/hasSegmentIndex?token=${token}`, + `${dataStoreUrl}/data/datasets/${organizationId}/${datasetDirectoryName}/layers/${dataLayerName}/hasSegmentIndex?token=${token}`, ), ); } @@ -1125,21 +1014,22 @@ export async function getDatasets( export function readDatasetDatasource(dataset: APIDataset): Promise { return doWithToken((token) => Request.receiveJSON( - `${dataset.dataStore.url}/data/datasets/${dataset.owningOrganization}/${dataset.name}/readInboxDataSource?token=${token}`, + `${dataset.dataStore.url}/data/datasets/${dataset.owningOrganization}/${dataset.directoryName}/readInboxDataSource?token=${token}`, ), ); } export async function updateDatasetDatasource( - datasetName: string, + datasetDirectoryName: string, dataStoreUrl: string, datasource: APIDataSource, ): Promise { await doWithToken((token) => Request.sendJSONReceiveJSON( - `${dataStoreUrl}/data/datasets/${datasource.id.team}/${datasetName}?token=${token}`, + `${dataStoreUrl}/data/datasets/${datasource.id.team}/${datasetDirectoryName}?token=${token}`, { data: datasource, + method: "PUT", }, ), ); @@ -1152,15 +1042,27 @@ export async function getActiveDatasetsOfMyOrganization(): Promise { const sharingTokenSuffix = sharingToken != null ? 
`?sharingToken=${sharingToken}` : ""; - return Request.receiveJSON( - `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}${sharingTokenSuffix}`, + return Request.receiveJSON(`/api/datasets/${datasetId}${sharingTokenSuffix}`, options); +} + +export async function getDatasetLegacy( + datasetOrga: string, + datasetName: string, + sharingToken?: string | null | undefined, + options: RequestOptions = {}, +): Promise { + const datasetId = await getDatasetIdFromNameAndOrganization( + datasetName, + datasetOrga, + sharingToken, options, ); + return getDataset(datasetId, sharingToken, options); } export type DatasetUpdater = { @@ -1174,16 +1076,13 @@ export type DatasetUpdater = { }; export function updateDatasetPartial( - datasetId: APIDatasetId, + datasetId: string, updater: DatasetUpdater, ): Promise { - return Request.sendJSONReceiveJSON( - `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/updatePartial`, - { - method: "PATCH", - data: updater, - }, - ); + return Request.sendJSONReceiveJSON(`/api/datasets/${datasetId}/updatePartial`, { + method: "PATCH", + data: updater, + }); } export async function getDatasetViewConfiguration( @@ -1193,7 +1092,7 @@ export async function getDatasetViewConfiguration( ): Promise { const sharingTokenSuffix = sharingToken != null ? `?sharingToken=${sharingToken}` : ""; const settings = await Request.sendJSONReceiveJSON( - `/api/datasetConfigurations/${dataset.owningOrganization}/${dataset.name}${sharingTokenSuffix}`, + `/api/datasetConfigurations/${dataset.id}${sharingTokenSuffix}`, { data: displayedVolumeTracings, method: "POST", @@ -1204,41 +1103,33 @@ export async function getDatasetViewConfiguration( } export function updateDatasetConfiguration( - datasetId: APIDatasetId, + datasetId: string, datasetConfig: PartialDatasetConfiguration, options: RequestOptions = {}, ): Promise> { - return Request.sendJSONReceiveJSON( - `/api/datasetConfigurations/${datasetId.owningOrganization}/${datasetId.name}`, - { ...options, method: "PUT", data: datasetConfig }, - ); + return Request.sendJSONReceiveJSON(`/api/datasetConfigurations/${datasetId}`, { + ...options, + method: "PUT", + data: datasetConfig, + }); } -export function getDatasetDefaultConfiguration( - datasetId: APIDatasetId, -): Promise { - return Request.receiveJSON( - `/api/datasetConfigurations/default/${datasetId.owningOrganization}/${datasetId.name}`, - ); +export function getDatasetDefaultConfiguration(datasetId: string): Promise { + return Request.receiveJSON(`/api/datasetConfigurations/default/${datasetId}`); } export function updateDatasetDefaultConfiguration( - datasetId: APIDatasetId, + datasetId: string, datasetConfiguration: DatasetConfiguration, ): Promise { - return Request.sendJSONReceiveJSON( - `/api/datasetConfigurations/default/${datasetId.owningOrganization}/${datasetId.name}`, - { - method: "PUT", - data: datasetConfiguration, - }, - ); + return Request.sendJSONReceiveJSON(`/api/datasetConfigurations/default/${datasetId}`, { + method: "PUT", + data: datasetConfiguration, + }); } -export function getDatasetAccessList(datasetId: APIDatasetId): Promise> { - return Request.receiveJSON( - `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/accessList`, - ); +export function getDatasetAccessList(dataset: APIDataset): Promise> { + return Request.receiveJSON(`/api/datasets/${dataset.id}/accessList`); } type DatasetCompositionArgs = { @@ -1249,10 +1140,23 @@ type DatasetCompositionArgs = { layers: LayerLink[]; }; -export function 
createDatasetComposition(datastoreUrl: string, payload: DatasetCompositionArgs) { +export function createDatasetComposition( + datastoreUrl: string, + payload: DatasetCompositionArgs, +): Promise { + // Formatting the dataSourceId to the old format so that the backend can parse it. + // And removing the datasetId as the datastore cannot use it. + const updatedLayers = payload.layers.map(({ dataSourceId, datasetId, ...rest }) => ({ + ...rest, + dataSourceId: { name: dataSourceId.directoryName, team: dataSourceId.owningOrganization }, + })); + const payloadWithUpdatedLayers = { + ...payload, + layers: updatedLayers, + }; return doWithToken((token) => Request.sendJSONReceiveJSON(`${datastoreUrl}/data/datasets/compose?token=${token}`, { - data: payload, + data: payloadWithUpdatedLayers, }), ); } @@ -1289,8 +1193,10 @@ export function createResumableUpload(datastoreUrl: string, uploadId: string): P } type ReserveUploadInformation = { uploadId: string; - organization: string; name: string; + directoryName: string; + newDatasetId: string; + organization: string; totalFileCount: number; filePaths: Array; initialTeams: Array; @@ -1311,15 +1217,13 @@ export function reserveDatasetUpload( export type UnfinishedUpload = { uploadId: string; - datasetId: { name: string; organizationName: string }; + datasetName: string; folderId: string; created: number; filePaths: Array | null | undefined; allowedTeams: Array; }; -type OldDatasetIdFormat = { name: string; team: string }; - export function getUnfinishedUploads( datastoreHost: string, organizationName: string, @@ -1330,19 +1234,19 @@ export function getUnfinishedUploads( { host: datastoreHost, }, - )) as Array; - // Rename "team" to "organization" as this is the actual used current naming. - return unfinishedUploads.map(({ dataSourceId: { name, team }, ...rest }) => ({ - ...rest, - datasetId: { name, organizationName: team }, - })); + )) as Array; + return unfinishedUploads; }); } +type NewDatasetReply = { + newDatasetId: string; +}; + export function finishDatasetUpload( datastoreHost: string, uploadInformation: ArbitraryObject, -): Promise { +): Promise { return doWithToken((token) => Request.sendJSONReceiveJSON(`/data/datasets/finishUpload?token=${token}`, { data: uploadInformation, @@ -1407,7 +1311,7 @@ export async function storeRemoteDataset( organizationId: string, datasource: string, folderId: string | null, -): Promise { +): Promise { return doWithToken((token) => { const params = new URLSearchParams(); params.append("token", token); @@ -1418,7 +1322,7 @@ export async function storeRemoteDataset( return Request.sendJSONReceiveJSON( `${datastoreUrl}/data/datasets/${organizationId}/${datasetName}?${params}`, { - method: "PUT", + method: "POST", data: datasource, }, ); @@ -1426,16 +1330,12 @@ export async function storeRemoteDataset( } // Returns void if the name is valid. Otherwise, a string is returned which denotes the reason. 
-export async function isDatasetNameValid( - datasetId: APIDatasetId, -): Promise { - if (datasetId.name === "") { +export async function isDatasetNameValid(datasetName: string): Promise { + if (datasetName === "") { return "The dataset name must not be empty."; } - const response = await Request.receiveJSON( - `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/isValidNewName`, - ); + const response = await Request.receiveJSON(`/api/datasets/${datasetName}/isValidNewName`); if (response.isValid) { return null; } else { @@ -1444,16 +1344,13 @@ export async function isDatasetNameValid( } export function updateDatasetTeams( - datasetId: APIDatasetId, + datasetId: string, newTeams: Array, ): Promise { - return Request.sendJSONReceiveJSON( - `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/teams`, - { - method: "PATCH", - data: newTeams, - }, - ); + return Request.sendJSONReceiveJSON(`/api/datasets/${datasetId}/teams`, { + method: "PATCH", + data: newTeams, + }); } export async function triggerDatasetCheck(datastoreHost: string): Promise { @@ -1467,12 +1364,12 @@ export async function triggerDatasetCheck(datastoreHost: string): Promise export async function triggerDatasetClearCache( datastoreHost: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, layerName?: string, ): Promise { await doWithToken((token) => Request.triggerRequest( - `/data/triggers/reload/${datasetId.owningOrganization}/${datasetId.name}?token=${token}${ + `/data/triggers/reload/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}?token=${token}${ layerName ? `&layerName=${layerName}` : "" }`, { @@ -1485,11 +1382,11 @@ export async function triggerDatasetClearCache( export async function deleteDatasetOnDisk( datastoreHost: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, ): Promise { await doWithToken((token) => Request.triggerRequest( - `/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/deleteOnDisk?token=${token}`, + `/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/deleteOnDisk?token=${token}`, { host: datastoreHost, method: "DELETE", @@ -1498,52 +1395,39 @@ export async function deleteDatasetOnDisk( ); } -export async function triggerDatasetClearThumbnailCache(datasetId: APIDatasetId): Promise { - await Request.triggerRequest( - `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/clearThumbnailCache`, - { - method: "PUT", - }, - ); +export async function triggerDatasetClearThumbnailCache(datasetId: string): Promise { + await Request.triggerRequest(`/api/datasets/${datasetId}/clearThumbnailCache`, { + method: "PUT", + }); } export async function clearCache(dataset: APIMaybeUnimportedDataset, layerName?: string) { return Promise.all([ triggerDatasetClearCache(dataset.dataStore.url, dataset, layerName), - triggerDatasetClearThumbnailCache(dataset), + triggerDatasetClearThumbnailCache(dataset.id), ]); } export async function getDatasetSharingToken( - datasetId: APIDatasetId, + datasetId: string, options?: RequestOptions, ): Promise { const { sharingToken } = await Request.receiveJSON( - `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/sharingToken`, + `/api/datasets/${datasetId}/sharingToken`, options, ); return sharingToken; } -export async function revokeDatasetSharingToken(datasetId: APIDatasetId): Promise { - await Request.triggerRequest( - `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/sharingToken`, - { - method: "DELETE", - }, - ); -} - -export async 
function getOrganizationForDataset(datasetName: string): Promise { - const { organizationId } = await Request.receiveJSON( - `/api/datasets/disambiguate/${datasetName}/toNew`, - ); - return organizationId; +export async function revokeDatasetSharingToken(datasetId: string): Promise { + await Request.triggerRequest(`/api/datasets/${datasetId}/sharingToken`, { + method: "DELETE", + }); } export async function findDataPositionForLayer( datastoreUrl: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, layerName: string, ): Promise<{ position: Vector3 | null | undefined; @@ -1551,7 +1435,7 @@ export async function findDataPositionForLayer( }> { const { position, resolution } = await doWithToken((token) => Request.receiveJSON( - `${datastoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/findData?token=${token}`, + `${datastoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/findData?token=${token}`, ), ); return { @@ -1578,12 +1462,12 @@ export async function findDataPositionForVolumeTracing( export async function getHistogramForLayer( datastoreUrl: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, layerName: string, ): Promise { return doWithToken((token) => Request.receiveJSON( - `${datastoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/histogram?token=${token}`, + `${datastoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/histogram?token=${token}`, { showErrorToast: false }, ), ); @@ -1591,25 +1475,25 @@ export async function getHistogramForLayer( export async function getMappingsForDatasetLayer( datastoreUrl: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, layerName: string, ): Promise> { return doWithToken((token) => Request.receiveJSON( - `${datastoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/mappings?token=${token}`, + `${datastoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/mappings?token=${token}`, ), ); } export function fetchMapping( datastoreUrl: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, layerName: string, mappingName: string, ): Promise { return doWithToken((token) => Request.receiveJSON( - `${datastoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/mappings/${mappingName}?token=${token}`, + `${datastoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/mappings/${mappingName}?token=${token}`, ), ); } @@ -1639,7 +1523,7 @@ export function getEditableMappingInfo( export function getPositionForSegmentInAgglomerate( datastoreUrl: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, layerName: string, mappingName: string, segmentId: number, @@ -1650,8 +1534,8 @@ export function getPositionForSegmentInAgglomerate( segmentId: `${segmentId}`, }); const position = await Request.receiveJSON( - `${datastoreUrl}/data/datasets/${datasetId.owningOrganization}/${ - datasetId.name + `${datastoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${ + dataSourceId.directoryName }/layers/${layerName}/agglomerates/${mappingName}/positionForSegment?${urlParams.toString()}`, ); return position; @@ -1660,12 +1544,12 @@ export function getPositionForSegmentInAgglomerate( export async function 
getAgglomeratesForDatasetLayer( datastoreUrl: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, layerName: string, ): Promise> { return doWithToken((token) => Request.receiveJSON( - `${datastoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/agglomerates?token=${token}`, + `${datastoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/agglomerates?token=${token}`, ), ); } @@ -1890,7 +1774,7 @@ export async function isDatasetAccessibleBySwitching( ); } else { return Request.receiveJSON( - `/api/auth/accessibleBySwitching?organizationId=${commandType.owningOrganization}&datasetName=${commandType.name}`, + `/api/auth/accessibleBySwitching?datasetId=${commandType.datasetId}`, { showErrorToast: false, }, @@ -2071,14 +1955,14 @@ export function getBucketPositionsForAdHocMesh( export function getAgglomerateSkeleton( dataStoreUrl: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, layerName: string, mappingId: string, agglomerateId: number, ): Promise { return doWithToken((token) => Request.receiveArraybuffer( - `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/agglomerates/${mappingId}/skeleton/${agglomerateId}?token=${token}`, // The webworker code cannot do proper error handling and always expects an array buffer from the server. + `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/agglomerates/${mappingId}/skeleton/${agglomerateId}?token=${token}`, // The webworker code cannot do proper error handling and always expects an array buffer from the server. // The webworker code cannot do proper error handling and always expects an array buffer from the server. // However, the server might send an error json instead of an array buffer. Therefore, don't use the webworker code. 
{ @@ -2091,7 +1975,7 @@ export function getAgglomerateSkeleton( export async function getAgglomeratesForSegmentsFromDatastore( dataStoreUrl: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, layerName: string, mappingId: string, segmentIds: Array, @@ -2099,7 +1983,7 @@ export async function getAgglomeratesForSegmentsFromDatastore(segmentIds); const listArrayBuffer: ArrayBuffer = await doWithToken((token) => Request.receiveArraybuffer( - `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/agglomerates/${mappingId}/agglomeratesForSegments?token=${token}`, + `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/agglomerates/${mappingId}/agglomeratesForSegments?token=${token}`, { method: "POST", body: segmentIdBuffer, @@ -2170,12 +2054,12 @@ export function getEditableAgglomerateSkeleton( export async function getMeshfilesForDatasetLayer( dataStoreUrl: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, layerName: string, ): Promise> { const meshFiles: Array = await doWithToken((token) => Request.receiveJSON( - `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/meshes?token=${token}`, + `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/meshes?token=${token}`, ), ); @@ -2191,19 +2075,19 @@ export async function getMeshfilesForDatasetLayer( // ### Connectomes export function getConnectomeFilesForDatasetLayer( dataStoreUrl: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, layerName: string, ): Promise> { return doWithToken((token) => Request.receiveJSON( - `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/connectomes?token=${token}`, + `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/connectomes?token=${token}`, ), ); } export function getSynapsesOfAgglomerates( dataStoreUrl: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, layerName: string, connectomeFile: string, agglomerateIds: Array, @@ -2215,7 +2099,7 @@ export function getSynapsesOfAgglomerates( > { return doWithToken((token) => Request.sendJSONReceiveJSON( - `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/connectomes/synapses?token=${token}`, + `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/connectomes/synapses?token=${token}`, { data: { connectomeFile, @@ -2228,7 +2112,7 @@ export function getSynapsesOfAgglomerates( function getSynapseSourcesOrDestinations( dataStoreUrl: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, layerName: string, connectomeFile: string, synapseIds: Array, @@ -2236,7 +2120,7 @@ function getSynapseSourcesOrDestinations( ): Promise> { return doWithToken((token) => Request.sendJSONReceiveJSON( - `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/connectomes/synapses/${srcOrDst}?token=${token}`, + `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/connectomes/synapses/${srcOrDst}?token=${token}`, { data: { connectomeFile, @@ -2259,14 +2143,14 @@ export function getSynapseDestinations(...args: any): Promise> { export function getSynapsePositions( 
dataStoreUrl: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, layerName: string, connectomeFile: string, synapseIds: Array, ): Promise> { return doWithToken((token) => Request.sendJSONReceiveJSON( - `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/connectomes/synapses/positions?token=${token}`, + `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/connectomes/synapses/positions?token=${token}`, { data: { connectomeFile, @@ -2279,7 +2163,7 @@ export function getSynapsePositions( export function getSynapseTypes( dataStoreUrl: string, - datasetId: APIDatasetId, + dataSourceId: APIDataSourceId, layerName: string, connectomeFile: string, synapseIds: Array, @@ -2289,7 +2173,7 @@ export function getSynapseTypes( }> { return doWithToken((token) => Request.sendJSONReceiveJSON( - `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/connectomes/synapses/types?token=${token}`, + `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/connectomes/synapses/types?token=${token}`, { data: { connectomeFile, @@ -2395,7 +2279,7 @@ export async function getSamMask( const { type: interactionType, ...promptWithoutType } = prompt; const buffer = await Request.sendJSONReceiveArraybuffer( - `/api/datasets/${dataset.owningOrganization}/${dataset.name}/layers/${layerName}/segmentAnythingMask?${params}`, + `/api/datasets/${dataset.id}/layers/${layerName}/segmentAnythingMask?${params}`, { data: { mag, diff --git a/frontend/javascripts/admin/api/disambiguate_legacy_routes.tsx b/frontend/javascripts/admin/api/disambiguate_legacy_routes.tsx new file mode 100644 index 00000000000..b61abc10a8c --- /dev/null +++ b/frontend/javascripts/admin/api/disambiguate_legacy_routes.tsx @@ -0,0 +1,22 @@ +import Request, { type RequestOptions } from "libs/request"; + +export async function getOrganizationForDataset(datasetName: string): Promise { + const { organizationId } = await Request.receiveJSON( + `/api/datasets/disambiguate/${datasetName}/toNew`, + ); + return organizationId; +} + +export async function getDatasetIdFromNameAndOrganization( + datasetName: string, + organizationId: string, + sharingToken?: string | null | undefined, + options: RequestOptions = {}, +): Promise { + const sharingTokenSuffix = sharingToken != null ? 
`?sharingToken=${sharingToken}` : ""; + const { id: datasetId } = await Request.receiveJSON( + `/api/datasets/disambiguate/${organizationId}/${datasetName}/toId${sharingTokenSuffix}`, + options, + ); + return datasetId; +} diff --git a/frontend/javascripts/admin/api/jobs.ts b/frontend/javascripts/admin/api/jobs.ts index 99d1c6e1bfe..d7c9a318cd0 100644 --- a/frontend/javascripts/admin/api/jobs.ts +++ b/frontend/javascripts/admin/api/jobs.ts @@ -9,6 +9,7 @@ import type { APIEffectiveJobState, AiModel, RenderAnimationOptions, + APIDataset, AdditionalCoordinate, } from "types/api_flow_types"; import { assertResponseLimit } from "./api_utils"; @@ -16,10 +17,12 @@ import { assertResponseLimit } from "./api_utils"; function transformBackendJobToAPIJob(job: any): APIJob { return { id: job.id, + datasetId: job.commandArgs.datasetId, owner: job.owner, type: job.command, datasetName: job.commandArgs.dataset_name, - organizationId: job.commandArgs.organization_name, + datasetDirectoryName: job.commandArgs.dataset_directory_name, + organizationId: job.commandArgs.organization_id, layerName: job.commandArgs.layer_name || job.commandArgs.volume_layer_name, annotationLayerName: job.commandArgs.annotation_layer_name, boundingBox: job.commandArgs.bbox, @@ -73,13 +76,12 @@ export async function cancelJob(jobId: string): Promise { } export async function startConvertToWkwJob( - datasetName: string, - organizationId: string, + datasetId: APIDataset["id"], scale: Vector3, unit: UnitLong, ): Promise { return Request.receiveJSON( - `/api/jobs/run/convertToWkw/${organizationId}/${datasetName}?scale=${scale.toString()}&unit=${unit}`, + `/api/jobs/run/convertToWkw/${datasetId}?scale=${scale.toString()}&unit=${unit}`, { method: "POST", }, @@ -87,12 +89,11 @@ export async function startConvertToWkwJob( } export async function startFindLargestSegmentIdJob( - datasetName: string, - organizationId: string, + datasetId: string, layerName: string, ): Promise { return Request.receiveJSON( - `/api/jobs/run/findLargestSegmentId/${organizationId}/${datasetName}?layerName=${layerName}`, + `/api/jobs/run/findLargestSegmentId/${datasetId}?layerName=${layerName}`, { method: "POST", }, @@ -100,8 +101,7 @@ export async function startFindLargestSegmentIdJob( } export async function startExportTiffJob( - datasetName: string, - organizationId: string, + datasetId: APIDataset["id"], bbox: Vector6, additionalCoordinates: AdditionalCoordinate[] | null, layerName: string | null | undefined, @@ -126,17 +126,13 @@ export async function startExportTiffJob( if (additionalCoordinates != null) { params.append("additionalCoordinates", JSON.stringify(additionalCoordinates)); } - return Request.receiveJSON( - `/api/jobs/run/exportTiff/${organizationId}/${datasetName}?${params}`, - { - method: "POST", - }, - ); + return Request.receiveJSON(`/api/jobs/run/exportTiff/${datasetId}?${params}`, { + method: "POST", + }); } export function startComputeMeshFileJob( - organizationId: string, - datasetName: string, + datasetId: APIDataset["id"], layerName: string, mag: Vector3, agglomerateView?: string, @@ -149,38 +145,30 @@ export function startComputeMeshFileJob( params.append("agglomerateView", agglomerateView); } - return Request.receiveJSON( - `/api/jobs/run/computeMeshFile/${organizationId}/${datasetName}?${params}`, - { - method: "POST", - }, - ); + return Request.receiveJSON(`/api/jobs/run/computeMeshFile/${datasetId}?${params}`, { + method: "POST", + }); } export function startComputeSegmentIndexFileJob( - organizationId: string, - datasetName: 
string, + datasetId: APIDataset["id"], layerName: string, ): Promise { const params = new URLSearchParams(); params.append("layerName", layerName); - return Request.receiveJSON( - `/api/jobs/run/computeSegmentIndexFile/${organizationId}/${datasetName}?${params}`, - { - method: "POST", - }, - ); + return Request.receiveJSON(`/api/jobs/run/computeSegmentIndexFile/${datasetId}?${params}`, { + method: "POST", + }); } export function startNucleiInferralJob( - organizationId: string, - datasetName: string, + datasetId: APIDataset["id"], layerName: string, newDatasetName: string, ): Promise { return Request.receiveJSON( - `/api/jobs/run/inferNuclei/${organizationId}/${datasetName}?layerName=${layerName}&newDatasetName=${newDatasetName}`, + `/api/jobs/run/inferNuclei/${datasetId}?layerName=${layerName}&newDatasetName=${newDatasetName}`, { method: "POST", }, @@ -188,8 +176,7 @@ export function startNucleiInferralJob( } export function startNeuronInferralJob( - organizationId: string, - datasetName: string, + datasetId: APIDataset["id"], layerName: string, bbox: Vector6, newDatasetName: string, @@ -199,31 +186,23 @@ export function startNeuronInferralJob( bbox: bbox.join(","), newDatasetName, }); - return Request.receiveJSON( - `/api/jobs/run/inferNeurons/${organizationId}/${datasetName}?${urlParams.toString()}`, - { - method: "POST", - }, - ); + return Request.receiveJSON(`/api/jobs/run/inferNeurons/${datasetId}?${urlParams.toString()}`, { + method: "POST", + }); } export function startRenderAnimationJob( - organizationId: string, - datasetName: string, + datasetId: APIDataset["id"], animationOptions: RenderAnimationOptions, ): Promise { - return Request.sendJSONReceiveJSON( - `/api/jobs/run/renderAnimation/${organizationId}/${datasetName}`, - { - data: animationOptions, - }, - ); + return Request.sendJSONReceiveJSON(`/api/jobs/run/renderAnimation/${datasetId}`, { + data: animationOptions, + }); } function startSegmentationAnnotationDependentJob( jobURLPath: string, - organizationId: string, - datasetName: string, + datasetId: APIDataset["id"], fallbackLayerName: string, volumeLayerName: string | null | undefined, newDatasetName: string, @@ -231,10 +210,7 @@ function startSegmentationAnnotationDependentJob( annotationType: APIAnnotationType, mergeSegments?: boolean, ): Promise { - const requestURL = new URL( - `/api/jobs/run/${jobURLPath}/${organizationId}/${datasetName}`, - location.origin, - ); + const requestURL = new URL(`/api/jobs/run/${jobURLPath}/${datasetId}`, location.origin); if (volumeLayerName != null) { requestURL.searchParams.append("volumeLayerName", volumeLayerName); } @@ -253,8 +229,7 @@ function startSegmentationAnnotationDependentJob( } export function startMaterializingVolumeAnnotationJob( - organizationId: string, - datasetName: string, + datasetId: APIDataset["id"], fallbackLayerName: string, volumeLayerName: string | null | undefined, newDatasetName: string, @@ -264,8 +239,7 @@ export function startMaterializingVolumeAnnotationJob( ): Promise { return startSegmentationAnnotationDependentJob( "materializeVolumeAnnotation", - organizationId, - datasetName, + datasetId, fallbackLayerName, volumeLayerName, newDatasetName, @@ -276,8 +250,7 @@ export function startMaterializingVolumeAnnotationJob( } export function startMitochondriaInferralJob( - organizationId: string, - datasetName: string, + datasetId: APIDataset["id"], layerName: string, bbox: Vector6, newDatasetName: string, @@ -288,7 +261,7 @@ export function startMitochondriaInferralJob( newDatasetName, }); return 
Request.receiveJSON( - `/api/jobs/run/inferMitochondria/${organizationId}/${datasetName}?${urlParams.toString()}`, + `/api/jobs/run/inferMitochondria/${datasetId}?${urlParams.toString()}`, { method: "POST", }, @@ -296,8 +269,7 @@ export function startMitochondriaInferralJob( } export function startAlignSectionsJob( - organizationId: string, - datasetName: string, + datasetId: APIDataset["id"], layerName: string, newDatasetName: string, annotationId?: string, @@ -312,12 +284,9 @@ export function startAlignSectionsJob( layerName, newDatasetName, }); - return Request.receiveJSON( - `/api/jobs/run/alignSections/${organizationId}/${datasetName}?${urlParams.toString()}`, - { - method: "POST", - }, - ); + return Request.receiveJSON(`/api/jobs/run/alignSections/${datasetId}?${urlParams.toString()}`, { + method: "POST", + }); } type AiModelCategory = "em_neurons" | "em_nuclei"; @@ -347,7 +316,7 @@ export function runTraining(params: RunTrainingParameters) { type RunInferenceParameters = { annotationId?: string; aiModelId: string; - datasetName: string; + datasetDirectoryName: string; colorLayerName: string; boundingBox: Vector6; newDatasetName: string; diff --git a/frontend/javascripts/admin/api/mesh.ts b/frontend/javascripts/admin/api/mesh.ts index 0c638d36fc4..e19dfe0ee1d 100644 --- a/frontend/javascripts/admin/api/mesh.ts +++ b/frontend/javascripts/admin/api/mesh.ts @@ -1,6 +1,6 @@ import Request from "libs/request"; import type { Vector3, Vector4 } from "oxalis/constants"; -import type { APIDatasetId } from "types/api_flow_types"; +import type { APIDataSourceId } from "types/api_flow_types"; import { doWithToken } from "./token"; export type MeshChunk = { @@ -31,7 +31,7 @@ type SegmentInfo = { export function getMeshfileChunksForSegment( dataStoreUrl: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName: string, meshFile: string, segmentId: number, @@ -55,7 +55,7 @@ export function getMeshfileChunksForSegment( params.append("editableMappingTracingId", editableMappingTracingId); } return Request.sendJSONReceiveJSON( - `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/meshes/chunks?${params}`, + `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.directoryName}/layers/${layerName}/meshes/chunks?${params}`, { data: { meshFile, @@ -79,13 +79,13 @@ type MeshChunkDataRequestList = { export function getMeshfileChunkData( dataStoreUrl: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName: string, batchDescription: MeshChunkDataRequestList, ): Promise { return doWithToken(async (token) => { const dracoDataChunks = await Request.sendJSONReceiveArraybuffer( - `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/meshes/chunks/data?token=${token}`, + `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.directoryName}/layers/${layerName}/meshes/chunks/data?token=${token}`, { data: batchDescription, useWebworkerForArrayBuffer: true, diff --git a/frontend/javascripts/admin/api/tasks.ts b/frontend/javascripts/admin/api/tasks.ts new file mode 100644 index 00000000000..a26d5033358 --- /dev/null +++ b/frontend/javascripts/admin/api/tasks.ts @@ -0,0 +1,131 @@ +import _ from "lodash"; +import type { + APIActiveUser, + APIAnnotation, + APIAnnotationWithTask, + APITask, +} from "types/api_flow_types"; +import { APIAnnotationTypeEnum } from "types/api_flow_types"; +import type { NewTask, TaskCreationResponseContainer } from 
"admin/task/task_create_bulk_view"; +import type { QueryObject } from "admin/task/task_search_form"; +import type { RequestOptions } from "libs/request"; +import Request from "libs/request"; +import * as Utils from "libs/utils"; +import { assertResponseLimit } from "./api_utils"; +import type { ArbitraryObject } from "types/globals"; +import { finishAnnotation } from "admin/admin_rest_api"; + +function adaptNewTaskToAPITask(task: NewTask): ArbitraryObject { + const { datasetName } = task; + return { + ..._.omit(task, ["datasetName"]), + dataSet: datasetName, + }; +} + +export function peekNextTasks(): Promise { + return Request.receiveJSON("/api/user/tasks/peek"); +} +export async function requestTask(): Promise { + const taskWithMessages = await Request.receiveJSON("/api/user/tasks/request", { + method: "POST", + }); + // Extract the potential messages property before returning the task to avoid + // failing e2e tests in annotations.e2e.ts + const { messages: _messages, ...task } = taskWithMessages; + return task; +} +export function getAnnotationsForTask(taskId: string): Promise> { + return Request.receiveJSON(`/api/tasks/${taskId}/annotations`); +} +export function deleteTask(taskId: string): Promise { + return Request.receiveJSON(`/api/tasks/${taskId}`, { + method: "DELETE", + }); +} + +function transformTask(task: APITask): APITask { + const tracingTime = task.tracingTime == null ? 0 : task.tracingTime; + // convert bounding box + let boundingBoxVec6; + + if (task.boundingBox != null) { + const { topLeft, width, height, depth } = task.boundingBox; + boundingBoxVec6 = Utils.numberArrayToVector6(topLeft.concat([width, height, depth])); + } + + return { ...task, tracingTime, boundingBoxVec6 }; +} + +export async function getTasks(queryObject: QueryObject): Promise { + const responses = await Request.sendJSONReceiveJSON("/api/tasks/list", { + data: queryObject, + }); + const tasks = responses.map((response: APITask) => transformTask(response)); + assertResponseLimit(tasks); + return tasks; +} + +export function createTasks(tasks: NewTask[]): Promise { + const adaptedTasks = tasks.map(adaptNewTaskToAPITask); + return Request.sendJSONReceiveJSON("/api/tasks", { + data: adaptedTasks, + }); +} + +export function createTaskFromNML(task: NewTask): Promise { + return Request.sendMultipartFormReceiveJSON("/api/tasks/createFromFiles", { + data: { + nmlFiles: task.nmlFiles, + formJSON: JSON.stringify(adaptNewTaskToAPITask(task)), + }, + }); +} + +export async function getTask(taskId: string, options: RequestOptions = {}): Promise { + const task = await Request.receiveJSON(`/api/tasks/${taskId}`, options); + return transformTask(task); +} + +export async function updateTask(taskId: string, task: NewTask): Promise { + const updatedTask = await Request.sendJSONReceiveJSON(`/api/tasks/${taskId}`, { + method: "PUT", + data: adaptNewTaskToAPITask(task), + }); + return transformTask(updatedTask); +} + +export function finishTask(annotationId: string): Promise { + return finishAnnotation(annotationId, APIAnnotationTypeEnum.Task); +} + +export function transferTask(annotationId: string, userId: string): Promise { + return Request.sendJSONReceiveJSON(`/api/annotations/Task/${annotationId}/transfer`, { + method: "PATCH", + data: { + userId, + }, + }); +} + +export async function transferActiveTasksOfProject( + projectId: string, + userId: string, +): Promise { + return Request.sendJSONReceiveJSON(`/api/projects/${projectId}/transferActiveTasks`, { + data: { + userId, + }, + method: "POST", + }); +} + +export 
+export async function getUsersWithActiveTasks(projectId: string): Promise> {
+  return Request.receiveJSON(`/api/projects/${projectId}/usersWithActiveTasks`);
+}
+
+export async function assignTaskToUser(taskId: string, userId: string): Promise {
+  return Request.receiveJSON(`/api/tasks/${taskId}/assign?userId=${userId}`, {
+    method: "POST",
+  });
+}
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
index 8d183ddfad3..22c252d411e 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
@@ -9,7 +9,7 @@ import _ from "lodash";
 import type { Vector3 } from "oxalis/constants";
 import { parseNml } from "oxalis/model/helpers/nml_helpers";
 import {
-  tryToFetchDatasetsByName,
+  tryToFetchDatasetsByNameOrId,
   type WizardComponentProps,
   type WizardContext,
   type FileList,
@@ -166,10 +166,12 @@ async function parseNmlFiles(fileList: FileList): Promise
     throw new SoftError("NML files should not be empty.");
   }
-  const { trees: trees1, datasetName: datasetName1 } = await parseNml(nmlString1);
-  const { trees: trees2, datasetName: datasetName2 } = await parseNml(nmlString2);
+  // TODO: The datasetName stored in the NML is now interpreted as the dataset's directory name, so a call to the legacy route is necessary.
+  // Discussion: how can this be handled better?
+  const { trees: trees1, datasetName: datasetDirectoryName1 } = await parseNml(nmlString1);
+  const { trees: trees2, datasetName: datasetDirectoryName2 } = await parseNml(nmlString2);
-  if (!datasetName1 || !datasetName2) {
+  if (!datasetDirectoryName1 || !datasetDirectoryName2) {
     throw new SoftError("Could not extract dataset names.");
   }
@@ -205,8 +207,9 @@ async function parseNmlFiles(fileList: FileList): Promise
     throw new SoftError("Each file should contain at least 3 nodes.");
   }
-  const datasets = await tryToFetchDatasetsByName(
-    [datasetName1, datasetName2],
+  const datasets = await tryToFetchDatasetsByNameOrId(
+    [datasetDirectoryName1, datasetDirectoryName2], // fetch by name
+    [],
     "Could not derive datasets from NML. Please specify these manually.",
   );
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx b/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx
index bf7291ad210..ed656a2cc33 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx
@@ -4,7 +4,7 @@ import DatasetSelectionComponent, {
   type DatasetSelectionValue,
 } from "dashboard/dataset/dataset_selection_component";
 import { useState } from "react";
-import { tryToFetchDatasetsByName, type WizardComponentProps } from "./common";
+import { tryToFetchDatasetsByNameOrId, type WizardComponentProps } from "./common";
 import { useEffectOnlyOnce } from "libs/react_hooks";
 export default function SelectDatasets({ wizardContext, setWizardContext }: WizardComponentProps) {
@@ -18,12 +18,13 @@ export default function SelectDatasets({ wizardContext, setWizardContext }: Wiza
     }));
   };
   const onNext = async () => {
-    const datasets = await tryToFetchDatasetsByName(
-      datasetValues.map((el) => el.value),
+    const datasets = await tryToFetchDatasetsByNameOrId(
+      [],
+      datasetValues.map((el) => el.value), // fetch by id
      "Could not find datasets. Please doublecheck your selection.",
     );
     if (datasets == null) {
-      // An error message was already shown in tryToFetchDatasetsByName
+      // An error message was already shown in tryToFetchDatasetsByNameOrId
       return;
     }
@@ -35,7 +36,7 @@ export default function SelectDatasets({ wizardContext, setWizardContext }: Wiza
   };
   useEffectOnlyOnce(() => {
-    setDatasetValues(wizardContext.datasets.map((ds) => ({ value: ds.name, label: ds.name })));
+    setDatasetValues(wizardContext.datasets.map((ds) => ({ value: ds.id, label: ds.name })));
   });
   // When not using any transforms,
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx
index 195e1433f1b..c30012effad 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx
@@ -24,18 +24,11 @@ import Toast, { guardedWithErrorToast } from "libs/toast";
 import * as Utils from "libs/utils";
 import _ from "lodash";
 import messages from "messages";
-import { flatToNestedMatrix } from "oxalis/model/accessors/dataset_accessor";
+import { flatToNestedMatrix, getURLSanitizedName } from "oxalis/model/accessors/dataset_accessor";
 import type { OxalisState } from "oxalis/store";
 import React, { useState } from "react";
 import { useSelector } from "react-redux";
-import {
-  type APIDataLayer,
-  type APIDataset,
-  type APIDatasetId,
-  type APITeam,
-  areDatasetsIdentical,
-  type LayerLink,
-} from "types/api_flow_types";
+import type { APIDataLayer, APIDataset, APITeam, LayerLink } from "types/api_flow_types";
 import { syncValidator } from "types/validation";
 import type { WizardComponentProps } from "./common";
 import { useEffectOnlyOnce } from "libs/react_hooks";
@@ -78,10 +71,12 @@ export function ConfigureNewDataset(props: WizardComponentProps) {
     ) as [APIDataset, APIDataLayer][]
   ).map(
     ([dataset, dataLayer]): LayerLink => ({
-      datasetId: {
+      datasetId: dataset.id,
+      dataSourceId: {
+        directoryName: dataset.directoryName,
         owningOrganization: dataset.owningOrganization,
-        name: dataset.name,
       },
+      datasetName: dataset.name,
       sourceName: dataLayer.name,
       newName: dataLayer.name,
       transformations: [],
@@ -124,13 +119,14 @@ export function ConfigureNewDataset(props: WizardComponentProps) {
     if (useThinPlateSplines) {
       checkLandmarksForThinPlateSpline(sourcePoints, targetPoints);
     }
-    return layers.map((layer) => ({
-      ...layer,
-      // The first dataset will be transformed to match the second.
-      transformations: areDatasetsIdentical(layer.datasetId, linkedDatasets[0])
-        ? transformationArr
-        : [],
-    }));
+    return layers.map((layer) => {
+      const areDatasetsIdentical = layer.datasetId === linkedDatasets[0].id;
+      return {
+        ...layer,
+        // The first dataset will be transformed to match the second.
+        transformations: areDatasetsIdentical ? transformationArr : [],
+      };
+    });
   }
   const uploadableDatastores = props.datastores.filter((datastore) => datastore.allowsUpload);
@@ -172,7 +168,8 @@ export function ConfigureNewDataset(props: WizardComponentProps) {
     const newDatasetName = form.getFieldValue(["name"]);
     setIsLoading(true);
     try {
-      await createDatasetComposition(datastoreToUse.url, {
+      const { newDatasetId } = await createDatasetComposition(datastoreToUse.url, {
+        // Keep identifying the dataset by organization id & directory name, since this is a datastore request.
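+        // Sketch of the payload shape (assumed from the LayerLink construction above; the wire
+        // format is ultimately defined by the datastore): each entry in `layers` carries roughly
+        //   { datasetId, dataSourceId: { owningOrganization, directoryName }, datasetName, sourceName, newName, transformations }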
        newDatasetName,
        targetFolderId: form.getFieldValue(["targetFolderId"]),
        organizationId: activeUser.organization,
@@ -180,40 +177,36 @@
        layers: layersWithTransforms,
      });
-      const uniqueDatasets = _.uniqBy(
-        layersWithoutTransforms.map((layer) => layer.datasetId),
-        (id) => id.owningOrganization + "-" + id.name,
-      );
+      const uniqueDatasets = _.uniqBy(layersWithoutTransforms, (layer) => layer.datasetId);
       const datasetMarkdownLinks = uniqueDatasets
-        .map((el) => `- [${el.name}](/datasets/${el.owningOrganization}/${el.name})`)
+        .map(
+          (el) =>
+            `- [${el.datasetName}](/datasets/${getURLSanitizedName({ name: el.datasetName })}-${el.datasetId})`,
+        )
         .join("\n");
-      await updateDatasetPartial(
-        { owningOrganization: activeUser.organization, name: newDatasetName },
-        {
-          description: [
-            "This dataset was composed from:",
-            datasetMarkdownLinks,
-            "",
-            "The layers were combined " +
-              (sourcePoints.length === 0
-                ? "without any transforms"
-                : `with ${
-                    useThinPlateSplines
-                      ? `Thin-Plate-Splines (${sourcePoints.length} correspondences)`
-                      : `an affine transformation (mean error: ${formatNumber(
-                          affineMeanError.meanError,
-                        )} vx)`
-                  }`) +
-              ".",
-          ].join("\n"),
-        },
-      );
+      await updateDatasetPartial(newDatasetId, {
+        description: [
+          "This dataset was composed from:",
+          datasetMarkdownLinks,
+          "",
+          "The layers were combined " +
+            (sourcePoints.length === 0
+              ? "without any transforms"
+              : `with ${
+                  useThinPlateSplines
+                    ? `Thin-Plate-Splines (${sourcePoints.length} correspondences)`
+                    : `an affine transformation (mean error: ${formatNumber(
+                        affineMeanError.meanError,
+                      )} vx)`
+                }`) +
+            ".",
+        ].join("\n"),
+      });
+      props.onAdded(newDatasetId, newDatasetName, false);
    } finally {
      setIsLoading(false);
    }
-
-    props.onAdded(activeUser.organization, newDatasetName, false);
  };
  return (
@@ -271,6 +264,7 @@ export function ConfigureNewDataset(props: WizardComponentProps) {
 void;
 form: FormInstance;
-  datasetId: APIDatasetId;
+  datasetId: string;
+  datasetName: string;
 }) {
-  const layers = Form.useWatch(["layers"]);
+  const layers = Form.useWatch(["layers"]) || [];
  // biome-ignore lint/correctness/useExhaustiveDependencies: See comment below
  React.useEffect(() => {
@@ -380,11 +376,11 @@
          info="This is the layer which will be linked into the new dataset."
          >
-            {datasetId.name}
+            {datasetName}
           {" "}
           / {layer.sourceName}
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/common.ts b/frontend/javascripts/admin/dataset/composition_wizard/common.ts
index 04776662a1e..745757b0642 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/common.ts
+++ b/frontend/javascripts/admin/dataset/composition_wizard/common.ts
@@ -1,4 +1,4 @@
-import { getDataset } from "admin/admin_rest_api";
+import { getDataset, getDatasetLegacy } from "admin/admin_rest_api";
 import type { UploadFile } from "antd";
 import Toast from "libs/toast";
 import type { Vector3 } from "oxalis/constants";
@@ -28,30 +28,25 @@ export type WizardComponentProps = {
   setWizardContext: React.Dispatch>;
   datastores: APIDataStore[];
   onAdded: (
-    datasetOrganization: string,
-    uploadedDatasetName: string,
+    datasetId: string,
+    datasetName: string,
     needsConversion?: boolean | null | undefined,
   ) => Promise;
 };
-export async function tryToFetchDatasetsByName(
+export async function tryToFetchDatasetsByNameOrId(
   names: string[],
+  ids: string[],
   userErrorMessage: string,
 ): Promise {
   const { activeUser } = Store.getState();
   try {
-    const datasets = await Promise.all(
-      names.map((name) =>
-        getDataset(
-          {
-            owningOrganization: activeUser?.organization || "",
-            name: name,
-          },
-          null,
-          { showErrorToast: false },
-        ),
+    const datasets = await Promise.all([
+      ...names.map((name) =>
+        getDatasetLegacy(activeUser?.organization || "", name, null, { showErrorToast: false }),
       ),
-    );
+      ...ids.map((id) => getDataset(id, null, { showErrorToast: false })),
+    ]);
     return datasets;
   } catch (exception) {
     Toast.warning(userErrorMessage);
diff --git a/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx
index eb12f66bc18..24ba9a90cdd 100644
--- a/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx
+++ b/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx
@@ -49,8 +49,8 @@ type FileList = UploadFile[];
 type OwnProps = {
   onAdded: (
-    datasetOrganization: string,
-    uploadedDatasetName: string,
+    uploadedDatasetId: string,
+    updatedDatasetName: string,
     needsConversion?: boolean | null | undefined,
   ) => Promise;
   datastores: APIDataStore[];
@@ -287,26 +287,23 @@ function DatasetAddRemoteView(props: Props) {
       let configJSON;
       try {
         configJSON = JSON.parse(dataSourceJsonStr);
-        const nameValidationResult = await isDatasetNameValid({
-          name: configJSON.id.name,
-          owningOrganization: activeUser.organization,
-        });
+        const nameValidationResult = await isDatasetNameValid(configJSON.id.name);
         if (nameValidationResult) {
           throw new Error(nameValidationResult);
         }
-        await storeRemoteDataset(
+        const { newDatasetId } = await storeRemoteDataset(
           datastoreToUse.url,
           configJSON.id.name,
           activeUser.organization,
           dataSourceJsonStr,
           targetFolderId,
         );
+        onAdded(newDatasetId, configJSON.id.name);
       } catch (e) {
         setShowLoadingOverlay(false);
         Toast.error(`The datasource config could not be stored. ${e}`);
         return;
       }
-      onAdded(activeUser.organization, configJSON.id.name);
     }
   }
@@ -376,7 +373,6 @@ function DatasetAddRemoteView(props: Props) {
        {/* Only the component's visibility is changed, so that the form is always rendered.
            This is necessary so that the form's structure is always populated.
        */}
        {
diff --git a/frontend/javascripts/admin/dataset/dataset_add_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_view.tsx
index c7677831751..ae5980ebd18 100644
--- a/frontend/javascripts/admin/dataset/dataset_add_view.tsx
+++ b/frontend/javascripts/admin/dataset/dataset_add_view.tsx
@@ -14,6 +14,7 @@ import { getDatastores } from "admin/admin_rest_api";
 import { useFetch } from "libs/react_helpers";
 import DatasetAddComposeView from "./dataset_add_compose_view";
 import type { History } from "history";
+import { getURLSanitizedName } from "oxalis/model/accessors/dataset_accessor";
 const { Content, Sider } = Layout;
@@ -33,24 +34,24 @@ const addTypeToVerb: Record = {
 function DatasetAddView({ history }: RouteComponentProps) {
   const datastores = useFetch(getDatastores, [], []);
-  const [datasetName, setDatasetName] = useState("");
-  const [organization, setOrganization] = useState("");
+  const [datasetId, setDatasetId] = useState("");
+  const [uploadedDatasetName, setUploadedDatasetName] = useState("");
   const [datasetNeedsConversion, setDatasetNeedsConversion] = useState(false);
   const [datasetAddType, setImportType] = useState(DatasetAddType.UPLOAD);
   const handleDatasetAdded = async (
     datasetAddType: DatasetAddType,
-    datasetOrganization: string,
-    uploadedDatasetName: string,
+    datasetId: string,
+    datasetName: string,
     needsConversion: boolean | null | undefined,
   ): Promise => {
-    setOrganization(datasetOrganization);
-    setDatasetName(uploadedDatasetName);
+    setDatasetId(datasetId);
     setImportType(datasetAddType);
+    setUploadedDatasetName(datasetName);
     if (needsConversion != null) setDatasetNeedsConversion(needsConversion);
   };
-  const showAfterUploadContent = datasetName !== "";
+  const showAfterUploadContent = datasetId !== "";
   const getAfterUploadModalContent = () => {
     if (!showAfterUploadContent) {
@@ -60,9 +61,9 @@
     return getPostUploadModal(
       datasetNeedsConversion,
       datasetAddType,
-      organization,
-      datasetName,
-      setDatasetName,
+      datasetId,
+      uploadedDatasetName,
+      setDatasetId,
       history,
     );
   };
@@ -270,9 +271,9 @@ export default connector(withRouter(DatasetAddView));
 const getPostUploadModal = (
   datasetNeedsConversion: boolean,
   datasetAddType: DatasetAddType,
-  organization: string,
-  datasetName: string,
-  setDatasetName: (arg0: string) => void,
+  datasetId: string,
+  uploadedDatasetName: string,
+  setDatasetId: (arg0: string) => void,
   history: History,
 ) => {
   return (
@@ -291,8 +292,8 @@ const getPostUploadModal = (
           display: "none",
         },
       }}
-      onCancel={() => setDatasetName("")}
-      onOk={() => setDatasetName("")}
+      onCancel={() => setDatasetId("")}
+      onOk={() => setDatasetId("")}
       width={580}
     >
diff --git a/frontend/javascripts/admin/dataset/dataset_components.tsx b/frontend/javascripts/admin/dataset/dataset_components.tsx
index 8044ec861b2..ccbe40d7a30 100644
--- a/frontend/javascripts/admin/dataset/dataset_components.tsx
+++ b/frontend/javascripts/admin/dataset/dataset_components.tsx
@@ -1,7 +1,6 @@
 import type * as React from "react";
 import { Form, Input, Select, Card, type FormInstance } from "antd";
 import messages from "messages";
-import { isDatasetNameValid } from "admin/admin_rest_api";
 import type { APIDataStore, APITeam, APIUser } from "types/api_flow_types";
 import { syncValidator } from "types/validation";
 import { FormItemWithInfo } from "dashboard/dataset/helper_components";
@@ -53,10 +52,7 @@ export const layerNameRules = [
   },
 ];
-export const getDatasetNameRules = (
-  activeUser: APIUser | null | undefined,
-  allowRenaming: boolean = true,
-) => [
+export const getDatasetNameRules = (activeUser: APIUser | null | undefined) => [
   {
     required: true,
     message: messages["dataset.import.required.name"],
   },
   { min: 3, message: messages["dataset.name_length"] },
   ...layerNameRules,
   {
-    validator: async (_rule: any, value: string) => {
-      if (!allowRenaming) {
-        // Renaming is not allowed. No need to validate the (existing) name then.
-        return Promise.resolve();
-      }
+    validator: async () => {
       if (!activeUser) throw new Error("Can't do operation if no user is logged in.");
-      const reasons = await isDatasetNameValid({
-        name: value,
-        owningOrganization: activeUser.organization,
-      });
-
-      if (reasons != null) {
-        return Promise.reject(reasons);
-      } else {
-        return Promise.resolve();
-      }
+      return Promise.resolve();
     },
   },
 ];
@@ -88,7 +71,6 @@ export function DatasetNameFormItem({
   activeUser,
   initialName,
   label,
-  allowDuplicate,
   disabled,
 }: {
   activeUser: APIUser | null | undefined;
@@ -103,7 +85,7 @@ export function DatasetNameFormItem({
     label={label || "Dataset Name"}
     hasFeedback
     initialValue={initialName}
-    rules={getDatasetNameRules(activeUser, !allowDuplicate)}
+    rules={getDatasetNameRules(activeUser)}
     validateFirst
   >
diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx
index 653d2ff1249..e21c2103ff3 100644
--- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx
+++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx
@@ -35,7 +35,6 @@ import {
   type APITeam,
   type APIDataStore,
   type APIUser,
-  type APIDatasetId,
   type APIOrganization,
   APIJobType,
 } from "types/api_flow_types";
@@ -82,7 +81,11 @@ const logRetryToAnalytics = _.throttle((datasetName: string) => {
 type OwnProps = {
   datastores: Array;
   withoutCard?: boolean;
-  onUploaded: (arg0: string, arg1: string, arg2: boolean) => Promise | void;
+  onUploaded: (
+    datasetId: string,
+    datasetName: string,
+    needsConversion: boolean,
+  ) => Promise | void;
 };
 type StateProps = {
   activeUser: APIUser | null | undefined;
@@ -302,24 +305,23 @@ class DatasetUploadView extends React.Component {
     this.unblock = this.props.history.block(beforeUnload);
     // @ts-ignore
     window.onbeforeunload = beforeUnload;
-    const datasetId: APIDatasetId = {
-      name: formValues.name,
-      owningOrganization: activeUser.organization,
-    };
     const getRandomString = () => {
       const randomBytes = window.crypto.getRandomValues(new Uint8Array(6));
       return Array.from(randomBytes, (byte) => `0${byte.toString(16)}`.slice(-2)).join("");
     };
+    const newDatasetName = formValues.name;
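+    // Note (assumption, for illustration): `newDatasetName` is only the display name here; the
+    // dataset's directoryName and newDatasetId placeholders below are presumably filled in by the
+    // server when the upload is reserved, now that the directory is decoupled from the name.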
    const uploadId = unfinishedUploadToContinue
      ? unfinishedUploadToContinue.uploadId
-      : `${dayjs(Date.now()).format("YYYY-MM-DD_HH-mm")}__${datasetId.name}__${getRandomString()}`;
+      : `${dayjs(Date.now()).format("YYYY-MM-DD_HH-mm")}__${newDatasetName}__${getRandomString()}`;
     const filePaths = formValues.zipFile.map((file) => file.path || "");
     const reserveUploadInformation = {
       uploadId,
-      organization: datasetId.owningOrganization,
-      name: datasetId.name,
+      name: newDatasetName,
+      directoryName: "",
+      newDatasetId: "",
+      organization: activeUser.organization,
       totalFileCount: formValues.zipFile.length,
       filePaths: filePaths,
       layersToLink: [],
@@ -349,7 +351,7 @@
       isFinishing: true,
     });
     finishDatasetUpload(datastoreUrl, uploadInfo).then(
-      async () => {
+      async ({ newDatasetId }) => {
         trackAction("Upload dataset");
         Toast.success(messages["dataset.upload_success"]);
         let maybeError;
@@ -363,8 +365,7 @@
         }
         await startConvertToWkwJob(
-          formValues.name,
-          activeUser.organization,
+          newDatasetId,
           formValues.voxelSizeFactor,
           formValues.voxelSizeUnit,
         );
@@ -403,16 +404,12 @@
           name: "",
           zipFile: [],
         });
-        this.props.onUploaded(
-          activeUser.organization,
-          formValues.name,
-          this.state.needsConversion,
-        );
+        this.props.onUploaded(newDatasetId, newDatasetName, this.state.needsConversion);
       }
     },
     (error) => {
       sendFailedRequestAnalyticsEvent("finish_dataset_upload", error, {
-        dataset_name: datasetId.name,
+        dataset_name: reserveUploadInformation.name,
       });
       Toast.error(messages["dataset.upload_failed"]);
       this.setState({
@@ -442,7 +439,7 @@
     });
   });
   resumableUpload.on("fileRetry", () => {
-    logRetryToAnalytics(datasetId.name);
+    logRetryToAnalytics(newDatasetName);
     this.setState({
       isRetrying: true,
     });
@@ -754,7 +751,7 @@
   >
     {unfinishedAndNotSelectedUploads.map((unfinishedUpload) => (
-      {unfinishedUpload.datasetId.name}
+      {unfinishedUpload.datasetName}