diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index b3afa96d014..6f9a0636925 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -34,5 +34,6 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Fixed a bug with incorrect valiation of layer names in the animation modal. [#7882](https://github.com/scalableminds/webknossos/pull/7882) ### Removed +- If the datasource-properties.json file for a dataset is missing or contains errors, WEBKNOSSOS no longer attempts to guess its contents from the raw data. Exploring remote datasets will still create the file. [#7697](https://github.com/scalableminds/webknossos/pull/7697) ### Breaking Changes diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index a92614e9eeb..98a3bec4f9e 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -8,12 +8,12 @@ User-facing changes are documented in the [changelog](CHANGELOG.released.md). ## Unreleased [Commits](https://github.com/scalableminds/webknossos/compare/24.06.0...HEAD) -- The datastore config field `datastore.cache.dataCube.maxEntries` is no longer used an can be removed. - +- The datastore config field `datastore.cache.dataCube.maxEntries` is no longer used and can be removed. [#7818](https://github.com/scalableminds/webknossos/pull/7818) - If your setup contains webknossos-workers, you may want to add the newly available job `align_sections` to the `supportedJobCommands` of your workers. Make sure you deploy the latest webknossos-worker release. [#7820](https://github.com/scalableminds/webknossos/pull/7820) +- If you place WKW datasets directly on disk, a datasource-properties.json file is now required, as WEBKNOSSOS no longer guesses its contents from the raw data. Standard dataset creation methods, e.g. via the WEBKNOSSOS CLI or the Python libraries, already create this metadata file automatically. [#7697](https://github.com/scalableminds/webknossos/pull/7697) ### Postgres Evolutions: - [114-ai-models.sql](conf/evolutions/114-ai-models.sql) - [115-annotation-locked-by-user.sql](conf/evolutions/115-annotation-locked-by-user.sql) -- [116-drop-overtimemailinglist.sql](conf/evolutions/116-drop-overtimemailinglist.sql) \ No newline at end of file +- [116-drop-overtimemailinglist.sql](conf/evolutions/116-drop-overtimemailinglist.sql) diff --git a/docs/data_formats.md b/docs/data_formats.md index 259e721367a..a5310bd8b1d 100644 --- a/docs/data_formats.md +++ b/docs/data_formats.md @@ -126,9 +126,8 @@ The term "magnifications" is used synonymously for resolutions throughout the UI At the moment, WebKnossos guarantees correct rendering of data with non-uniform resolution factors only if the z-component between two resolutions changes by a factor of 1 or 2. Most users do not create these metadata files manually. -WEBKNOSSOS can infer most of these properties automatically, except for `scale` and `largestSegmentId`. -During the data import process, WEBKNOSSOS will ask for the necessary properties. When using the [WEBKNOSSOS CLI](http://docs.webknossos.org/cli), a metadata file is automatically generated. Alternatively, you can create and edit WEBKNOSSOS datasets using the [WEBKNOSSOS Python library](https://github.com/scalableminds/webknossos-libs/). +During the data import process, WEBKNOSSOS will ask for the necessary properties. [See below for the full specification](#dataset-metadata-specification).
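As a minimal sketch of the workflow the migration note above recommends: creating a dataset through the [webknossos Python package](https://github.com/scalableminds/webknossos-libs/) writes the now-required datasource-properties.json alongside the raw data. Names follow that package's public API, but exact signatures and defaults may differ between releases; the dataset path, layer dtype, and placeholder data are purely illustrative.

```python
import numpy as np
import webknossos as wk

# Creating the dataset via the Python package also writes the
# datasource-properties.json, so WEBKNOSSOS never needs to guess metadata.
ds = wk.Dataset("my_dataset", voxel_size=(11.24, 11.24, 25))  # voxel size in nanometers

# A single color layer in WKW format; dtype and data_format here are
# assumptions for this example, not requirements.
layer = ds.add_layer("color", wk.COLOR_CATEGORY, dtype_per_channel="uint8", data_format="wkw")

mag = layer.add_mag(1)
mag.write(np.zeros((64, 64, 64), dtype="uint8"))  # bounding box grows with each write
```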
diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 04726e1dfbe..b2aee81b64e 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -11,7 +11,6 @@ import type { APIBuildInfo, APIConnectomeFile, APIDataSource, - APIDataSourceWithMessages, APIDataStore, APIDataset, APIDatasetId, @@ -1116,16 +1115,6 @@ export async function getDatasets( return datasets; } -export function getDatasetDatasource( - dataset: APIMaybeUnimportedDataset, -): Promise { - return doWithToken((token) => - Request.receiveJSON( - `${dataset.dataStore.url}/data/datasets/${dataset.owningOrganization}/${dataset.name}?token=${token}`, - ), - ); -} - export function readDatasetDatasource(dataset: APIDataset): Promise { return doWithToken((token) => Request.receiveJSON( diff --git a/frontend/javascripts/admin/dataset/dataset_add_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_view.tsx index 0dc44419bea..a180e1e0397 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_view.tsx @@ -96,7 +96,7 @@ function DatasetAddView({ history }: RouteComponentProps) { View the Dataset diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx index 357d8c8eb3a..32dd5ae47f9 100644 --- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx @@ -725,12 +725,12 @@ class DatasetUploadView extends React.Component { rules={[ { required: this.state.needsConversion, - message: "Please provide a scale for the dataset.", + message: "Please provide a voxel size for the dataset.", }, { validator: syncValidator( (value: Vector3) => value?.every((el) => el > 0), - "Each component of the scale must be larger than 0.", + "Each component of the voxel size must be larger than 0.", ), }, ]} diff --git a/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx b/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx index cf5e0237b51..76d5f3e97fa 100644 --- a/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx +++ b/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx @@ -3,7 +3,6 @@ import { EllipsisOutlined, EyeOutlined, LoadingOutlined, - PlusCircleOutlined, PlusOutlined, ReloadOutlined, SettingOutlined, @@ -18,9 +17,9 @@ import Toast from "libs/toast"; import messages from "messages"; import CreateExplorativeModal from "dashboard/advanced_dataset/create_explorative_modal"; import { MenuProps, Modal, Typography } from "antd"; +import { useState } from "react"; import { confirmAsync } from "dashboard/dataset/helper_components"; import { useQueryClient } from "@tanstack/react-query"; -import { useState } from "react"; const disabledStyle: React.CSSProperties = { pointerEvents: "none", @@ -193,18 +192,31 @@ function DatasetActionView(props: Props) { style={disabledWhenReloadingStyle} type="link" > - {isReloading ? : } + {isReloading ? ( + + ) : ( + + )} Reload ); - const importLink = ( -
- + - - Import + + Settings + + + ); + const brokenDatasetActions = ( +
+ + + Settings {reloadLink} ); + + const activeDatasetActions = ( + <> + {" "} + setIsCreateExplorativeModalVisible(true)} + onCloseCreateExplorativeModal={() => setIsCreateExplorativeModalVisible(false)} + /> + + + View + + {dataset.isEditable ? datasetSettingsLink : null} + {reloadLink} + + ); return (
- {dataset.isEditable && !dataset.isActive ? importLink : null} - {dataset.isActive ? ( -
- setIsCreateExplorativeModalVisible(true)} - onCloseCreateExplorativeModal={() => setIsCreateExplorativeModalVisible(false)} - /> - - - View - - {dataset.isEditable ? ( - - - - Settings - - {reloadLink} - - ) : null} -
- ) : null} + {dataset.isEditable && !dataset.isActive ? brokenDatasetActions : null} +
+ {dataset.isActive ? activeDatasetActions : null} +
); } @@ -330,7 +336,7 @@ export function getDatasetActionContextMenu({ }, } : null, - dataset.isEditable && dataset.isActive + dataset.isEditable ? { key: "edit", label: "Open Settings", @@ -340,15 +346,6 @@ export function getDatasetActionContextMenu({ } : null, - dataset.isEditable && !dataset.isActive - ? { - key: "import", - label: "Import", - onClick: () => { - window.location.href = `/datasets/${dataset.owningOrganization}/${dataset.name}/import`; - }, - } - : null, { key: "reload", label: "Reload", diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx index 9858022bf84..70dc5199d14 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx @@ -64,14 +64,12 @@ export default function DatasetSettingsDataTab({ form, activeDataSourceEditMode, onChange, - additionalAlert, dataset, }: { allowRenamingDataset: boolean; form: FormInstance; activeDataSourceEditMode: "simple" | "advanced"; onChange: (arg0: "simple" | "advanced") => void; - additionalAlert?: React.ReactNode | null | undefined; dataset?: APIDataset | null | undefined; }) { // Using the return value of useWatch for the `dataSource` var @@ -113,8 +111,6 @@ export default function DatasetSettingsDataTab({
- {additionalAlert} - diff --git a/frontend/javascripts/messages.tsx b/frontend/javascripts/messages.tsx index b7cf880c274..e0cae035e6b 100644 --- a/frontend/javascripts/messages.tsx +++ b/frontend/javascripts/messages.tsx @@ -338,11 +338,11 @@ instead. Only enable this option if you understand its effect. All layers will n "The explored data has a different voxel size from the datasource that was already loaded. The explored voxel size was:", "dataset.segmentationlayer_not_existing": "This annotation has no segmentation layer.", "dataset.invalid_datasource_json": - "The datasource-properties.json on disk is invalid. Please review all properties before importing the dataset. You can always go back and change the values later.", + "The datasource-properties.json on disk is invalid. Please review all properties below to use the dataset. You can always go back and change the values later.", "dataset.missing_datasource_json": - "The datasource-properties.json was not found. The values below are guessed by WEBKNOSSOS. Please review all properties before importing the dataset. You can always go back and change the values later.", + "A datasource-properties.json file was not found. Please review all properties below to use the dataset. You can always go back and change the values later.", "dataset.import_complete": - "A valid datasource-properties.json was found. The dataset is imported and ready to use. You may still change the properties below.", + "A valid datasource-properties.json file was found. The dataset is imported and ready to use. You may still change the properties below.", "dataset.confirm_signup": "For dataset annotation, please log in or create an account. For dataset viewing, no account is required. Do you wish to sign up now?", "dataset.does_not_exist": "Selected dataset doesn't exist!", diff --git a/frontend/javascripts/router.tsx b/frontend/javascripts/router.tsx index 3d61ba4c9f9..73f6fd6e756 100644 --- a/frontend/javascripts/router.tsx +++ b/frontend/javascripts/router.tsx @@ -436,24 +436,6 @@ class ReactRouter extends React.Component { requiresAdminOrManagerRole render={() => } /> - ( - - window.location.replace(`${window.location.origin}/dashboard/datasets`) - } - onCancel={() => window.history.back()} - /> - )} - /> ; -}; export type APITeamMembership = { readonly id: string; readonly name: string; diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index dbfbba32a4d..4cdd566a1d2 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -15,11 +15,7 @@ import com.scalableminds.webknossos.datastore.helpers.{ SegmentIndexData, SegmentStatisticsParameters } -import com.scalableminds.webknossos.datastore.models.datasource.inbox.{ - InboxDataSource, - InboxDataSourceLike, - UnusableInboxDataSource -} +import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSource import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId, GenericDataSource} import com.scalableminds.webknossos.datastore.services._ import com.scalableminds.webknossos.datastore.services.uploading.{ @@ -68,24 +64,18 @@ class DataSourceController @Inject()( override def allowRemoteOrigin: Boolean = true - def 
read(token: Option[String], - organizationName: String, - datasetName: String, - returnFormatLike: Boolean): Action[AnyContent] = + def readInboxDataSource(token: Option[String], organizationName: String, datasetName: String): Action[AnyContent] = Action.async { implicit request => { accessTokenService.validateAccessForSyncBlock( UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationName)), urlOrHeaderToken(token, request)) { - val dsOption: Option[InboxDataSource] = - dataSourceRepository.find(DataSourceId(datasetName, organizationName)) - dsOption match { - case Some(ds) => - val dslike: InboxDataSourceLike = ds - if (returnFormatLike) Ok(Json.toJson(dslike)) - else Ok(Json.toJson(ds)) - case _ => Ok - } + // Read directly from file, not from repository to ensure recent changes are seen + val dataSource: InboxDataSource = + dataSourceService.dataSourceFromFolder( + dataSourceService.dataBaseDir.resolve(organizationName).resolve(datasetName), + organizationName) + Ok(Json.toJson(dataSource)) } } } @@ -225,35 +215,6 @@ class DataSourceController @Inject()( } } - def suggestDatasourceJson(token: Option[String], organizationName: String, datasetName: String): Action[AnyContent] = - Action.async { implicit request => - accessTokenService.validateAccessForSyncBlock( - UserAccessRequest.writeDataSource(DataSourceId(datasetName, organizationName)), - urlOrHeaderToken(token, request)) { - for { - previousDataSource <- dataSourceRepository.find(DataSourceId(datasetName, organizationName)) ?~ Messages( - "dataSource.notFound") ~> NOT_FOUND - (dataSource, messages) <- dataSourceService.exploreDataSource(previousDataSource.id, - previousDataSource.toUsable) - previousDataSourceJson = previousDataSource match { - case usableDataSource: DataSource => Json.toJson(usableDataSource) - case unusableDataSource: UnusableInboxDataSource => - unusableDataSource.existingDataSourceProperties match { - case Some(existingConfig) => existingConfig - case None => Json.toJson(unusableDataSource) - } - } - } yield { - Ok( - Json.obj( - "dataSource" -> dataSource, - "previousDataSource" -> previousDataSourceJson, - "messages" -> messages.map(m => Json.obj(m._1 -> m._2)) - )) - } - } - } - def listMappings( token: Option[String], organizationName: String, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormat.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormat.scala deleted file mode 100644 index 59eefc96a0d..00000000000 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormat.scala +++ /dev/null @@ -1,145 +0,0 @@ -package com.scalableminds.webknossos.datastore.dataformats.wkw - -import java.nio.file.Path -import com.scalableminds.webknossos.datastore.models.datasource.{Category, DataLayer, SegmentationLayer} -import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} -import com.scalableminds.util.io.PathUtils -import com.scalableminds.util.tools.ExtendedTypes._ -import com.scalableminds.webknossos.datastore.dataformats.layers.{WKWDataLayer, WKWResolution, WKWSegmentationLayer} -import com.scalableminds.webknossos.datastore.services.{DataSourceImportReport, DataSourceImporter} -import net.liftweb.common.{Box, Failure, Full} - -object WKWDataFormat extends DataSourceImporter with WKWDataFormatHelper { - - def exploreLayer(name: String, baseDir: Path, previous: Option[DataLayer])( - implicit report: DataSourceImportReport[Path]): Box[DataLayer] = - (for { - 
resolutions <- exploreResolutions(baseDir) - ((voxelType, voxelSize), wkwResolutions) <- extractHeaderParameters(resolutions) - elementClass <- VoxelType.toElementClass(voxelType, voxelSize) - } yield { - val category = previous.map(_.category).getOrElse(guessLayerCategory(name, elementClass)) - val boundingBox = previous - .map(_.boundingBox) - .orElse(guessBoundingBox(baseDir, wkwResolutions.headOption)) - .getOrElse(BoundingBox.empty) - val defaultViewConfiguration = previous.flatMap(_.defaultViewConfiguration) - category match { - case Category.segmentation => - val mappings = exploreMappings(baseDir) - val largestSegmentId = previous match { - case Some(l: SegmentationLayer) => l.largestSegmentId - case _ => None - } - WKWSegmentationLayer( - name, - boundingBox, - wkwResolutions, - elementClass, - mappings, - largestSegmentId, - defaultViewConfiguration - ) - case _ => - WKWDataLayer( - name, - category, - boundingBox, - wkwResolutions, - elementClass, - defaultViewConfiguration - ) - } - }).passFailure { f => - report.error(layer => s"Error processing layer '$layer' - ${f.msg}") - } - - private def exploreResolutions(baseDir: Path)( - implicit report: DataSourceImportReport[Path]): Box[List[(WKWHeader, Vec3Int)]] = - PathUtils.listDirectories(baseDir, silent = false, magDirFilter).flatMap { resolutionDirs => - val resolutionHeaders = resolutionDirs.sortBy(magDirSortingKey).map { resolutionDir => - val resolution = magFromPath(resolutionDir).get - WKWHeader(resolutionDir.resolve(FILENAME_HEADER_WKW).toFile).map { header => - (header, resolution) - }.passFailure { f => - report.error(_ => s"Error processing resolution '$resolution' - ${f.msg}") - } - } - - resolutionHeaders - .toSingleBox("Error reading resolutions") - .flatMap(list => - if (list.isEmpty) { - Failure("No resolutions found. 
Consider adding resolution directories.") - } else Full(list)) - } - - private def extractHeaderParameters(resolutions: List[(WKWHeader, Vec3Int)])( - implicit report: DataSourceImportReport[Path]): Box[((VoxelType.Value, Int), List[WKWResolution])] = { - val headers = resolutions.map(_._1) - val voxelTypes = headers.map(_.voxelType).toSet - val voxelSize = headers.map(_.numBytesPerVoxel).toSet - val bucketLengths = headers.map(_.numVoxelsPerChunkDimension).toSet - val wkwResolutions = resolutions.map { resolution => - WKWResolution(resolution._2, resolution._1.numVoxelsPerChunkDimension * resolution._1.numVoxelsPerChunkDimension) - } - - if (voxelTypes.size == 1 && bucketLengths == Set(32)) { - Full(((voxelTypes.head, voxelSize.head), wkwResolutions)) - } else { - if (voxelTypes.size != 1) - report.error(layer => s"Error processing layer '$layer' - all resolutions must have the same voxelType") - if (bucketLengths != Set(32)) - report.error(layer => s"Error processing layer '$layer' - all resolutions must have a bucketLength of 32") - Failure("Error extracting parameters from header.wkw") - } - } - - private def guessBoundingBox(baseDir: Path, resolutionOption: Option[WKWResolution]) = { - def getIntFromFilePath(path: Path) = path.getFileName.toString.replaceAll(".wkw", "").substring(1).toInt - - def minMaxValue(path: Path, minMax: (Int, Int)) = - (Math.min(minMax._1, getIntFromFilePath(path)), Math.max(minMax._2, getIntFromFilePath(path) + 1)) - - for { - resolution <- resolutionOption - multiplierX = resolution.cubeLength * resolution.resolution.x - multiplierY = resolution.cubeLength * resolution.resolution.y - multiplierZ = resolution.cubeLength * resolution.resolution.z - - resolutionDirs <- PathUtils.listDirectories(baseDir, silent = false, filterGen("")) - resolutionDir <- resolveHead(baseDir, resolutionDirs.sortBy(magDirSortingKey)) - - zDirs <- PathUtils.listDirectories(resolutionDir, silent = false, filterGen("z")) - zHeadDir <- resolveHead(resolutionDir, zDirs) - - yDirs <- PathUtils.listDirectories(zHeadDir, silent = false, filterGen("y")) - yHeadDir <- resolveHead(zHeadDir, yDirs) - - xFiles <- PathUtils.listFiles(yHeadDir, silent = false, filterGen("x")) - xFile <- xFiles.headOption - - (zMin, zMax) = zDirs.foldRight((getIntFromFilePath(zHeadDir), 0))(minMaxValue) - (yMin, yMax) = yDirs.foldRight((getIntFromFilePath(yHeadDir), 0))(minMaxValue) - (xMin, xMax) = xFiles.foldRight((getIntFromFilePath(xFile), 0))(minMaxValue) - } yield { - BoundingBox( - Vec3Int(xMin * multiplierX, yMin * multiplierY, zMin * multiplierZ), - xMax * multiplierX - xMin * multiplierX, - yMax * multiplierY - yMin * multiplierY, - zMax * multiplierZ - zMin * multiplierZ - ) - } - } - - private def filterGen(dimension: String) = (path: Path) => { - path.getFileName.toString.matches(dimension + "\\d+.*") - } - - private def resolveHead(baseDir: Path, paths: List[Path]) = - for { - headDirPath <- paths.headOption - } yield { - baseDir.resolve(headDirPath.getFileName) - } -} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala index b2d30cba9c0..c29e42d399f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala @@ -5,12 +5,7 @@ import com.scalableminds.util.cache.AlfuCache import 
com.scalableminds.util.tools.Fox.box2Fox import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.JsonHelper.bool2Box -import com.scalableminds.webknossos.datastore.dataformats.wkw.{ - MortonEncoding, - WKWDataFormat, - WKWDataFormatHelper, - WKWHeader -} +import com.scalableminds.webknossos.datastore.dataformats.wkw.{MortonEncoding, WKWDataFormatHelper, WKWHeader} import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, ChunkUtils, DatasetArray} import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataSourceId} @@ -22,14 +17,13 @@ import java.io.ByteArrayInputStream import scala.collection.immutable.NumericRange import scala.concurrent.ExecutionContext -object WKWArray { +object WKWArray extends WKWDataFormatHelper { def open(path: VaultPath, dataSourceId: DataSourceId, layerName: String, sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[WKWArray] = for { - headerBytes <- (path / WKWDataFormat.FILENAME_HEADER_WKW) - .readBytes() ?~> s"Could not read header at ${WKWDataFormat.FILENAME_HEADER_WKW}" + headerBytes <- (path / FILENAME_HEADER_WKW).readBytes() ?~> s"Could not read header at ${FILENAME_HEADER_WKW}" dataInputStream = new LittleEndianDataInputStream(new ByteArrayInputStream(headerBytes)) header <- WKWHeader(dataInputStream, readJumpTable = false).toFox array <- tryo(new WKWArray(path, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceImporter.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceImporter.scala deleted file mode 100644 index 5a5023a3526..00000000000 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceImporter.scala +++ /dev/null @@ -1,85 +0,0 @@ -package com.scalableminds.webknossos.datastore.services - -import java.nio.file.Path -import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} -import com.scalableminds.util.io.PathUtils -import com.scalableminds.webknossos.datastore.dataformats.MappingProvider -import com.scalableminds.webknossos.datastore.models.datasource._ -import net.liftweb.common.Box - -import scala.collection.mutable.ArrayBuffer - -case class DataSourceImportReport[A](ctx: A, messages: ArrayBuffer[(String, String)] = ArrayBuffer.empty) { - - def error(msg: A => String): Unit = messages.append("error" -> msg(ctx)) - - def warning(msg: A => String): Unit = messages.append("warning" -> msg(ctx)) - - def info(msg: A => String): Unit = messages.append("info" -> msg(ctx)) - - def withContext(f: A => A): DataSourceImportReport[A] = DataSourceImportReport(f(ctx), messages) -} - -trait DataSourceImporter { - - def dataFileExtension: String - - protected def exploreLayer(name: String, baseDir: Path, previous: Option[DataLayer])( - implicit report: DataSourceImportReport[Path]): Box[DataLayer] - - private def wkwFileFilter(path: Path): Boolean = path.getFileName.toString.toLowerCase().endsWith(".wkw") - - def looksLikeWKWDataSource(baseDir: Path): Box[Boolean] = - PathUtils.containsFile(baseDir, maxDepth = 3, silent = true, filters = wkwFileFilter) - - def exploreDataSource(id: DataSourceId, - baseDir: Path, - previous: Option[DataSource], - report: DataSourceImportReport[Path]): Box[DataSource] = - PathUtils.listDirectories(baseDir, silent = false).map { layerDirs => - val layers = layerDirs.flatMap { layerDir => - val layerName = layerDir.getFileName.toString - val 
previousLayer = previous.flatMap(_.getDataLayer(layerName)) - exploreLayer(layerName, layerDir, previousLayer)(report.withContext(_.resolve(layerName))) - } - GenericDataSource(id, - layers, - previous.map(_.scale).getOrElse(Vec3Double.zeros), - previous.flatMap(_.defaultViewConfiguration)) - } - - def dummyDataSource(id: DataSourceId, - previous: Option[DataSource], - report: DataSourceImportReport[Path]): Box[DataSource] = { - report.warning(_ => - "Automatic suggestions for the datasource-properties.json are not available since the dataset is not in WKW format.") - previous.orElse(Some(GenericDataSource(id, List.empty, Vec3Double.zeros))) - } - - protected def guessLayerCategory(layerName: String, elementClass: ElementClass.Value)( - implicit report: DataSourceImportReport[Path]): Category.Value = { - val ColorRx = ".*color.*".r - val SegmentationRx = ".*segmentation.*".r - - layerName match { - case ColorRx() => - Category.color - case SegmentationRx() => - Category.segmentation - case _ => - report.warning(layer => s"Layer [$layer] - Falling back to elementClass for determining category") - Category.guessFromElementClass(elementClass) - } - } - - protected def magFromPath(path: Path): Option[Vec3Int] = - Vec3Int.fromMagLiteral(path.getFileName.toString, allowScalar = true) - - protected def magDirFilter(path: Path): Boolean = magFromPath(path).isDefined - - protected def magDirSortingKey(path: Path): Int = - magFromPath(path).get.maxDim - - protected def exploreMappings(baseDir: Path): Option[Set[String]] = MappingProvider.exploreMappings(baseDir) - -} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala index 543a31e3ffa..04fcff8d30b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala @@ -9,7 +9,6 @@ import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} import com.scalableminds.webknossos.datastore.DataStoreConfig import com.scalableminds.webknossos.datastore.dataformats.MappingProvider -import com.scalableminds.webknossos.datastore.dataformats.wkw.WKWDataFormat import com.scalableminds.webknossos.datastore.helpers.IntervalScheduler import com.scalableminds.webknossos.datastore.models.datasource._ import com.scalableminds.webknossos.datastore.models.datasource.inbox.{InboxDataSource, UnusableDataSource} @@ -108,16 +107,6 @@ class DataSourceService @Inject()( if (emptyDirs.nonEmpty) logger.warn(s"Empty organization dataset dirs: ${emptyDirs.mkString(", ")}") } - def exploreDataSource(id: DataSourceId, previous: Option[DataSource]): Box[(DataSource, List[(String, String)])] = { - val path = dataBaseDir.resolve(id.team).resolve(id.name) - val report = DataSourceImportReport[Path](dataBaseDir.relativize(path)) - for { - looksLikeWKWDataSource <- WKWDataFormat.looksLikeWKWDataSource(path) - dataSource <- if (looksLikeWKWDataSource) WKWDataFormat.exploreDataSource(id, path, previous, report) - else WKWDataFormat.dummyDataSource(id, previous, report) - } yield (dataSource, report.messages.toList) - } - def exploreMappings(organizationName: String, datasetName: String, dataLayerName: String): Set[String] = MappingProvider .exploreMappings(dataBaseDir.resolve(organizationName).resolve(datasetName).resolve(dataLayerName)) diff 
--git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala index a98ca6fbd2f..41d66b80879 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.io.PathUtils.ensureDirectoryBox import com.scalableminds.util.io.{PathUtils, ZipIO} import com.scalableminds.util.tools.{BoxImplicits, Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.dataformats.layers.{WKWDataLayer, WKWSegmentationLayer} -import com.scalableminds.webknossos.datastore.dataformats.wkw.WKWDataFormat.FILENAME_HEADER_WKW +import com.scalableminds.webknossos.datastore.dataformats.wkw.WKWDataFormatHelper import com.scalableminds.webknossos.datastore.datareaders.n5.N5Header.FILENAME_ATTRIBUTES_JSON import com.scalableminds.webknossos.datastore.datareaders.n5.{N5Header, N5Metadata} import com.scalableminds.webknossos.datastore.datareaders.precomputed.PrecomputedHeader.FILENAME_INFO @@ -83,6 +83,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, with DirectoryConstants with FoxImplicits with BoxImplicits + with WKWDataFormatHelper with LazyLogging { /* Redis stores different information for each upload, with different prefixes in the keys: diff --git a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes index 35f363740b0..e796fc871d3 100644 --- a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes +++ b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes @@ -84,11 +84,9 @@ POST /datasets/reserveManualUpload POST /datasets/finishUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.finishUpload(token: Option[String]) POST /datasets/cancelUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.cancelUpload(token: Option[String]) GET /datasets/measureUsedStorage/:organizationName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.measureUsedStorage(token: Option[String], organizationName: String, datasetName: Option[String]) -GET /datasets/:organizationName/:datasetName/readInboxDataSourceLike @com.scalableminds.webknossos.datastore.controllers.DataSourceController.read(token: Option[String], organizationName: String, datasetName: String, returnFormatLike: Boolean ?= true) -GET /datasets/:organizationName/:datasetName/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.read(token: Option[String], organizationName: String, datasetName: String, returnFormatLike: Boolean ?= false) +GET /datasets/:organizationName/:datasetName/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.readInboxDataSource(token: Option[String], organizationName: String, datasetName: String) POST /datasets/:organizationName/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.update(token: Option[String], organizationName: String, datasetName: String) PUT /datasets/:organizationName/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(token: Option[String], organizationName: String, datasetName: String, 
folderId: Option[String]) -GET /datasets/:organizationName/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.suggestDatasourceJson(token: Option[String], organizationName: String, datasetName: String) DELETE /datasets/:organizationName/:datasetName/deleteOnDisk @com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(token: Option[String], organizationName: String, datasetName: String) POST /datasets/compose @com.scalableminds.webknossos.datastore.controllers.DataSourceController.compose(token: Option[String]) POST /datasets/exploreRemote @com.scalableminds.webknossos.datastore.controllers.DataSourceController.exploreRemoteDataset(token: Option[String])
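With `suggestDatasourceJson` and the `readInboxDataSourceLike` variant removed, the single remaining read route above returns whatever the on-disk datasource-properties.json currently holds, wrapped as an InboxDataSource (usable or not). A hedged sketch of calling it from a script; the datastore URL, organization, dataset name, and token are placeholders, and the `/data` prefix plus `token` query parameter mirror what the frontend's `readDatasetDatasource` helper uses.

```python
import requests

DATASTORE_URL = "https://datastore.example.com"  # placeholder
TOKEN = "user-or-dataset-token"                  # placeholder

# Matches: GET /datasets/:organizationName/:datasetName/readInboxDataSource
resp = requests.get(
    f"{DATASTORE_URL}/data/datasets/sample_organization/sample_dataset/readInboxDataSource",
    params={"token": TOKEN},
)
resp.raise_for_status()
inbox_data_source = resp.json()
# For an unusable datasource (missing or broken datasource-properties.json),
# the JSON carries a status message instead of usable layer definitions.
print(inbox_data_source)
```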