diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 000000000..c1ea26d9c --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,42 @@ +name: Build Docs + +on: + push: + branches: + - master + pull_request: + +jobs: + docs: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python: + - 3.7 + #- 3.8 + #- 3.9 + steps: + - name: Check out repository + uses: actions/checkout@v2 + with: + # Fetch all commits so that versioneer will return something compatible + # with semantic-version + fetch-depth: 0 + + - name: Set up Python ${{ matrix.python }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + + - name: Install hdf5 (Ubuntu) + if: matrix.python == '3.9' + run: sudo apt-get update && sudo apt-get install -y libhdf5-dev + + - name: Install dependencies + run: | + python -m pip install --upgrade pip wheel + python -m pip install --upgrade tox + + - name: Build docs + run: tox -e docs diff --git a/.gitignore b/.gitignore index 100e0f0f6..b6bade9e0 100644 --- a/.gitignore +++ b/.gitignore @@ -3,12 +3,14 @@ .*.swp .coverage .coverage.* +.docker/ .eggs .idea .tox/ __pycache__/ build/ dist/ +docs/**/generated/ pip-wheel-metadata/ sandbox/ venv/ diff --git a/dandi/dandiapi.py b/dandi/dandiapi.py index 76f8aa538..e26b91ae3 100644 --- a/dandi/dandiapi.py +++ b/dandi/dandiapi.py @@ -526,16 +526,13 @@ def upload_raw_asset( this version of the Dandiset and return the resulting asset. Blocks until the upload is complete. - Parameters - ---------- - filepath: str or PathLike - the path to the local file to upload - asset_metadata: dict - Metadata for the uploaded asset file. Must include a "path" field - giving the POSIX path at which the uploaded file will be placed on - the server. 
- jobs: int - Number of threads to use for uploading; defaults to 5 + :param filepath: the path to the local file to upload + :type filepath: str or PathLike + :param dict asset_metadata: + Metadata for the uploaded asset file. Must include a "path" field + giving the POSIX path at which the uploaded file will be placed on + the server. + :param int jobs: Number of threads to use for uploading; defaults to 5 """ for status in self.iter_upload_raw_asset(filepath, asset_metadata, jobs=jobs): if status["status"] == "done": @@ -553,22 +550,19 @@ def iter_upload_raw_asset( this version of the Dandiset, returning a generator of status `dict`\\s. - Parameters - ---------- - filepath: str or PathLike - the path to the local file to upload - asset_metadata: dict - Metadata for the uploaded asset file. Must include a "path" field - giving the POSIX path at which the uploaded file will be placed on - the server. - jobs: int - Number of threads to use for uploading; defaults to 5 - - Returns - ------- - A generator of `dict`\\s containing at least a ``"status"`` key. Upon - successful upload, the last `dict` will have a status of ``"done"`` and - an ``"asset"`` key containing the resulting `RemoteAsset`. + :param filepath: the path to the local file to upload + :type filepath: str or PathLike + :param dict asset_metadata: + Metadata for the uploaded asset file. Must include a "path" field + giving the POSIX path at which the uploaded file will be placed on + the server. + :param int jobs: + Number of threads to use for uploading; defaults to 5 + :returns: + A generator of `dict`\\s containing at least a ``"status"`` key. + Upon successful upload, the last `dict` will have a status of + ``"done"`` and an ``"asset"`` key containing the resulting + `RemoteAsset`. 
""" from .support.digests import get_dandietag diff --git a/dandi/dandiarchive.py b/dandi/dandiarchive.py index 242c7b158..6d183f938 100644 --- a/dandi/dandiarchive.py +++ b/dandi/dandiarchive.py @@ -206,15 +206,11 @@ def get_assets(self, client: DandiAPIClient) -> Iterator[RemoteAsset]: def navigate_url(url): """Context manager to 'navigate' URL pointing to DANDI archive. - Parameters - ---------- - url: str - URL which might point to a dandiset, a folder, or an asset(s) - - Yields - ------ - client, dandiset, assets (generator) - `client` will have established a session for the duration of the context + :param str url: URL which might point to a dandiset, a folder, or an + asset(s) + + :returns: Generator of one ``(client, dandiset, assets)``; ``client`` will + have established a session for the duration of the context """ parsed_url = parse_dandi_url(url) with parsed_url.navigate() as (client, dandiset, assets): @@ -337,14 +333,17 @@ class _dandi_url_parser: @classmethod def parse(cls, url, *, map_instance=True): - """Parse url like and return server (address), asset_id and/or directory + """ + Parse url like and return server (address), asset_id and/or directory Example URLs (as of 20210428): + - Dataset landing page metadata: https://gui.dandiarchive.org/#/dandiset/000003 Individual and multiple files: - - dandi??? + + - dandi??? Multiple selected files + folders -- we do not support ATM, then further RFing would be due, probably making this into a generator or returning a @@ -352,12 +351,10 @@ def parse(cls, url, *, map_instance=True): "Features": - - uses some of `known_instance`s to map some urls, e.g. from + - uses some of `known_instances` to map some urls, e.g. from gui.dandiarchive.org ones into girder. 
- Returns - ------- - ParsedDandiURL + :rtype: ParsedDandiURL """ lgr.debug("Parsing url %s", url) diff --git a/dandi/tests/skip.py b/dandi/tests/skip.py index 1396247d0..5fbc8cae6 100644 --- a/dandi/tests/skip.py +++ b/dandi/tests/skip.py @@ -12,13 +12,13 @@ There are two main ways to skip in pytest: - * decorating a test function, such as +* decorating a test function, such as:: @pytest.mark.skip(sys.platform.startswith("win"), reason="on windows") def test_func(): [...] - * skipping inline, such as +* skipping inline, such as:: def test_func(): if sys.platform.startswith("win"): @@ -28,16 +28,16 @@ def test_func(): This module provides a mechanism to register a reason and condition as both a decorator and an inline function: - * Within this module, create a condition function that returns a tuple of the - form (REASON, COND). REASON is a str that will be shown as the reason for - the skip, and COND is a boolean indicating if the test should be skipped. +* Within this module, create a condition function that returns a tuple of the + form (REASON, COND). REASON is a str that will be shown as the reason for + the skip, and COND is a boolean indicating if the test should be skipped. - For example + For example:: def windows(): return "on windows", sys.platform.startswith("win") - * Then add the above function to CONDITION_FNS. +* Then add the above function to CONDITION_FNS. Doing that will make the skip condition available in two places: `mark.skipif_NAME` and `skipif.NAME`. So, for the above example, there would diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 000000000..a4de0bff1 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= -W +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". 
+help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/doc/demos/basic-workflow1.sh b/docs/demos/basic-workflow1.sh similarity index 100% rename from doc/demos/basic-workflow1.sh rename to docs/demos/basic-workflow1.sh diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 000000000..6247f7e23 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 000000000..016ad722c --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,2 @@ +alabaster +Sphinx diff --git a/docs/source/cmdline/delete.rst b/docs/source/cmdline/delete.rst new file mode 100644 index 000000000..84b77612f --- /dev/null +++ b/docs/source/cmdline/delete.rst @@ -0,0 +1,16 @@ +:program:`dandi delete` +======================= + +:: + + dandi [] delete [] [ ...] + +Delete dandisets and assets from the server. 
+ +PATH could be a local path or a URL to an asset, directory, or an entire +dandiset. + +Options +------- + +.. option:: --skip-missing diff --git a/docs/source/cmdline/digest.rst b/docs/source/cmdline/digest.rst new file mode 100644 index 000000000..4324a915a --- /dev/null +++ b/docs/source/cmdline/digest.rst @@ -0,0 +1,15 @@ +:program:`dandi digest` +======================= + +:: + + dandi [] digest [] [ ...] + +Calculate file digests + +Options +------- + +.. option:: -d, --digest [dandi-etag|md5|sha1|sha256|sha512] + + Digest algorithm to use [default: dandi-etag] diff --git a/docs/source/cmdline/download.rst b/docs/source/cmdline/download.rst new file mode 100644 index 000000000..55f1bb31a --- /dev/null +++ b/docs/source/cmdline/download.rst @@ -0,0 +1,36 @@ +:program:`dandi download` +========================= + +:: + + dandi [] download [] [ ...] + +Options +------- + +.. option:: -o, --output-dir + + Directory where to download to (directory must exist). Files will be + downloaded with paths relative to that directory. + +.. option:: -e, --existing [error|skip|overwrite|overwrite-different|refresh] + + What to do if a file found existing locally. 'refresh': verify + that according to the size and mtime, it is the same file, if not - + download and overwrite. + +.. option:: -f, --format [pyout|debug] + + Choose the format/frontend for output. + +.. option:: -J, --jobs + + Number of parallel download jobs. + +.. option:: --download [dandiset.yaml,assets,all] + + Comma-separated list of elements to download + +.. option:: --sync + + Delete local assets that do not exist on the server diff --git a/docs/source/cmdline/index.rst b/docs/source/cmdline/index.rst new file mode 100644 index 000000000..4899bebba --- /dev/null +++ b/docs/source/cmdline/index.rst @@ -0,0 +1,8 @@ +********************** +Command-Line Interface +********************** + +.. 
toctree:: + :glob: + + * diff --git a/docs/source/cmdline/ls.rst b/docs/source/cmdline/ls.rst new file mode 100644 index 000000000..0435a0c37 --- /dev/null +++ b/docs/source/cmdline/ls.rst @@ -0,0 +1,50 @@ +:program:`dandi ls` +=================== + +:: + + dandi [] ls [] [ ...] + +List .nwb files and dandisets metadata. + +Patterns for known setups: + +- ``DANDI:`` +- ``https://dandiarchive.org/...`` +- ``https://identifiers.org/DANDI:`` +- ``https://[/api]/[#/]dandiset/[/][/files[?location=]]`` +- ``https://*dandiarchive-org.netlify.app/...`` +- ``https://[/api]/dandisets/[/versions[/]]`` +- ``https://[/api]/dandisets//versions//assets/[/download]`` +- ``https://[/api]/dandisets//versions//assets/?path=`` +- ``dandi:///[@][/]`` +- ``https:///...`` + + +Options
------- + +.. option:: -F, --fields + + Comma-separated list of fields to display. An empty value to trigger a + list of available fields to be printed out + +.. option:: -f, --format [auto|pyout|json|json_pp|json_lines|yaml] + + Choose the format/frontend for output. If 'auto' (default), 'pyout' will be + used in case of multiple files, and 'yaml' for a single file. + +.. option:: -r, --recursive + + Recurse into content of dandisets/directories. Only .nwb files will be + considered. + +.. option:: -J, --jobs + + Number of parallel download jobs. + +.. option:: --metadata [api|all|assets] + +.. option:: --schema + + Convert metadata to new schema version diff --git a/docs/source/cmdline/organize.rst b/docs/source/cmdline/organize.rst new file mode 100644 index 000000000..d54d36489 --- /dev/null +++ b/docs/source/cmdline/organize.rst @@ -0,0 +1,58 @@ +:program:`dandi organize` +========================= + +:: + + dandi [] organize [] [ ...] + +(Re)organize files according to the metadata. + +The purpose of this command is to take advantage of metadata contained in the +.nwb files to provide datasets with consistently named files, so their naming +reflects data they contain. 
+ +.nwb files are organized into a hierarchy of subfolders one per each "subject", +e.g. sub-0001 if .nwb file had contained a Subject group with subject_id=0001. +Each file in a subject-specific subfolder follows the convention:: + + sub-[_key-][_mod1+mod2+...].nwb + +where following keys are considered if present in the data:: + + ses -- session_id + tis -- tissue_sample_id + slice -- slice_id + cell -- cell_id + +and ``modX`` are "modalities" as identified based on detected neural data types +(such as "ecephys", "icephys") per extensions found in nwb-schema definitions: +https://github.com/NeurodataWithoutBorders/nwb-schema/tree/dev/core + +In addition an "obj" key with a value corresponding to crc32 checksum of +"object_id" is added if aforementioned keys and the list of modalities are +not sufficient to disambiguate different files. + +You can visit https://dandiarchive.org for a growing collection of +(re)organized dandisets. + +Options +------- + +.. option:: -d, --dandiset-path + + A top directory (local) of the dandiset to organize files under. If not + specified, dandiset current directory is under is assumed. For 'simulate' + mode target dandiset/directory must not exist. + +.. option:: --invalid [fail|warn] + + What to do if files without sufficient metadata are encountered. + +.. option:: -f, --files-mode [dry|simulate|copy|move|hardlink|symlink|auto] + + If 'dry' - no action is performed, suggested renames are printed. If + 'simulate' - hierarchy of empty files at --local-top-path is created. Note + that previous layout should be removed prior this operation. If 'auto' + (default) - whichever of symlink, hardlink, copy is allowed by system. The + other modes (copy, move, symlink, hardlink) define how data files should be + made available. 
diff --git a/docs/source/cmdline/shell-completion.rst b/docs/source/cmdline/shell-completion.rst new file mode 100644 index 000000000..bc576f567 --- /dev/null +++ b/docs/source/cmdline/shell-completion.rst @@ -0,0 +1,24 @@ +:program:`dandi shell-completion` +================================= + +:: + + dandi [] shell-completion [] + +Emit shell script for enabling command completion. + +The output of this command should be "sourced" by bash or zsh to enable command +completion. + +Example:: + + $ source <(dandi shell-completion) + $ dandi -- + +Options +------- + +.. option:: -s, --shell [bash|zsh|fish|auto] + + The shell for which to generate completion code; `auto` (default) attempts + autodetection diff --git a/docs/source/cmdline/upload.rst b/docs/source/cmdline/upload.rst new file mode 100644 index 000000000..a5bd23e1d --- /dev/null +++ b/docs/source/cmdline/upload.rst @@ -0,0 +1,42 @@ +:program:`dandi upload` +======================= + +:: + + dandi [] upload [] [ ...] + +Upload dandiset (files) to DANDI archive. + +Target dandiset to upload to must already be registered in the archive and +locally :file:`dandiset.yaml` should exist in :option:`--dandiset-path`. + +Local dandiset should pass validation. For that it should be first organized +using ``dandi organize`` command. + +By default all files in the dandiset (not following directories starting with a +period) will be considered for the upload. You can point to specific files you +would like to validate and have uploaded. + +Options +------- + +.. option:: -e, --existing [error|skip|force|overwrite|refresh] + + What to do if a file found existing on the server. 'skip' would skip the + file, 'force' - force reupload, 'overwrite' - force upload if either size + or modification time differs; 'refresh' (default) - upload only if local + modification time is ahead of the remote. + +.. option:: -J, --jobs N[:M] + + Number of files to upload in parallel and, optionally, number of upload + threads per file + +.. 
option:: --sync + + Delete assets on the server that do not exist locally + +.. option:: --validation [require|skip|ignore] + + Data must pass validation before the upload. Use of this option is highly + discouraged. diff --git a/docs/source/cmdline/validate.rst b/docs/source/cmdline/validate.rst new file mode 100644 index 000000000..1e9c6fdea --- /dev/null +++ b/docs/source/cmdline/validate.rst @@ -0,0 +1,10 @@ +:program:`dandi validate` +========================= + +:: + + dandi [] validate [ ...] + +Validate files for NWB (and DANDI) compliance. + +Exits with non-0 exit code if any file is not compliant. diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 000000000..46e10d81c --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,56 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- Project information ----------------------------------------------------- + +project = "DANDI Archive CLI and Python API library" +copyright = "2021, DANDI Team" +author = "DANDI Team" + +import dandi + +# The full version, including alpha/beta/rc tags +release = dandi.__version__ + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = ["sphinx.ext.autodoc", "sphinx.ext.autosummary"] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "alabaster" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = ["_static"] diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 000000000..e5f1b5c81 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,25 @@ +.. DANDI Archive CLI and Python API library documentation master file, created by + sphinx-quickstart on Wed Jul 14 14:06:55 2021. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to dandi-cli documentation +================================== + +dandi-cli provides both command line interface (CLI) tools and a Python library (AKA Python API) to work with `DANDI +Archive `_. + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + cmdline/index + modref/index + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/source/modref/consts.rst b/docs/source/modref/consts.rst new file mode 100644 index 000000000..eb6202ea9 --- /dev/null +++ b/docs/source/modref/consts.rst @@ -0,0 +1,4 @@ +``dandi.consts`` +================ + +.. 
automodule:: dandi.consts diff --git a/docs/source/modref/dandiapi.rst b/docs/source/modref/dandiapi.rst new file mode 100644 index 000000000..ea2532c6d --- /dev/null +++ b/docs/source/modref/dandiapi.rst @@ -0,0 +1,17 @@ +.. currentmodule:: dandi.dandiapi + +Mid-level user interfaces +========================= + +.. autoclass:: RESTFullAPIClient + :members: +.. autoclass:: DandiAPIClient + :members: +.. autoclass:: APIBase + :members: +.. autoclass:: RemoteDandiset + :members: +.. autoclass:: Version + :members: +.. autoclass:: RemoteAsset + :members: diff --git a/docs/source/modref/dandiarchive.rst b/docs/source/modref/dandiarchive.rst new file mode 100644 index 000000000..597e9bf50 --- /dev/null +++ b/docs/source/modref/dandiarchive.rst @@ -0,0 +1,24 @@ +.. currentmodule:: dandi.dandiarchive + +High-level user interfaces +========================== + +.. autofunction:: navigate_url +.. autofunction:: parse_dandi_url + +.. autoclass:: ParsedDandiURL + :members: +.. autoclass:: DandisetURL + :show-inheritance: +.. autoclass:: SingleAssetURL + :show-inheritance: +.. autoclass:: MultiAssetURL + :show-inheritance: +.. autoclass:: AssetIDURL + :show-inheritance: +.. autoclass:: AssetPathPrefixURL + :show-inheritance: +.. autoclass:: AssetItemURL + :show-inheritance: +.. autoclass:: AssetFolderURL + :show-inheritance: diff --git a/docs/source/modref/index.rst b/docs/source/modref/index.rst new file mode 100644 index 000000000..475e1c3dd --- /dev/null +++ b/docs/source/modref/index.rst @@ -0,0 +1,54 @@ +.. -*- mode: rst -*- +.. vi: set ft=rst sts=4 ts=4 sw=4 et tw=79: + +.. currentmodule:: dandi + +.. _chap_modref: + +********** +Python API +********** + +This module reference extends the manual with a comprehensive overview of the +available functionality built into dandi. Each module in the package is +documented by a general summary of its purpose and the list of classes and +functions it provides. + + +High-level user interfaces +========================== + +.. 
toctree:: + + dandiarchive + +Mid-level user interfaces +========================= + +.. toctree:: + + dandiapi + +Support functionality +===================== + +.. toctree:: + + consts + utils + support.digests + +Test infrastructure +=================== + +.. autosummary:: + :toctree: generated + + tests.fixtures + tests.skip + +Command line interface infrastructure +===================================== + +.. autosummary:: + :toctree: generated diff --git a/docs/source/modref/support.digests.rst b/docs/source/modref/support.digests.rst new file mode 100644 index 000000000..c758c2084 --- /dev/null +++ b/docs/source/modref/support.digests.rst @@ -0,0 +1,4 @@ +``dandi.support.digests`` +========================= + +.. automodule:: dandi.support.digests diff --git a/docs/source/modref/utils.rst b/docs/source/modref/utils.rst new file mode 100644 index 000000000..604dd0d69 --- /dev/null +++ b/docs/source/modref/utils.rst @@ -0,0 +1,4 @@ +``dandi.utils`` +=============== + +.. automodule:: dandi.utils diff --git a/tox.ini b/tox.ini index 7a3b7fdee..917920c2f 100644 --- a/tox.ini +++ b/tox.ini @@ -20,6 +20,12 @@ commands = codespell dandi setup.py flake8 --config=setup.cfg {posargs} dandi setup.py +[testenv:docs] +basepython = python3 +deps = -rdocs/requirements.txt +changedir = docs +commands = sphinx-build -E -W -b html source build + [pytest] addopts = --tb=short --durations=10 markers =