dandi.download#

Functions

download(urls, output_dir, *[, format, ...])

pairing(p, gen)

Classes

DLState(value)

An enumeration.

DownloadDirectory(filepath, digests)

DownloadExisting(value)

An enumeration.

DownloadFormat(value)

An enumeration.

DownloadProgress(state, downloaded, size)

Downloader(url, output_dir, existing, ...[, ...])

Internal class (its docstring is marked :meta private:); not part of the public API.

ItemsSummary()

A helper "structure" to accumulate information about assets to be downloaded

PYOUTHelper()

Helper for PYOUT styling

PathType(value)

An enumeration.

ProgressCombiner(zarr_size, file_qty, files, ...)

class dandi.download.DLState(value)#

An enumeration.

CHECKSUM_ERROR = 5#
DONE = 6#
DOWNLOADING = 2#
ERROR = 4#
SKIPPED = 3#
STARTING = 1#
class dandi.download.DownloadDirectory(filepath: str | Path, digests: dict[str, str])[source]#
append(blob: bytes) → None[source]#
digests#

Expected hashes of the downloaded data, as a mapping from algorithm names to digests

dirpath#

The working directory in which downloaded data will be temporarily stored

filepath#

The path to which to save the file after downloading

fp: IO[bytes] | None#

An open filehandle to writefile

lock: InterProcessLock | None#

A fasteners.InterProcessLock on dirpath

offset: int | None#

How much of the data has been downloaded so far

writefile#

The file in dirpath to which data will be written as it is received

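A minimal usage sketch, assuming DownloadDirectory acts as a context manager (its fp, lock, and offset attributes describe an open/close lifecycle) and that writefile is moved into place at filepath on a clean exit; the filename and iter_chunks() byte source are illustrative stand-ins:

    from pathlib import Path
    from dandi.download import DownloadDirectory

    digests = {}  # no checksum verification in this sketch; real use passes algorithm -> digest

    def iter_chunks():
        # Stand-in for any source of downloaded bytes (e.g. an HTTP response).
        yield b"chunk-1"
        yield b"chunk-2"

    # Assumption: entering the context creates and locks dirpath, resumes from
    # `offset` if a partial download is found, and exiting moves `writefile`
    # to `filepath`.
    with DownloadDirectory(Path("sub-01_ses-01.nwb"), digests) as dl:
        for chunk in iter_chunks():
            dl.append(chunk)  # append() writes received bytes via `fp`
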
class dandi.download.DownloadExisting(value)[source]#

An enumeration.

ERROR = 'error'#
OVERWRITE = 'overwrite'#
OVERWRITE_DIFFERENT = 'overwrite-different'#
REFRESH = 'refresh'#
SKIP = 'skip'#
class dandi.download.DownloadFormat(value)[source]#

An enumeration.

DEBUG = 'debug'#
PYOUT = 'pyout'#
class dandi.download.DownloadProgress(state: 'DLState' = <DLState.STARTING: 1>, downloaded: 'int' = 0, size: 'int | None' = None)[source]#
downloaded: int = 0#
size: int | None = None#
state: DLState = 1#
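
A small sketch of constructing and inspecting a DownloadProgress record, with illustrative values:

    from dandi.download import DLState, DownloadProgress

    # Illustrative numbers: 1024 of 4096 bytes fetched so far.
    progress = DownloadProgress(state=DLState.DOWNLOADING, downloaded=1024, size=4096)

    if progress.state is DLState.DONE:
        print("finished")
    elif progress.size is not None:
        print(f"{progress.downloaded}/{progress.size} bytes")
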
class dandi.download.ItemsSummary[source]#

A helper “structure” to accumulate information about assets to be downloaded

To be used as a callback to IteratorWithAggregation

as_dict() → dict[source]#
class dandi.download.PYOUTHelper[source]#

Helper for PYOUT styling

Provides aggregation callbacks for PyOUT, and an iterator to wrap around the iteration over assets so that “totals” become available as soon as they are known.

agg_done(done_sizes: Iterator[int]) → list[str][source]#

Formatter for “DONE” column

agg_files(*ignored: Any) → str[source]#
agg_size(sizes: Iterable[int]) → str | list[str][source]#

Formatter for the “size” column: shows how much is “active” (or done) plus how much is yet to be “shown”.

class dandi.download.PathType(value)[source]#

An enumeration.

EXACT = 'exact'#
GLOB = 'glob'#
class dandi.download.ProgressCombiner(zarr_size: 'int', file_qty: 'int', files: 'dict[str, DownloadProgress]' = <factory>, maxsize: 'int' = 0, prev_status: 'str' = '', yielded_size: 'bool' = False)[source]#
feed(path: str, status: dict) → Iterator[dict][source]#
file_qty: int#
files: dict[str, DownloadProgress]#
get_done() → dict[source]#
maxsize: int = 0#

Total size of all files that were not skipped and did not error out during download

property message: str#
prev_status: str = ''#
set_status(statusdict: dict) → None[source]#
yielded_size: bool = False#
zarr_size: int#
dandi.download.download(urls: str | Sequence[str], output_dir: str | Path, *, format: DownloadFormat = DownloadFormat.PYOUT, existing: DownloadExisting = DownloadExisting.ERROR, jobs: int = 1, jobs_per_zarr: int | None = None, get_metadata: bool = True, get_assets: bool = True, sync: bool = False, path_type: PathType = PathType.EXACT) → None[source]#
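
A usage sketch of the top-level download() entry point; the Dandiset URL and output directory are illustrative, and the keyword arguments simply exercise the enums documented above:

    from dandi.download import DownloadExisting, DownloadFormat, PathType, download

    download(
        "https://dandiarchive.org/dandiset/000027",  # illustrative DANDI URL
        "downloads/",                          # output_dir
        format=DownloadFormat.PYOUT,           # progress rendering: PYOUT or DEBUG
        existing=DownloadExisting.SKIP,        # skip files already present locally
        jobs=4,                                # number of parallel download jobs
        get_metadata=True,                     # fetch dandiset metadata as well
        get_assets=True,                       # fetch the assets themselves
        path_type=PathType.EXACT,              # treat asset paths as exact, not globs
    )
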
dandi.download.pairing(p: str, gen: Iterator[dict]) → Iterator[tuple[str, dict]][source]#
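
Judging by its annotations, pairing() tags each status record produced by a generator with the asset path it belongs to; a sketch under that assumption:

    from dandi.download import pairing

    statuses = iter([{"status": "downloading"}, {"status": "done"}])
    # Assumption: each yielded item is a (path, status_dict) tuple, per the
    # Iterator[tuple[str, dict]] return annotation.
    for path, status in pairing("sub-01/sub-01.nwb", statuses):
        print(path, status)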