Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Spool map #227

Merged
merged 8 commits into from
Aug 23, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 6 additions & 13 deletions dascore/clients/dirspool.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,10 @@
from typing_extensions import Self

import dascore as dc
from dascore.core.spool import DataFrameSpool
from dascore.core.spool import DataFrameSpool, BaseSpool
from dascore.io.indexer import AbstractIndexer, DirectoryIndexer
from dascore.utils.pd import adjust_segments
from dascore.utils.docs import compose_docstring


class DirectorySpool(DataFrameSpool):
Expand Down Expand Up @@ -91,24 +92,16 @@ def spool_path(self):
"""Return the path in which the spool contents are found."""
return self.indexer.path

@compose_docstring(doc=BaseSpool.get_contents.__doc__)
def get_contents(self) -> pd.DataFrame:
"""
Return a dataframe of the contents of the data files.

Parameters
----------
time
If not None, a tuple of start/end time where either can be None
indicating an open interval.
{doc}
"""
return self._df

@compose_docstring(doc=BaseSpool.update.__doc__)
def update(self) -> Self:
"""
Updates the contents of the spool and returns a spool.

Resets any previous selection.
"""
"""{doc}"""
out = self.__class__(
base_path=self.indexer.update(),
preferred_format=self._preferred_format,
Expand Down
8 changes: 5 additions & 3 deletions dascore/clients/filespool.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,9 @@

import dascore as dc
from dascore.constants import SpoolType
from dascore.core.spool import DataFrameSpool
from dascore.core.spool import DataFrameSpool, BaseSpool
from dascore.io.core import FiberIO
from dascore.utils.docs import compose_docstring


class FileSpool(DataFrameSpool):
Expand Down Expand Up @@ -65,11 +66,12 @@ def _load_patch(self, kwargs) -> Self:
"""Given a row from the managed dataframe, return a patch."""
return dc.read(**kwargs)[0]

@compose_docstring(doc=BaseSpool.update.__doc__)
def update(self: SpoolType) -> Self:
"""
Update the spool.
{doc}

If the file format supports indexing (e.g. DASDAE) this will
Note: If the file format supports indexing (e.g. DASDAE) this will
trigger an indexing of the file.
"""
formater = FiberIO.manager.get_fiberio(self._file_format, self._file_version)
Expand Down
11 changes: 10 additions & 1 deletion dascore/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from __future__ import annotations

from pathlib import Path
from typing import TypeVar
from typing import TypeVar, Protocol, runtime_checkable

import numpy as np
import pandas as pd
Expand All @@ -13,6 +13,15 @@

SpoolType = TypeVar("SpoolType", bound="dascore.BaseSpool")


@runtime_checkable
class ExecutorType(Protocol):
    """
    Protocol for Executors that DASCore can use.

    Any object exposing a ``map`` method satisfies this protocol
    (e.g. the executors in ``concurrent.futures``, which provide
    ``map(func, *iterables, **kwargs)``).

    Note: because this is ``runtime_checkable``, ``isinstance`` checks
    only verify that a ``map`` attribute exists — they do not validate
    its call signature.
    """

    def map(self, func, iterables, **kwargs):
        """Map function for applying concurrency of some flavor."""


# Bump this to force re-downloading of all data files
DATA_VERSION = "0.0.0"

Expand Down
Loading