Skip to content

Commit

Permalink
Spool map (#227)
Browse files Browse the repository at this point in the history
- spool.split
- spool.map
- add better docs to spool methods
  • Loading branch information
d-chambers authored Aug 23, 2023
1 parent c2296d3 commit fe5ae1d
Show file tree
Hide file tree
Showing 9 changed files with 447 additions and 87 deletions.
19 changes: 6 additions & 13 deletions dascore/clients/dirspool.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,10 @@
from typing_extensions import Self

import dascore as dc
from dascore.core.spool import DataFrameSpool
from dascore.core.spool import DataFrameSpool, BaseSpool
from dascore.io.indexer import AbstractIndexer, DirectoryIndexer
from dascore.utils.pd import adjust_segments
from dascore.utils.docs import compose_docstring


class DirectorySpool(DataFrameSpool):
Expand Down Expand Up @@ -91,24 +92,16 @@ def spool_path(self):
"""Return the path in which the spool contents are found."""
return self.indexer.path

@compose_docstring(doc=BaseSpool.get_contents.__doc__)
def get_contents(self) -> pd.DataFrame:
"""
Return a dataframe of the contents of the data files.
Parameters
----------
time
If not None, a tuple of start/end time where either can be None
indicating an open interval.
{doc}
"""
return self._df

@compose_docstring(doc=BaseSpool.update.__doc__)
def update(self) -> Self:
"""
Updates the contents of the spool and returns a spool.
Resets any previous selection.
"""
"""{doc}"""
out = self.__class__(
base_path=self.indexer.update(),
preferred_format=self._preferred_format,
Expand Down
8 changes: 5 additions & 3 deletions dascore/clients/filespool.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,9 @@

import dascore as dc
from dascore.constants import SpoolType
from dascore.core.spool import DataFrameSpool
from dascore.core.spool import DataFrameSpool, BaseSpool
from dascore.io.core import FiberIO
from dascore.utils.docs import compose_docstring


class FileSpool(DataFrameSpool):
Expand Down Expand Up @@ -65,11 +66,12 @@ def _load_patch(self, kwargs) -> Self:
"""Given a row from the managed dataframe, return a patch."""
return dc.read(**kwargs)[0]

@compose_docstring(doc=BaseSpool.update.__doc__)
def update(self: SpoolType) -> Self:
"""
Update the spool.
{doc}
If the file format supports indexing (e.g. DASDAE) this will
Note: If the file format supports indexing (e.g. DASDAE) this will
trigger an indexing of the file.
"""
formater = FiberIO.manager.get_fiberio(self._file_format, self._file_version)
Expand Down
11 changes: 10 additions & 1 deletion dascore/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from __future__ import annotations

from pathlib import Path
from typing import TypeVar
from typing import TypeVar, Protocol, runtime_checkable

import numpy as np
import pandas as pd
Expand All @@ -13,6 +13,15 @@

SpoolType = TypeVar("SpoolType", bound="dascore.BaseSpool")


@runtime_checkable
class ExecutorType(Protocol):
    """
    Protocol for Executors that DASCore can use.

    Any object exposing a ``map`` method with this shape satisfies the
    protocol (structural typing); ``@runtime_checkable`` additionally
    allows ``isinstance(obj, ExecutorType)`` checks, which only verify
    that a ``map`` attribute exists, not its signature.
    """

    def map(self, func, iterables, **kwargs):
        """
        Map function for applying concurrency of some flavor.

        NOTE(review): the parameter shape mirrors
        ``concurrent.futures.Executor.map`` (callable plus iterables of
        arguments) — presumably stdlib executors are the intended
        implementers; confirm against call sites.
        """


# Bump this to force re-downloading of all data files
DATA_VERSION = "0.0.0"

Expand Down
Loading

0 comments on commit fe5ae1d

Please sign in to comment.