```python
import numpy as np
import dascore as dc

spool = dc.get_example_spool("random_das")

# Calculate the std for each channel in 5 second chunks
results = (
    spool.chunk(time=5)
    .map(lambda x: np.std(x.data, axis=0))
)
# stack back into array. dims are (distance, time chunk)
out = np.stack(results, axis=-1)
```

map

```
map(
    self,
    func: collections.abc.Callable[Patch, ..., None],
    client: dascore.constants.ExecutorType | None = None,
    size: int | None = None,
    progress: bool = True,
    **kwargs,
) -> list[T]
```
Map a function over all the contents of the spool.
Parameters
| Parameter | Description |
|---|---|
| func | A callable which takes a patch as its first argument. |
| client | A client, or executor, which has a `map` method. |
| size | The number of patches in each spool mapped to a client. If not set, defaults to the number of processors on the host. Does nothing unless client is defined. |
| progress | If True, display a progress bar. |
| **kwargs | kwargs passed to func. |
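As a sketch of how the extra keyword arguments are forwarded, the example below passes a keyword through `map` to the mapped function. The helper `max_abs` and its `scale` argument are illustrative, not part of DASCore.

```python
import numpy as np
import dascore as dc

spool = dc.get_example_spool("random_das")

# Hypothetical helper: takes a patch as its first argument plus an
# extra keyword argument.
def max_abs(patch, scale=1.0):
    return scale * np.abs(patch.data).max()

# Keyword arguments given to map (other than client, size, and progress)
# are forwarded to the function; the progress bar is turned off here.
results = spool.map(max_abs, scale=2.0, progress=False)
```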
Note
When a client is specified, the spool is split and the resulting sub-spools are passed to the client's map method. This avoids serializing loaded patches. See Spool.split for more details about the spool_count and spool_size parameters.
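For parallel execution, a minimal sketch using the standard-library `ProcessPoolExecutor` as the client is shown below; any executor exposing a `map` method should work, and the choice of `size=2` is purely illustrative.

```python
from concurrent.futures import ProcessPoolExecutor

import numpy as np
import dascore as dc


def channel_std(patch):
    """Return the standard deviation along the first axis of the patch data."""
    return np.std(patch.data, axis=0)


if __name__ == "__main__":
    # Chunk into 5 second patches, as in the example above.
    spool = dc.get_example_spool("random_das").chunk(time=5)
    # The spool is split into sub-spools (size patches each) and each
    # sub-spool is handed to the executor's map method.
    with ProcessPoolExecutor() as executor:
        results = spool.map(channel_std, client=executor, size=2)
```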