Dask

Testing Dask with Echopype and AWS Lambda

Vanilla example derived from here:

Dask Arrays Example

process.py

import echopype as ep
print(ep.__version__) # 0.7.1

import dask
from dask.distributed import Client

import dask.array as da

def main(raw_file="L0003-D20040909-T161906-EK60.raw",
         zarr_store="L0003-D20040909-T161906-EK60.zarr"):
    """Run a small Dask array smoke test, then an Echopype EK60 pipeline.

    Parameters
    ----------
    raw_file : str
        Path to the EK60 ``.raw`` input file (default keeps the filename
        that was previously hard-coded).
    zarr_store : str
        Path of the Zarr store the calibrated Sv dataset is written to.
    """
    # Client is a context manager: it is closed even if processing raises.
    # (Previously client.close() was only reached on the success path.)
    with Client(processes=False, threads_per_worker=2, n_workers=2,
                memory_limit='2GB') as client:
        print(client)
        # --- vanilla dask.array demo (from the Dask Arrays example) ---
        x = da.random.random((10000, 10000), chunks=(1000, 1000))
        print(x)
        y = x + x.T
        z = y[::2, 5000:].mean(axis=1)
        print(z)
        z.compute()  # trigger execution; the demo discards the result
        y = y.persist()  # keep y in worker memory for the reads below
        print(y[0, 0].compute())
        print(y.sum().compute())
        # --- echopype processing ---
        print('running echopype processing')
        echodata = ep.open_raw(raw_file, sonar_model='EK60')  # PROBLEM WITH --> use_swap=True)
        print("calibrate compute sv")
        ds_Sv = ep.calibrate.compute_Sv(echodata).compute()
        print("write to zarr")
        ds_Sv.to_zarr(store=zarr_store)

if __name__ == '__main__':
    main()

To run, "python -m mprof run dask_echopype.py" and then to plot, "python -m mprof plot".

Last updated

Was this helpful?