File: distributed/protocol/arrow.py

Package: dask.distributed 2022.12.1+ds.1-3 (Debian bookworm, main)

from __future__ import annotations

import pyarrow
from packaging.version import parse as parse_version

from distributed.protocol.serialize import dask_deserialize, dask_serialize

# Guard against old pyarrow. parse_version avoids the pitfall of comparing
# version strings lexicographically (e.g. "0.9" < "0.10" is False as strings).
if parse_version(pyarrow.__version__) < parse_version("0.10"):
    raise ImportError(
        "Need pyarrow >= 0.10. "
        "See https://arrow.apache.org/docs/python/install.html"
    )


@dask_serialize.register(pyarrow.RecordBatch)
def serialize_batch(batch):
    """Serialize a pyarrow RecordBatch as one Arrow IPC stream frame."""
    sink = pyarrow.BufferOutputStream()
    writer = pyarrow.RecordBatchStreamWriter(sink, batch.schema)
    writer.write_batch(batch)
    writer.close()
    buf = sink.getvalue()
    # The header needs no metadata: the IPC stream embeds the schema itself.
    header = {}
    frames = [buf]
    return header, frames


@dask_deserialize.register(pyarrow.RecordBatch)
def deserialize_batch(header, frames):
    """Reconstruct a RecordBatch from the single frame written above."""
    blob = frames[0]
    reader = pyarrow.RecordBatchStreamReader(pyarrow.BufferReader(blob))
    return reader.read_next_batch()
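

# A round-trip sketch (illustrative; not part of the upstream module). Dask
# normally invokes these handlers via distributed.protocol.serialize, but
# calling them directly shows the contract:
#
#     batch = pyarrow.RecordBatch.from_pydict({"x": [1, 2, 3]})
#     header, frames = serialize_batch(batch)
#     assert deserialize_batch(header, frames).equals(batch)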


@dask_serialize.register(pyarrow.Table)
def serialize_table(tbl):
    """Serialize a pyarrow Table as one Arrow IPC stream frame."""
    sink = pyarrow.BufferOutputStream()
    writer = pyarrow.RecordBatchStreamWriter(sink, tbl.schema)
    writer.write_table(tbl)
    writer.close()
    buf = sink.getvalue()
    # As with batches, the header stays empty; the stream is self-describing.
    header = {}
    frames = [buf]
    return header, frames


@dask_deserialize.register(pyarrow.Table)
def deserialize_table(header, frames):
    """Reconstruct a Table by reading every batch in the stream frame."""
    blob = frames[0]
    reader = pyarrow.RecordBatchStreamReader(pyarrow.BufferReader(blob))
    return reader.read_all()
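

# A minimal self-test sketch (an illustrative addition, not part of the
# upstream module); it exercises the table handlers when the file is run
# directly:
if __name__ == "__main__":
    tbl = pyarrow.table({"x": [1, 2, 3], "y": ["a", "b", "c"]})
    header, frames = serialize_table(tbl)
    assert deserialize_table(header, frames).equals(tbl)
    print("pyarrow.Table round-trip OK")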