from __future__ import annotations
# isort: off
from distributed import config # load distributed configuration first
from distributed import widgets # load distributed widgets second
# isort: on
import atexit
import dask
from dask.config import config # type: ignore
from distributed._version import get_versions
from distributed.actor import Actor, ActorFuture, BaseActorFuture
from distributed.client import (
Client,
CompatibleExecutor,
Future,
as_completed,
default_client,
fire_and_forget,
futures_of,
get_task_metadata,
get_task_stream,
performance_report,
wait,
)
from distributed.core import Status, connect, rpc
from distributed.deploy import Adaptive, LocalCluster, SpecCluster, SSHCluster
from distributed.diagnostics.plugin import (
CondaInstall,
Environ,
NannyPlugin,
PackageInstall,
PipInstall,
SchedulerPlugin,
UploadDirectory,
UploadFile,
WorkerPlugin,
)
from distributed.diagnostics.progressbar import progress
from distributed.event import Event
from distributed.lock import Lock
from distributed.multi_lock import MultiLock
from distributed.nanny import Nanny
from distributed.pubsub import Pub, Sub
from distributed.queues import Queue
from distributed.scheduler import KilledWorker, Scheduler
from distributed.security import Security
from distributed.semaphore import Semaphore
from distributed.threadpoolexecutor import rejoin
from distributed.utils import CancelledError, TimeoutError, sync
from distributed.variable import Variable
from distributed.worker import (
Reschedule,
Worker,
get_client,
get_worker,
print,
secede,
warn,
)
from distributed.worker_client import local_client, worker_client


def __getattr__(name):
    # PEP 562 module-level __getattr__: compute __version__ and __git_revision__
    # lazily on first access instead of at import time.
global __version__, __git_revision__
if name == "__version__":
from importlib.metadata import version
__version__ = version("distributed")
return __version__
if name == "__git_revision__":
from distributed._version import get_versions
__git_revision__ = get_versions()["full-revisionid"]
return __git_revision__
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
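
# Illustrative sketch (comments only, not executed): the module-level ``__getattr__``
# above follows PEP 562, so version metadata is only resolved when first requested.
# The version strings shown below are hypothetical:
#
#     >>> import distributed
#     >>> distributed.__version__       # looked up via importlib.metadata on first use
#     '2023.1.0'
#     >>> distributed.__git_revision__  # from distributed._version.get_versions()
#     '0123abcd...'
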
_python_shutting_down = False


@atexit.register
def _():
"""Set a global when Python shuts down.
Note
----
This function must be registered with atexit *after* any class that invokes
``dstributed.utils.is_python_shutting_down`` has been defined. This way it
will be called before the ``__del__`` method of those classes.
See Also
--------
distributed.utils.is_python_shutting_down
"""
global _python_shutting_down
_python_shutting_down = True
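
# Minimal usage sketch (comments only): code that must behave differently during
# interpreter shutdown typically checks this flag via
# ``distributed.utils.is_python_shutting_down``, for example inside a ``__del__``
# method. The class and ``close`` method here are hypothetical:
#
#     from distributed.utils import is_python_shutting_down
#
#     class SomeResource:
#         def __del__(self):
#             if not is_python_shutting_down():
#                 self.close()

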
__all__ = [
"Actor",
"ActorFuture",
"Adaptive",
"BaseActorFuture",
"CancelledError",
"Client",
"CompatibleExecutor",
"CondaInstall",
"Environ",
"Event",
"Future",
"KilledWorker",
"LocalCluster",
"Lock",
"MultiLock",
"Nanny",
"NannyPlugin",
"PackageInstall",
"PipInstall",
"Pub",
"Queue",
"Reschedule",
"SSHCluster",
"Scheduler",
"SchedulerPlugin",
"Security",
"Semaphore",
"SpecCluster",
"Status",
"Sub",
"TimeoutError",
"UploadDirectory",
"UploadFile",
"Variable",
"Worker",
"WorkerPlugin",
"as_completed",
"config",
"connect",
"dask",
"default_client",
"fire_and_forget",
"futures_of",
"get_client",
"get_task_metadata",
"get_task_stream",
"get_versions",
"get_worker",
"local_client",
"performance_report",
"print",
"progress",
"rejoin",
"rpc",
"secede",
"sync",
"wait",
"warn",
"widgets",
"worker_client",
]