1+ """Sample via dask."""
2+
13import numpy as np
24from dask .distributed import Client
35
@@ -13,7 +15,7 @@ class DaskDistributedSampler(EPSMixin, Sampler):
 
     Parameters
     ----------
-    dask_client: dask.Client, optional
+    dask_client:
         The configured dask Client.
         If none is provided, then a local dask distributed cluster is created.
     client_max_jobs:
@@ -22,14 +24,14 @@ class DaskDistributedSampler(EPSMixin, Sampler):
         the distributed infrastructure, the infrastructure will not be utilized
         fully.
     default_pickle:
-        Specify if the sampler uses pythons default pickle function to
+        Specify if the sampler uses python's default pickle function to
         communicate the submit function to python; if this is the case, a
         cloud-pickle based workaround is used to pickle the simulate and
         evaluate functions. This allows utilization of locally defined
         functions, which cannot be pickled using default pickle, at the cost
         of an additional pickling overhead. For dask, this workaround should
         not be necessary and it should be safe to use default_pickle=False.
-    batch_size: int, optional
+    batch_size:
         Number of parameter samples that are evaluated in one remote execution
         call. Batch submission can be used to reduce the communication overhead
         for fast (ms-s) model evaluations. Large batch sizes can result in un-
@@ -39,41 +41,35 @@ class DaskDistributedSampler(EPSMixin, Sampler):
 
     def __init__(
         self,
-        dask_client=None,
-        client_max_jobs=np.inf,
-        default_pickle=False,
-        batch_size=1,
+        dask_client: Client = None,
+        client_max_jobs: int = np.inf,
+        default_pickle: bool = False,
+        batch_size: int = 1,
     ):
-        super().__init__()
-
         # Assign Client
         if dask_client is None:
             dask_client = Client()
-        self.my_client = dask_client
-
-        # Client options
-        self.client_max_jobs = client_max_jobs
-
-        # Job state
-        self.jobs_queued = 0
-
-        # For dask, we use cloudpickle by default
-        self.default_pickle = default_pickle
 
-        # Batchsize
-        self.batch_size = batch_size
+        EPSMixin.__init__(
+            self,
+            client=dask_client,
+            client_max_jobs=client_max_jobs,
+            default_pickle=default_pickle,
+            batch_size=batch_size,
+        )
+        Sampler.__init__(self)
 
     def __getstate__(self):
         d = dict(self.__dict__)
-        del d['my_client']
+        del d['client']
         return d
 
-    def client_cores(self):
-        return sum(self.my_client.ncores().values())
+    def client_cores(self) -> int:
+        return sum(self.client.ncores().values())
 
     def shutdown(self):
         """Shutdown the dask client.
         If it was started without arguments, the
         local cluster that was started at the same time is also closed.
         """
-        self.my_client.close()
+        self.client.close()
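
For orientation, a minimal usage sketch of the sampler after this change. The import path and the hand-off to an ABC-SMC run are assumptions about the surrounding project layout, and the scheduler address and batch_size value are placeholders:

    from dask.distributed import Client

    from pyabc.sampler import DaskDistributedSampler  # import path assumed

    # Connect to an existing scheduler, or call Client() with no arguments to
    # spin up a local cluster (the sampler does the latter itself if no client
    # is passed).
    client = Client("tcp://scheduler:8786")  # placeholder address

    sampler = DaskDistributedSampler(
        dask_client=client,
        batch_size=10,  # batch fast (ms-s) evaluations to cut communication overhead
    )

    # ... hand `sampler` to the ABC-SMC analysis (e.g. via its `sampler`
    # argument), then release the cluster resources when done:
    sampler.shutdown()

Because shutdown() closes the client, a client that the sampler created implicitly (dask_client=None) is torn down together with its local cluster, as the shutdown() docstring notes.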
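The default_pickle parameter documented above hinges on a general Python fact: the standard pickle module serializes functions by reference and refuses locally defined ones, whereas cloudpickle serializes them by value. A small, dask-independent illustration of that difference (function names are made up for the example):

    import pickle

    import cloudpickle


    def make_simulator():
        # A nested function: standard pickle cannot serialize this ...
        def simulate(x):
            return x + 1

        return simulate


    sim = make_simulator()

    try:
        pickle.dumps(sim)
    except (AttributeError, pickle.PicklingError) as err:
        print("default pickle failed:", err)

    # ... but cloudpickle can, and its byte stream is loadable with plain
    # pickle.loads on the receiving side. This is the cloudpickle-based
    # workaround that the default_pickle option refers to.
    payload = cloudpickle.dumps(sim)
    restored = pickle.loads(payload)
    print(restored(1))  # -> 2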