
Commit b000b16

REVERTME queue by default
1 parent d5048c1 commit b000b16

3 files changed: +11 −8 lines changed

.github/workflows/tests.yaml

Lines changed: 5 additions & 5 deletions
@@ -24,7 +24,7 @@ jobs:
       matrix:
         os: [ubuntu-latest, windows-latest, macos-latest]
         python-version: ["3.8", "3.9", "3.10"]
-        queuing: [no_queue]
+        queuing: [queue]
         # Cherry-pick test modules to split the overall runtime roughly in half
         partition: [ci1, not ci1]
         exclude:
@@ -33,11 +33,11 @@ jobs:
         include:
           - os: ubuntu-latest
             python-version: 3.9
-            queuing: queue
+            queuing: no_queue
             partition: "ci1"
           - os: ubuntu-latest
             python-version: 3.9
-            queuing: queue
+            queuing: no_queue
             partition: "not ci1"

         # Uncomment to stress-test the test suite for random failures.
@@ -144,8 +144,8 @@ jobs:

       - name: Set up dask env for job queuing
         shell: bash -l {0}
-        if: ${{ matrix.queuing == 'queue' }}
-        run: echo "DASK_DISTRIBUTED__SCHEDULER__WORKER_SATURATION=1.0" >> $GITHUB_ENV
+        if: ${{ matrix.queuing == 'no_queue' }}
+        run: echo "DASK_DISTRIBUTED__SCHEDULER__WORKER_SATURATION=inf" >> $GITHUB_ENV

       - name: Print host info
         shell: bash -l {0}
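
A minimal sketch (not part of this commit) of how the environment variable exported in the CI step above maps onto dask's config: the DASK_ prefix is stripped, "__" becomes a nesting separator, and underscores/hyphens are treated as equivalent, so DASK_DISTRIBUTED__SCHEDULER__WORKER_SATURATION resolves to the key "distributed.scheduler.worker-saturation". The read-back value and any coercion of "inf" to a float by the scheduler are assumptions, not verified here.

    import os

    import dask
    import distributed  # noqa: F401  # importing distributed registers its config defaults

    # Mimic the CI step: export the variable, then re-collect dask's config.
    os.environ["DASK_DISTRIBUTED__SCHEDULER__WORKER_SATURATION"] = "inf"
    dask.config.refresh()  # re-reads environment variables into the live config

    # The nested key corresponding to the env var; may come back as the string "inf".
    print(dask.config.get("distributed.scheduler.worker-saturation"))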

distributed/distributed.yaml

Lines changed: 2 additions & 2 deletions
@@ -22,7 +22,7 @@ distributed:
     events-log-length: 100000
     work-stealing: True            # workers should steal tasks from each other
     work-stealing-interval: 100ms  # Callback time for work stealing
-    worker-saturation: .inf        # Send this fraction of nthreads root tasks to workers
+    worker-saturation: 1.1         # Send this fraction of nthreads root tasks to workers
     worker-ttl: "5 minutes"        # like '60s'. Time to live for workers. They must heartbeat faster than this
     pickle: True                   # Is the scheduler allowed to deserialize arbitrary bytestrings
     preload: []                    # Run custom modules with Scheduler
@@ -152,7 +152,7 @@ distributed:
       # Fractions of worker process memory at which we take action to avoid memory
       # blowup. Set any of the values to False to turn off the behavior entirely.
       # All fractions are relative to each worker's memory_limit.
-      transfer: 0.10  # fractional size of incoming data transfers where we start
+      transfer: 0.10  # fractional size of incoming data transfers where we start
                       # throttling incoming data transfers
       target: 0.60    # fraction of managed memory where we start spilling to disk
       spill: 0.70     # fraction of process memory where we start spilling to disk
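
For completeness, a minimal sketch (assumption: distributed is importable so the defaults from this distributed.yaml are registered) of reading and temporarily overriding the new default from Python, without editing the YAML file:

    import dask
    import distributed  # noqa: F401  # registers the defaults shown in this file

    # Default after this commit is 1.1 (was .inf).
    print(dask.config.get("distributed.scheduler.worker-saturation"))

    # Temporarily restore unlimited root-task scheduling for a block of code.
    with dask.config.set({"distributed.scheduler.worker-saturation": float("inf")}):
        print(dask.config.get("distributed.scheduler.worker-saturation"))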

distributed/tests/test_scheduler.py

Lines changed: 4 additions & 1 deletion
@@ -891,8 +891,11 @@ def f(x, y=2):
     assert set(d) == {"function", "args"}


+@pytest.mark.parametrize("worker_saturation", [1.0, float("inf")])
 @gen_cluster()
-async def test_ready_remove_worker(s, a, b):
+async def test_ready_remove_worker(s, a, b, worker_saturation):
+    s.WORKER_SATURATION = worker_saturation
+
     s.update_graph(
         tasks={"x-%d" % i: dumps_task((inc, i)) for i in range(20)},
         keys=["x-%d" % i for i in range(20)],
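
The pattern used in the test above, in isolation: pytest.mark.parametrize stacked on top of @gen_cluster(), which passes the parametrized argument to the async test body after the scheduler and workers (s, a, b). This is an illustrative sketch only; the assertion is not from this commit.

    import pytest

    from distributed.utils_test import gen_cluster


    @pytest.mark.parametrize("worker_saturation", [1.0, float("inf")])
    @gen_cluster()
    async def test_saturation_is_applied(s, a, b, worker_saturation):
        # Mirror the commit: patch the scheduler's saturation limit directly.
        s.WORKER_SATURATION = worker_saturation
        assert s.WORKER_SATURATION == worker_saturation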
