Skip to content

Commit d763940

Browse files
authored
Memory budget config parameter fix for max_partition_weight heuristic for incomplete queries (#208)
1 parent 858177a commit d763940

File tree

5 files changed

+6
-6
lines changed

5 files changed

+6
-6
lines changed

examples/readers/pytorch_data_api_tiledb_dense.ipynb

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -454,7 +454,7 @@
454454
" img = np.clip(img,0,1)\n",
455455
" return img\n",
456456
"\n",
457-
"ctx = tiledb.Ctx({'sm.memory_budget': 1024**2})\n",
457+
"ctx = tiledb.Ctx({'sm.mem.total_budget': 1024**2})\n",
458458
"with tiledb.open(training_images, ctx=ctx) as x, tiledb.open(training_labels, ctx=ctx) as y:\n",
459459
" # Because of this issue (https://github.com/pytorch/pytorch/issues/59451#issuecomment-854883855) we avoid using multiple workers on Jupyter.\n",
460460
" train_loader = PyTorchTileDBDataLoader(\n",

examples/readers/tensorflow_data_api_tiledb_dense.ipynb

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -336,7 +336,7 @@
336336
"\n",
337337
"model = create_model()\n",
338338
"\n",
339-
"ctx = tiledb.Ctx({'sm.memory_budget': 1024**2})\n",
339+
"ctx = tiledb.Ctx({'sm.mem.total_budget': 1024**2})\n",
340340
"with tiledb.open(training_images, ctx=ctx) as x, tiledb.open(training_labels, ctx=ctx) as y:\n",
341341
" tiledb_dataset = TensorflowTileDBDataset(\n",
342342
" ArrayParams(array=x, fields=['features']),\n",
@@ -406,4 +406,4 @@
406406
},
407407
"nbformat": 4,
408408
"nbformat_minor": 4
409-
}
409+
}

tests/readers/test_tensor_schema.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -117,7 +117,7 @@ def parametrize_fields(*fields, num=3):
117117
def test_max_partition_weight_dense(
118118
dense_uri, fields, key_dim, memory_budget, dim_selectors
119119
):
120-
config = {"py.max_incomplete_retries": 0, "sm.memory_budget": memory_budget}
120+
config = {"py.max_incomplete_retries": 0, "sm.mem.total_budget": memory_budget}
121121
with tiledb.open(dense_uri, config=config) as array:
122122
_test_max_partition_weight(array, fields, key_dim, dim_selectors)
123123

tiledb/ml/readers/_tensor_schema/base.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -104,7 +104,7 @@ def max_partition_weight(self) -> int:
104104
105105
What constitutes weight of a partition depends on the array type:
106106
- For dense arrays, it is the number of unique keys (= number of "rows").
107-
It depends on the `sm.memory_budget` config parameter.
107+
It depends on the `sm.mem.total_budget` config parameter.
108108
- For sparse arrays, it is the number of non-empty cells.
109109
It depends on the `py.init_buffer_bytes` config parameter.
110110
"""

tiledb/ml/readers/_tensor_schema/dense.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -66,7 +66,7 @@ def iter_tensors(
6666

6767
@property
6868
def max_partition_weight(self) -> int:
69-
memory_budget = int(self._array._ctx_().config()["sm.memory_budget"])
69+
memory_budget = int(self._array._ctx_().config()["sm.mem.total_budget"])
7070

7171
# The memory budget should be large enough to read the cells of the largest field
7272
bytes_per_cell = max(dtype.itemsize for dtype in self.field_dtypes)

0 commit comments

Comments (0)