Fix array.query() incorrectly handling nullables (#1998)
kounelisagis committed Jul 1, 2024
1 parent 5f43369 commit 19bd09e
Showing 3 changed files with 11 additions and 6 deletions.
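
Background for the change: for a nullable attribute TileDB returns a validity buffer in which a non-zero byte marks a present value, whereas numpy.ma uses the opposite convention (mask=True marks a missing entry), so the validity buffer must be inverted exactly once when the masked array is built. Before this commit the inversion was applied inconsistently between the read paths, so masks reported through array.query() came out flipped for nullable attributes (as the new test assertions check). A minimal numpy-only sketch of the intended conversion; the names below are illustrative stand-ins, not TileDB-Py API:

    import numpy as np

    # Illustrative stand-in for the raw TileDB validity buffer
    # (results[name][2] in the diff below): 1 = present, 0 = null.
    data = np.array([10, 20, 30, 40, 50])
    validity = np.array([1, 1, 0, 1, 1], dtype=np.uint8)

    # numpy.ma treats mask=True as "missing", so the validity buffer is
    # inverted once when the masked array is constructed; downstream code can
    # then reuse the resulting .mask as-is without inverting it again.
    masked = np.ma.array(data, mask=~validity.astype(bool))
    print(masked)       # [10 20 -- 40 50]
    print(masked.mask)  # [False False  True False False]
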
12 changes: 6 additions & 6 deletions tiledb/libtiledb.pyx
@@ -2170,14 +2170,14 @@ cdef class DenseArrayImpl(Array):
                 if attr.isnullable:
                     data = np.array([values[idx] for idx in result[attr.name].data])
                     result[attr.name] = np.ma.array(
-                        data, mask=~result[attr.name].mask)
+                        data, mask=result[attr.name].mask)
                 else:
                     result[attr.name] = np.array(
                         [values[idx] for idx in result[attr.name]])
             else:
                 if attr.isnullable:
                     result[attr.name] = np.ma.array(result[attr.name].data,
-                                                    mask=~result[attr.name].mask)
+                                                    mask=result[attr.name].mask)

         return result

@@ -2429,7 +2429,7 @@ cdef class DenseArrayImpl(Array):
             out[name] = arr

             if self.schema.has_attr(name) and self.attr(name).isnullable:
-                out[name] = np.ma.array(out[name], mask=results[name][2].astype(bool))
+                out[name] = np.ma.array(out[name], mask=~results[name][2].astype(bool))

         return out

@@ -3251,14 +3251,14 @@ cdef class SparseArrayImpl(Array):
                 if attr.isnullable:
                     data = np.array([values[idx] for idx in result[attr.name].data])
                     result[attr.name] = np.ma.array(
-                        data, mask=~result[attr.name].mask)
+                        data, mask=result[attr.name].mask)
                 else:
                     result[attr.name] = np.array(
                         [values[idx] for idx in result[attr.name]])
             else:
                 if attr.isnullable:
                     result[attr.name] = np.ma.array(result[attr.name].data,
-                                                    mask=~result[attr.name].mask)
+                                                    mask=result[attr.name].mask)

         return result

@@ -3559,7 +3559,7 @@ cdef class SparseArrayImpl(Array):
             out[final_name] = arr

             if self.schema.has_attr(final_name) and self.attr(final_name).isnullable:
-                out[final_name] = np.ma.array(out[final_name], mask=results[name][2])
+                out[final_name] = np.ma.array(out[final_name], mask=~results[name][2].astype(bool))

         return out
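
With the inversion now applied once where the raw validity buffer is converted (the ~results[name][2].astype(bool) lines above), the enumeration handling in both DenseArrayImpl and SparseArrayImpl can use the already-correct mask directly. A hedged usage sketch of the behaviour the new tests assert, assuming an existing array with a nullable attribute "a" at a placeholder URI (neither is part of this commit):

    import numpy as np
    import tiledb

    uri = "some_nullable_array"  # placeholder: any array with a nullable attribute "a"

    with tiledb.open(uri) as A:
        direct = A[:]["a"]                      # masked array from plain indexing
        queried = A.query(attrs=["a"])[:]["a"]  # same attribute read through query()
        # After this fix both paths report the same validity; previously the
        # query() path returned a flipped mask for nullable attributes.
        np.testing.assert_array_equal(direct.mask, queried.mask)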

1 change: 1 addition & 0 deletions tiledb/tests/test_enumeration.py
@@ -140,6 +140,7 @@ def test_array_schema_enumeration_nullable(self, sparse, pass_df):
         expected_validity = [False, False, True, False, False]
         assert_array_equal(A[:]["a"].mask, expected_validity)
         assert_array_equal(A.df[:]["a"].isna(), expected_validity)
+        assert_array_equal(A.query(attrs=["a"])[:]["a"].mask, expected_validity)

     @pytest.mark.parametrize(
         "dtype, values",
4 changes: 4 additions & 0 deletions tiledb/tests/test_libtiledb.py
@@ -429,10 +429,12 @@ def test_array_write_nullable(self, sparse, pass_df):
         expected_validity1 = [False, False, True, False, False]
         assert_array_equal(A[:]["a1"].mask, expected_validity1)
         assert_array_equal(A.df[:]["a1"].isna(), expected_validity1)
+        assert_array_equal(A.query(attrs=["a1"])[:]["a1"].mask, expected_validity1)

         expected_validity2 = [False, False, True, True, False]
         assert_array_equal(A[:]["a2"].mask, expected_validity2)
         assert_array_equal(A.df[:]["a2"].isna(), expected_validity2)
+        assert_array_equal(A.query(attrs=["a2"])[:]["a2"].mask, expected_validity2)

         with tiledb.open(uri, "w") as A:
             dims = pa.array([1, 2, 3, 4, 5])
@@ -452,10 +454,12 @@ def test_array_write_nullable(self, sparse, pass_df):
         expected_validity1 = [True, True, True, True, True]
         assert_array_equal(A[:]["a1"].mask, expected_validity1)
         assert_array_equal(A.df[:]["a1"].isna(), expected_validity1)
+        assert_array_equal(A.query(attrs=["a1"])[:]["a1"].mask, expected_validity1)

         expected_validity2 = [True, True, True, True, True]
         assert_array_equal(A[:]["a2"].mask, expected_validity2)
         assert_array_equal(A.df[:]["a2"].isna(), expected_validity2)
+        assert_array_equal(A.query(attrs=["a2"])[:]["a2"].mask, expected_validity2)


 class DenseArrayTest(DiskTestCase):
