From 1acbde3a8750280781efe9b61ef0cb16824ef375 Mon Sep 17 00:00:00 2001
From: Prashant Pandey
Date: Tue, 3 Feb 2026 12:24:39 +0530
Subject: [PATCH 1/5] WIP

---
 .../documentstore/postgres/FlatPostgresCollection.java | 10 ----------
 1 file changed, 10 deletions(-)

diff --git a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java
index 430dd36b..66d370b1 100644
--- a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java
+++ b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java
@@ -174,16 +174,6 @@ public boolean delete(Filter filter) {
     throw new UnsupportedOperationException(WRITE_NOT_SUPPORTED);
   }
 
-  @Override
-  public BulkDeleteResult delete(Set<Key> keys) {
-    throw new UnsupportedOperationException(WRITE_NOT_SUPPORTED);
-  }
-
-  @Override
-  public boolean deleteAll() {
-    throw new UnsupportedOperationException(WRITE_NOT_SUPPORTED);
-  }
-
   @Override
   public boolean deleteSubDoc(Key key, String subDocPath) {
     throw new UnsupportedOperationException(WRITE_NOT_SUPPORTED);

From fc33186ae1a808e7529ddc3555821622ff62a7bd Mon Sep 17 00:00:00 2001
From: Prashant Pandey
Date: Tue, 3 Feb 2026 14:01:13 +0530
Subject: [PATCH 2/5] Implement deletes for flat collections

---
 .../FlatCollectionWriteTest.java          | 257 ++++++++++++++++--
 .../core/documentstore/Collection.java    |   5 +
 .../postgres/FlatPostgresCollection.java  | 105 ++++++-
 3 files changed, 349 insertions(+), 18 deletions(-)

diff --git a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java
index 5e8a26d0..73fb0364 100644
--- a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java
+++ b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java
@@ -834,31 +834,256 @@ void testBulkUpsertAndReturnOlderDocuments() {
   class DeleteTests {
 
     @Test
-    @DisplayName("Should throw UnsupportedOperationException for delete by key")
-    void testDeleteByKey() {
-      Key keyToDelete = new SingleValueKey("default", "1");
-      assertThrows(UnsupportedOperationException.class, () -> flatCollection.delete(keyToDelete));
+    @DisplayName("Should delete document by single key")
+    void testDeleteByKey() throws Exception {
+      ObjectNode node = OBJECT_MAPPER.createObjectNode();
+      node.put("id", "delete-key-test");
+      node.put("item", "ToDeleteByKey");
+      node.put("price", 50);
+      Key key = new SingleValueKey(DEFAULT_TENANT, "delete-key-test");
+      flatCollection.create(key, new JSONDocument(node));
+
+      assertTrue(flatCollection.delete(key));
+      queryAndAssert(key, rs -> assertFalse(rs.next()));
     }
 
     @Test
-    @DisplayName("Should throw UnsupportedOperationException for delete by filter")
-    void testDeleteByFilter() {
-      Filter filter = Filter.eq("item", "Soap");
-      assertThrows(UnsupportedOperationException.class, () -> flatCollection.delete(filter));
+    @DisplayName("Should delete documents by multiple keys")
+    void testDeleteByKeys() throws Exception {
+      Key key1 = new SingleValueKey(DEFAULT_TENANT, "delete-keys-1");
+      Key key2 = new SingleValueKey(DEFAULT_TENANT, "delete-keys-2");
+      Key key3 = new SingleValueKey(DEFAULT_TENANT, "delete-keys-3");
+
+      for (int i = 1; i <= 3; i++) {
+        ObjectNode node = OBJECT_MAPPER.createObjectNode();
+        node.put("id", "delete-keys-" + i);
+        node.put("item", "Item" + i);
+        node.put("price", i * 10);
+        flatCollection.create(
+            new SingleValueKey(DEFAULT_TENANT, "delete-keys-" + i), new JSONDocument(node));
+      }
+
+      // Delete keys 1 and 2, keep 3
+      BulkDeleteResult result = flatCollection.delete(Set.of(key1, key2));
+      assertEquals(2, result.getDeletedCount());
+
+      queryAndAssert(key1, rs -> assertFalse(rs.next()));
+      queryAndAssert(key2, rs -> assertFalse(rs.next()));
+      queryAndAssert(key3, rs -> assertTrue(rs.next()));
     }
 
     @Test
-    @DisplayName("Should throw UnsupportedOperationException for delete by keys")
-    void testDeleteByKeys() {
-      Set<Key> keys =
-          Set.of(new SingleValueKey("default", "1"), new SingleValueKey("default", "2"));
-      assertThrows(UnsupportedOperationException.class, () -> flatCollection.delete(keys));
+    @DisplayName("Should delete all documents")
+    void testDeleteAll() throws Exception {
+      for (int i = 1; i <= 2; i++) {
+        ObjectNode node = OBJECT_MAPPER.createObjectNode();
+        node.put("id", "delete-all-" + i);
+        node.put("item", "AllItem" + i);
+        flatCollection.create(
+            new SingleValueKey(DEFAULT_TENANT, "delete-all-" + i), new JSONDocument(node));
+      }
+
+      assertTrue(flatCollection.deleteAll());
+
+      PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore;
+      try (Connection conn = pgDatastore.getPostgresClient();
+          PreparedStatement ps =
+              conn.prepareStatement(
+                  String.format("SELECT COUNT(*) FROM \"%s\"", FLAT_COLLECTION_NAME));
+          ResultSet rs = ps.executeQuery()) {
+        assertTrue(rs.next());
+        assertEquals(0, rs.getInt(1));
+      }
     }
 
     @Test
-    @DisplayName("Should throw UnsupportedOperationException for deleteAll")
-    void testDeleteAll() {
-      assertThrows(UnsupportedOperationException.class, () -> flatCollection.deleteAll());
+    @DisplayName("Should delete with various filter types: EQ, GT, IN, legacy Filter")
+    void testDeleteWithFilters() throws Exception {
+      // Setup: Create documents for different filter scenarios
+      // Doc 1: For EQ filter test
+      ObjectNode node1 = OBJECT_MAPPER.createObjectNode();
+      node1.put("id", "filter-eq");
+      node1.put("item", "ToBeDeleted");
+      node1.put("price", 100);
+      flatCollection.create(
+          new SingleValueKey(DEFAULT_TENANT, "filter-eq"), new JSONDocument(node1));
+
+      // Doc 2 & 3: For GT filter test
+      ObjectNode node2 = OBJECT_MAPPER.createObjectNode();
+      node2.put("id", "filter-gt-expensive");
+      node2.put("item", "Expensive");
+      node2.put("price", 1000);
+      flatCollection.create(
+          new SingleValueKey(DEFAULT_TENANT, "filter-gt-expensive"), new JSONDocument(node2));
+
+      ObjectNode node3 = OBJECT_MAPPER.createObjectNode();
+      node3.put("id", "filter-gt-cheap");
+      node3.put("item", "Cheap");
+      node3.put("price", 10);
+      flatCollection.create(
+          new SingleValueKey(DEFAULT_TENANT, "filter-gt-cheap"), new JSONDocument(node3));
+
+      // Doc 4, 5, 6: For IN filter test
+      for (String fruit : List.of("Apple", "Banana", "Cherry")) {
+        ObjectNode node = OBJECT_MAPPER.createObjectNode();
+        node.put("id", "filter-in-" + fruit.toLowerCase());
+        node.put("item", fruit);
+        node.put("price", 50);
+        flatCollection.create(
+            new SingleValueKey(DEFAULT_TENANT, "filter-in-" + fruit.toLowerCase()),
+            new JSONDocument(node));
+      }
+
+      // Doc 7: For legacy Filter test
+      ObjectNode node7 = OBJECT_MAPPER.createObjectNode();
+      node7.put("id", "filter-legacy");
+      node7.put("item", "LegacyItem");
+      node7.put("price", 75);
+      flatCollection.create(
+          new SingleValueKey(DEFAULT_TENANT, "filter-legacy"), new JSONDocument(node7));
+
+      // Test 1: EQ filter
+      org.hypertrace.core.documentstore.query.Filter eqFilter =
+          org.hypertrace.core.documentstore.query.Filter.builder()
+              .expression(
+                  RelationalExpression.of(
+                      IdentifierExpression.of("item"),
+                      RelationalOperator.EQ,
+                      ConstantExpression.of("ToBeDeleted")))
+              .build();
+      assertTrue(flatCollection.delete(eqFilter));
+      queryAndAssert(new SingleValueKey(DEFAULT_TENANT, "filter-eq"), rs -> assertFalse(rs.next()));
+
+      // Test 2: GT filter (price > 500)
+      org.hypertrace.core.documentstore.query.Filter gtFilter =
+          org.hypertrace.core.documentstore.query.Filter.builder()
+              .expression(
+                  RelationalExpression.of(
+                      IdentifierExpression.of("price"),
+                      RelationalOperator.GT,
+                      ConstantExpression.of(500)))
+              .build();
+      assertTrue(flatCollection.delete(gtFilter));
+      queryAndAssert(
+          new SingleValueKey(DEFAULT_TENANT, "filter-gt-expensive"), rs -> assertFalse(rs.next()));
+      queryAndAssert(
+          new SingleValueKey(DEFAULT_TENANT, "filter-gt-cheap"), rs -> assertTrue(rs.next()));
+
+      // Test 3: IN filter
+      org.hypertrace.core.documentstore.query.Filter inFilter =
+          org.hypertrace.core.documentstore.query.Filter.builder()
+              .expression(
+                  RelationalExpression.of(
+                      IdentifierExpression.of("item"),
+                      RelationalOperator.IN,
+                      ConstantExpression.ofStrings(List.of("Apple", "Banana"))))
+              .build();
+      assertTrue(flatCollection.delete(inFilter));
+      queryAndAssert(
+          new SingleValueKey(DEFAULT_TENANT, "filter-in-apple"), rs -> assertFalse(rs.next()));
+      queryAndAssert(
+          new SingleValueKey(DEFAULT_TENANT, "filter-in-banana"), rs -> assertFalse(rs.next()));
+      queryAndAssert(
+          new SingleValueKey(DEFAULT_TENANT, "filter-in-cherry"), rs -> assertTrue(rs.next()));
+
+      // Test 4: Legacy Filter API
+      Filter legacyFilter = Filter.eq("item", "LegacyItem");
+      assertTrue(flatCollection.delete(legacyFilter));
+      queryAndAssert(
+          new SingleValueKey(DEFAULT_TENANT, "filter-legacy"), rs -> assertFalse(rs.next()));
     }
+
+    @Test
+    @DisplayName("Should delete with composite AND filter and nested JSONB filter")
+    void testDeleteWithCompositeAndNestedFilters() throws Exception {
+      // Setup for AND filter
+      ObjectNode node1 = OBJECT_MAPPER.createObjectNode();
+      node1.put("id", "and-match");
+      node1.put("item", "TargetItem");
+      node1.put("price", 100);
+      flatCollection.create(
+          new SingleValueKey(DEFAULT_TENANT, "and-match"), new JSONDocument(node1));
+
+      ObjectNode node2 = OBJECT_MAPPER.createObjectNode();
+      node2.put("id", "and-nomatch");
+      node2.put("item", "TargetItem");
+      node2.put("price", 200);
+      flatCollection.create(
+          new SingleValueKey(DEFAULT_TENANT, "and-nomatch"), new JSONDocument(node2));
+
+      // Setup for JSONB nested filter
+      ObjectNode node3 = OBJECT_MAPPER.createObjectNode();
+      node3.put("id", "jsonb-nike");
+      node3.put("item", "Product1");
+      ObjectNode props1 = OBJECT_MAPPER.createObjectNode();
+      props1.put("brand", "Nike");
+      node3.set("props", props1);
+      flatCollection.create(
+          new SingleValueKey(DEFAULT_TENANT, "jsonb-nike"), new JSONDocument(node3));
+
+      ObjectNode node4 = OBJECT_MAPPER.createObjectNode();
+      node4.put("id", "jsonb-adidas");
+      node4.put("item", "Product2");
+      ObjectNode props2 = OBJECT_MAPPER.createObjectNode();
+      props2.put("brand", "Adidas");
+      node4.set("props", props2);
+      flatCollection.create(
+          new SingleValueKey(DEFAULT_TENANT, "jsonb-adidas"), new JSONDocument(node4));
+
+      // Test 1: AND filter (item = 'TargetItem' AND price = 100)
+      org.hypertrace.core.documentstore.query.Filter andFilter =
+          org.hypertrace.core.documentstore.query.Filter.builder()
+              .expression(
+                  org.hypertrace.core.documentstore.expression.impl.LogicalExpression.and(
+                      RelationalExpression.of(
+                          IdentifierExpression.of("item"),
+                          RelationalOperator.EQ,
+                          ConstantExpression.of("TargetItem")),
+                      RelationalExpression.of(
+                          IdentifierExpression.of("price"),
+                          RelationalOperator.EQ,
+                          ConstantExpression.of(100))))
+              .build();
+      assertTrue(flatCollection.delete(andFilter));
+      queryAndAssert(new SingleValueKey(DEFAULT_TENANT, "and-match"), rs -> assertFalse(rs.next()));
+      queryAndAssert(
+          new SingleValueKey(DEFAULT_TENANT, "and-nomatch"), rs -> assertTrue(rs.next()));
+
+      // Test 2: Nested JSONB filter (props.brand = 'Nike')
+      org.hypertrace.core.documentstore.query.Filter jsonbFilter =
+          org.hypertrace.core.documentstore.query.Filter.builder()
+              .expression(
+                  RelationalExpression.of(
+                      org.hypertrace.core.documentstore.expression.impl.JsonIdentifierExpression.of(
+                          "props", "brand"),
+                      RelationalOperator.EQ,
+                      ConstantExpression.of("Nike")))
+              .build();
+      assertTrue(flatCollection.delete(jsonbFilter));
+      queryAndAssert(
+          new SingleValueKey(DEFAULT_TENANT, "jsonb-nike"), rs -> assertFalse(rs.next()));
+      queryAndAssert(
+          new SingleValueKey(DEFAULT_TENANT, "jsonb-adidas"), rs -> assertTrue(rs.next()));
+    }
+
+    @Test
+    @DisplayName("Should handle edge cases: no match returns false, null filter throws exception")
+    void testDeleteEdgeCases() {
+      // Test 1: No match returns false
+      org.hypertrace.core.documentstore.query.Filter noMatchFilter =
+          org.hypertrace.core.documentstore.query.Filter.builder()
+              .expression(
+                  RelationalExpression.of(
+                      IdentifierExpression.of("item"),
+                      RelationalOperator.EQ,
+                      ConstantExpression.of("NonExistentItem12345")))
+              .build();
+      assertFalse(flatCollection.delete(noMatchFilter));
+
+      // Test 2: Null filter throws exception
+      assertThrows(
+          IllegalArgumentException.class,
+          () -> flatCollection.delete((org.hypertrace.core.documentstore.query.Filter) null));
     }
   }
diff --git a/document-store/src/main/java/org/hypertrace/core/documentstore/Collection.java b/document-store/src/main/java/org/hypertrace/core/documentstore/Collection.java
index 4d0430fb..699fdc18 100644
--- a/document-store/src/main/java/org/hypertrace/core/documentstore/Collection.java
+++ b/document-store/src/main/java/org/hypertrace/core/documentstore/Collection.java
@@ -159,8 +159,13 @@ CloseableIterator<Document> query(
    * @param filter The filter to determine documents to be deleted. Only the filter clause.
    * @return True if the documents are deleted, false otherwise.
    */
+  @Deprecated
   boolean delete(Filter filter);
 
+  default boolean delete(org.hypertrace.core.documentstore.query.Filter filter) {
+    throw new IllegalArgumentException("Not implemented!");
+  }
+
   /**
    * Delete the documents for the given keys
    *
diff --git a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java
index 66d370b1..71af0c5f 100644
--- a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java
+++ b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java
@@ -166,12 +166,113 @@ public BulkUpdateResult bulkOperationOnArrayValue(BulkArrayValueUpdateRequest re
 
   @Override
   public boolean delete(Key key) {
-    throw new UnsupportedOperationException(WRITE_NOT_SUPPORTED);
+    String pkForTable = getPKForTable(tableIdentifier.getTableName());
+    String deleteSQL =
+        String.format(
+            "DELETE FROM %s WHERE %s = ?",
+            tableIdentifier, PostgresUtils.wrapFieldNamesWithDoubleQuotes(pkForTable));
+    try (PreparedStatement preparedStatement = client.getConnection().prepareStatement(deleteSQL)) {
+      preparedStatement.setString(1, key.toString());
+      preparedStatement.executeUpdate();
+      return true;
+    } catch (SQLException e) {
+      LOGGER.error("SQLException deleting document. key: {}", key, e);
+    }
+    return false;
+  }
+
+  @Override
+  public boolean delete(org.hypertrace.core.documentstore.query.Filter filter) {
+
+    Preconditions.checkArgument(filter != null, "Filter cannot be null");
+
+    Query query = Query.builder().setFilter(filter).build();
+
+    // Create parser with flat field transformer
+    PostgresQueryParser queryParser =
+        new PostgresQueryParser(tableIdentifier, query, new FlatPostgresFieldTransformer());
+
+    String filterClause = queryParser.buildFilterClause();
+
+    if (filterClause.isEmpty()) {
+      throw new IllegalArgumentException("Parsed filter is invalid");
+    }
+
+    String sql = "DELETE FROM " + tableIdentifier + " " + filterClause;
+    LOGGER.debug("Delete SQL: {}", sql);
+
+    try (Connection conn = client.getPooledConnection();
+        PreparedStatement ps =
+            queryExecutor.buildPreparedStatement(
+                sql, queryParser.getParamsBuilder().build(), conn)) {
+      int deletedCount = ps.executeUpdate();
+      LOGGER.debug("Deleted {} rows", deletedCount);
+      return deletedCount > 0;
+    } catch (SQLException e) {
+      LOGGER.error("SQLException deleting documents. filter: {}", filter, e);
filter: {}", filter, e); + } + return false; } @Override + @Deprecated public boolean delete(Filter filter) { - throw new UnsupportedOperationException(WRITE_NOT_SUPPORTED); + if (filter == null) { + throw new IllegalArgumentException("Filter must be provided"); + } + + // Convert legacy Filter to query.Filter using FilterToQueryConverter + org.hypertrace.core.documentstore.expression.type.FilterTypeExpression expression = + org.hypertrace.core.documentstore.postgres.query.v1.FilterToQueryConverter.convert(filter); + + org.hypertrace.core.documentstore.query.Filter queryFilter = + org.hypertrace.core.documentstore.query.Filter.builder().expression(expression).build(); + + return delete(queryFilter); + } + + @Override + public BulkDeleteResult delete(Set keys) { + if (keys == null || keys.isEmpty()) { + return new BulkDeleteResult(0); + } + + String pkColumn = getPKForTable(tableIdentifier.getTableName()); + String quotedPkColumn = PostgresUtils.wrapFieldNamesWithDoubleQuotes(pkColumn); + + String ids = + keys.stream().map(key -> "'" + key.toString() + "'").collect(Collectors.joining(", ")); + + String deleteSQL = + String.format("DELETE FROM %s WHERE %s IN (%s)", tableIdentifier, quotedPkColumn, ids); + + LOGGER.debug("Bulk delete SQL: {}", deleteSQL); + + try (Connection conn = client.getPooledConnection(); + PreparedStatement ps = conn.prepareStatement(deleteSQL)) { + int deletedCount = ps.executeUpdate(); + LOGGER.debug("Bulk deleted {} rows", deletedCount); + return new BulkDeleteResult(deletedCount); + } catch (SQLException e) { + LOGGER.error("SQLException bulk deleting documents. keys: {}", keys, e); + } + return new BulkDeleteResult(0); + } + + @Override + public boolean deleteAll() { + String deleteSQL = String.format("DELETE FROM %s", tableIdentifier); + LOGGER.debug("Delete all SQL: {}", deleteSQL); + + try (Connection conn = client.getPooledConnection(); + PreparedStatement ps = conn.prepareStatement(deleteSQL)) { + int deletedCount = ps.executeUpdate(); + LOGGER.debug("Deleted all {} rows", deletedCount); + return true; + } catch (SQLException e) { + LOGGER.error("SQLException deleting all documents.", e); + } + return false; } @Override From 911e266df1fece4105f99a80e9bf9e74bef89e3d Mon Sep 17 00:00:00 2001 From: Prashant Pandey Date: Tue, 3 Feb 2026 14:08:08 +0530 Subject: [PATCH 3/5] Fix compilation issue --- .../postgres/FlatPostgresCollection.java | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java index 71af0c5f..2550f59e 100644 --- a/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java +++ b/document-store/src/main/java/org/hypertrace/core/documentstore/postgres/FlatPostgresCollection.java @@ -217,18 +217,8 @@ public boolean delete(org.hypertrace.core.documentstore.query.Filter filter) { @Override @Deprecated public boolean delete(Filter filter) { - if (filter == null) { - throw new IllegalArgumentException("Filter must be provided"); - } - - // Convert legacy Filter to query.Filter using FilterToQueryConverter - org.hypertrace.core.documentstore.expression.type.FilterTypeExpression expression = - org.hypertrace.core.documentstore.postgres.query.v1.FilterToQueryConverter.convert(filter); - - org.hypertrace.core.documentstore.query.Filter queryFilter = - 
-        org.hypertrace.core.documentstore.query.Filter.builder().expression(expression).build();
-
-    return delete(queryFilter);
+    throw new UnsupportedOperationException(
+        "DELETE not supported for legacy Filter, use delete(org.hypertrace.core.documentstore.query.Filter filter) rather");
   }
 
   @Override

From f18546588d243533c61c24a98ae9db70931b67a4 Mon Sep 17 00:00:00 2001
From: Prashant Pandey
Date: Tue, 3 Feb 2026 17:20:37 +0530
Subject: [PATCH 4/5] Fix failing test case

---
 .../documentstore/FlatCollectionWriteTest.java | 16 +++-------------
 1 file changed, 3 insertions(+), 13 deletions(-)

diff --git a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java
index 73fb0364..aede748a 100644
--- a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java
+++ b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java
@@ -934,14 +934,6 @@ void testDeleteWithFilters() throws Exception {
             new JSONDocument(node));
       }
 
-      // Doc 7: For legacy Filter test
-      ObjectNode node7 = OBJECT_MAPPER.createObjectNode();
-      node7.put("id", "filter-legacy");
-      node7.put("item", "LegacyItem");
-      node7.put("price", 75);
-      flatCollection.create(
-          new SingleValueKey(DEFAULT_TENANT, "filter-legacy"), new JSONDocument(node7));
-
       // Test 1: EQ filter
       org.hypertrace.core.documentstore.query.Filter eqFilter =
           org.hypertrace.core.documentstore.query.Filter.builder()
@@ -986,11 +978,9 @@ void testDeleteWithFilters() throws Exception {
       queryAndAssert(
           new SingleValueKey(DEFAULT_TENANT, "filter-in-cherry"), rs -> assertTrue(rs.next()));
 
-      // Test 4: Legacy Filter API
-      Filter legacyFilter = Filter.eq("item", "LegacyItem");
-      assertTrue(flatCollection.delete(legacyFilter));
-      queryAndAssert(
-          new SingleValueKey(DEFAULT_TENANT, "filter-legacy"), rs -> assertFalse(rs.next()));
+      // Test 4: Legacy Filter API throws UnsupportedOperationException
+      Filter legacyFilter = Filter.eq("item", "Cherry");
+      assertThrows(UnsupportedOperationException.class, () -> flatCollection.delete(legacyFilter));
     }
 
     @Test

From 3dad7d2aff5c79672b80f8be673cb86486404649 Mon Sep 17 00:00:00 2001
From: Prashant Pandey
Date: Tue, 3 Feb 2026 17:53:32 +0530
Subject: [PATCH 5/5] Add tests

---
 .../FlatCollectionWriteTest.java | 114 ++++++++++++++++++
 1 file changed, 114 insertions(+)

diff --git a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java
index aede748a..80a72d65 100644
--- a/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java
+++ b/document-store/src/integrationTest/java/org/hypertrace/core/documentstore/FlatCollectionWriteTest.java
@@ -1075,6 +1075,120 @@ void testDeleteEdgeCases() {
           IllegalArgumentException.class,
           () -> flatCollection.delete((org.hypertrace.core.documentstore.query.Filter) null));
     }
+
+    @Test
+    @DisplayName("delete(Filter) should return false when SQLException occurs (dropped table)")
+    void testDeleteByFilterReturnsFalseOnSQLException() throws Exception {
+      // Create a temporary table, get collection, then drop the table to trigger SQLException
+      String tempTable = "temp_delete_filter_test";
+      PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore;
+
+      // Create temp table
+      try (Connection conn = pgDatastore.getPostgresClient();
+          PreparedStatement ps =
+              conn.prepareStatement(
+                  String.format(
+                      "CREATE TABLE \"%s\" (\"id\" TEXT PRIMARY KEY, \"item\" TEXT)", tempTable))) {
+        ps.execute();
+      }
+
+      // Get collection for the temp table
+      Collection tempCollection =
+          postgresDatastore.getCollectionForType(tempTable, DocumentType.FLAT);
+
+      // Drop the table to cause SQLException on delete
+      try (Connection conn = pgDatastore.getPostgresClient();
+          PreparedStatement ps =
+              conn.prepareStatement(String.format("DROP TABLE \"%s\"", tempTable))) {
+        ps.execute();
+      }
+
+      org.hypertrace.core.documentstore.query.Filter filter =
+          org.hypertrace.core.documentstore.query.Filter.builder()
+              .expression(
+                  RelationalExpression.of(
+                      IdentifierExpression.of("item"),
+                      RelationalOperator.EQ,
+                      ConstantExpression.of("SomeValue")))
+              .build();
+
+      // SQLException should be caught and method should return false
+      assertFalse(tempCollection.delete(filter));
+    }
+
+    @Test
+    @DisplayName("delete(Set) should return BulkDeleteResult(0) when SQLException occurs")
+    void testDeleteByKeysReturnsZeroOnSQLException() throws Exception {
+      // Create a temporary table, get collection, then drop the table to trigger SQLException
+      String tempTable = "temp_delete_keys_test";
+      PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore;
+
+      // Create temp table
+      try (Connection conn = pgDatastore.getPostgresClient();
+          PreparedStatement ps =
+              conn.prepareStatement(
+                  String.format(
+                      "CREATE TABLE \"%s\" (\"id\" TEXT PRIMARY KEY, \"item\" TEXT)", tempTable))) {
+        ps.execute();
+      }
+
+      // Get collection for the temp table
+      Collection tempCollection =
+          postgresDatastore.getCollectionForType(tempTable, DocumentType.FLAT);
+
+      // Insert a document to force schema caching (getPKForTable is called during create)
+      ObjectNode node = OBJECT_MAPPER.createObjectNode();
+      node.put("id", "temp-key");
+      node.put("item", "temp-item");
+      tempCollection.create(new SingleValueKey(DEFAULT_TENANT, "temp-key"), new JSONDocument(node));
+
+      // Drop the table to cause SQLException on delete
+      try (Connection conn = pgDatastore.getPostgresClient();
+          PreparedStatement ps =
+              conn.prepareStatement(String.format("DROP TABLE \"%s\"", tempTable))) {
+        ps.execute();
+      }
+
+      Set<Key> keys =
+          Set.of(
+              new SingleValueKey(DEFAULT_TENANT, "key1"),
+              new SingleValueKey(DEFAULT_TENANT, "key2"));
+
+      // SQLException should be caught and method should return BulkDeleteResult with 0 count
+      BulkDeleteResult result = tempCollection.delete(keys);
+      assertEquals(0, result.getDeletedCount());
+    }
+
+    @Test
+    @DisplayName("deleteAll() should return false when SQLException occurs (dropped table)")
+    void testDeleteAllReturnsFalseOnSQLException() throws Exception {
+      // Create a temporary table, get collection, then drop the table to trigger SQLException
+      String tempTable = "temp_delete_all_test";
+      PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore;
+
+      // Create temp table
+      try (Connection conn = pgDatastore.getPostgresClient();
+          PreparedStatement ps =
+              conn.prepareStatement(
+                  String.format(
+                      "CREATE TABLE \"%s\" (\"id\" TEXT PRIMARY KEY, \"item\" TEXT)", tempTable))) {
+        ps.execute();
+      }
+
+      // Get collection for the temp table
+      Collection tempCollection =
+          postgresDatastore.getCollectionForType(tempTable, DocumentType.FLAT);
+
+      // Drop the table to cause SQLException on delete
+      try (Connection conn = pgDatastore.getPostgresClient();
+          PreparedStatement ps =
+              conn.prepareStatement(String.format("DROP TABLE \"%s\"", tempTable))) {
+        ps.execute();
+      }
+
+      // SQLException should be caught and method should return false
+      assertFalse(tempCollection.deleteAll());
+    }
   }
 
   @Nested
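
For orientation, below is a minimal caller-side sketch of the filter-based delete introduced in PATCH 2 (the only supported path once PATCH 3 turns the legacy Filter overload into an UnsupportedOperationException). It assumes a Datastore handle named `datastore` and an illustrative table name `my_flat_table`; the expression types are the same ones already imported by FlatCollectionWriteTest. This is an illustration of the API, not part of the patches.

  // Obtain a flat collection, mirroring how the integration tests get one.
  Collection collection = datastore.getCollectionForType("my_flat_table", DocumentType.FLAT);

  // Build a typed filter: item = 'Soap'.
  org.hypertrace.core.documentstore.query.Filter filter =
      org.hypertrace.core.documentstore.query.Filter.builder()
          .expression(
              RelationalExpression.of(
                  IdentifierExpression.of("item"),
                  RelationalOperator.EQ,
                  ConstantExpression.of("Soap")))
          .build();

  // Returns true only if at least one row matched and was removed;
  // passing a null filter is rejected with IllegalArgumentException.
  boolean deleted = collection.delete(filter);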