iface) {
+ return null;
+ }
+
+ @Override
+ public boolean isWrapperFor(Class<?> iface) {
+ return false;
+ }
+
+ // --- Helper Methods ---
+
+ /**
+ * Determines the effective catalog and schema pattern to use for metadata retrieval.
+ *
+ * This method applies the logic for the {@code FilterTablesOnDefaultDataset} connection
+ * property. If that property is enabled and the provided {@code catalog} or {@code schemaPattern}
+ * is null, empty, or the wildcard ('%'), it may be overridden by the default catalog (project) or
+ * default dataset (schema) configured in the {@link BigQueryConnection}.
+ *
+ * @param catalog The catalog name provided by the user; may be {@code null}.
+ * @param schemaPattern The schema name pattern provided by the user; may be {@code null}.
+ * @return A {@link Tuple} where {@code Tuple.x()} is the effective catalog string and {@code
+ * Tuple.y()} is the effective schema pattern string. These are the values that should be used
+ * for querying BigQuery's metadata.
+ * @see BigQueryConnection#isFilterTablesOnDefaultDataset()
+ */
+ private Tuple<String, String> determineEffectiveCatalogAndSchema(
+ String catalog, String schemaPattern) {
+ String effectiveCatalog = catalog;
+ String effectiveSchemaPattern = schemaPattern;
+
+ if (this.connection.isFilterTablesOnDefaultDataset()
+ && this.connection.getDefaultDataset() != null
+ && this.connection.getDefaultDataset().getDataset() != null
+ && !this.connection.getDefaultDataset().getDataset().isEmpty()) {
+
+ String defaultProjectFromConnection = this.connection.getCatalog();
+ // We only use the dataset part of the DefaultDataset for schema filtering
+ String defaultSchemaFromConnection = this.connection.getDefaultDataset().getDataset();
+
+ boolean catalogIsNullOrEmptyOrWildcard =
+ (catalog == null || catalog.isEmpty() || catalog.equals("%"));
+ boolean schemaPatternIsNullOrEmptyOrWildcard =
+ (schemaPattern == null || schemaPattern.isEmpty() || schemaPattern.equals("%"));
+
+ final String logPrefix = "FilterTablesOnDefaultDatasetTrue: ";
+ if (catalogIsNullOrEmptyOrWildcard && schemaPatternIsNullOrEmptyOrWildcard) {
+ effectiveCatalog = defaultProjectFromConnection;
+ effectiveSchemaPattern = defaultSchemaFromConnection;
+ LOG.info(
+ String.format(
+ logPrefix + "Using default catalog '%s' and default dataset '%s'.",
+ effectiveCatalog,
+ effectiveSchemaPattern));
+ } else if (catalogIsNullOrEmptyOrWildcard) {
+ effectiveCatalog = defaultProjectFromConnection;
+ LOG.info(
+ String.format(
+ logPrefix
+ + "Using default catalog '%s' with user dataset '%s'. Default dataset '%s' ignored.",
+ effectiveCatalog,
+ effectiveSchemaPattern,
+ defaultSchemaFromConnection));
+ } else if (schemaPatternIsNullOrEmptyOrWildcard) {
+ effectiveSchemaPattern = defaultSchemaFromConnection;
+ LOG.info(
+ String.format(
+ logPrefix + "Using user catalog '%s' and default dataset '%s'.",
+ effectiveCatalog,
+ effectiveSchemaPattern));
+ } else {
+ LOG.info(
+ String.format(
+ logPrefix
+ + "Using user catalog '%s' and schema '%s'. Default dataset '%s' ignored.",
+ effectiveCatalog,
+ effectiveSchemaPattern,
+ defaultSchemaFromConnection));
+ }
+ }
+ return Tuple.of(effectiveCatalog, effectiveSchemaPattern);
+ }
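+
+ // Illustrative usage (a sketch, not part of this change): a metadata method such as
+ // getTables(...) could resolve its catalog/schema filters through this helper before
+ // querying BigQuery. The Tuple accessors x()/y() follow the Javadoc above.
+ //
+ //   Tuple<String, String> effective = determineEffectiveCatalogAndSchema(catalog, schemaPattern);
+ //   String effectiveCatalog = effective.x();
+ //   String effectiveSchemaPattern = effective.y();
+ //   // ...pass effectiveCatalog/effectiveSchemaPattern to the BigQuery listing calls...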
+
+ private ColumnTypeInfo getColumnTypeInfoForSqlType(StandardSQLTypeName bqType) {
+ if (bqType == null) {
+ LOG.warning("Null BigQuery type encountered. Mapping to VARCHAR.");
+ return new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null);
+ }
+
+ switch (bqType) {
+ case INT64:
+ return new ColumnTypeInfo(Types.BIGINT, "BIGINT", 19, 0, 10);
+ case BOOL:
+ return new ColumnTypeInfo(Types.BOOLEAN, "BOOLEAN", 1, null, null);
+ case FLOAT64:
+ return new ColumnTypeInfo(Types.DOUBLE, "DOUBLE", 15, null, 10);
+ case NUMERIC:
+ return new ColumnTypeInfo(Types.NUMERIC, "NUMERIC", 38, 9, 10);
+ case BIGNUMERIC:
+ return new ColumnTypeInfo(Types.NUMERIC, "NUMERIC", 77, 38, 10);
+ case STRING:
+ return new ColumnTypeInfo(Types.NVARCHAR, "NVARCHAR", null, null, null);
+ case TIMESTAMP:
+ case DATETIME:
+ return new ColumnTypeInfo(Types.TIMESTAMP, "TIMESTAMP", 29, null, null);
+ case DATE:
+ return new ColumnTypeInfo(Types.DATE, "DATE", 10, null, null);
+ case TIME:
+ return new ColumnTypeInfo(Types.TIME, "TIME", 15, null, null);
+ case GEOGRAPHY:
+ case JSON:
+ case INTERVAL:
+ return new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null);
+ case BYTES:
+ return new ColumnTypeInfo(Types.VARBINARY, "VARBINARY", null, null, null);
+ case STRUCT:
+ return new ColumnTypeInfo(Types.STRUCT, "STRUCT", null, null, null);
+ default:
+ LOG.warning(
+ "Unknown BigQuery type encountered: " + bqType.name() + ". Mapping to VARCHAR.");
+ return new ColumnTypeInfo(Types.VARCHAR, bqType.name(), null, null, null);
+ }
+ }
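+
+ // Illustrative usage (a sketch, not part of this change): when populating the DATA_TYPE and
+ // TYPE_NAME metadata columns, a BigQuery schema field could be mapped through this helper.
+ // The ColumnTypeInfo accessor names below are assumptions for illustration only.
+ //
+ //   Field field = ...; // com.google.cloud.bigquery.Field from the table schema
+ //   ColumnTypeInfo info = getColumnTypeInfoForSqlType(field.getType().getStandardType());
+ //   int dataType = info.getSqlType();      // hypothetical accessor
+ //   String typeName = info.getTypeName();  // hypothetical accessor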
+
+ <T> List<T> findMatchingBigQueryObjects(
+ String objectTypeName,
+ Supplier<Page<T>> listAllOperation,
+ Function<String, T> getSpecificOperation,
+ Function<T, String> nameExtractor,
+ String pattern,
+ Pattern regex,
+ BigQueryJdbcCustomLogger logger) {
+
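+ // A pattern containing wildcard characters forces a full listing with client-side regex
+ // filtering; a plain object name can be fetched directly (assumed semantics of needsListing).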
+ boolean needsList = needsListing(pattern);
+ List<T> resultList = new ArrayList<>();
+
+ try {
+ Iterable<T> objects;
+ if (needsList) {
+ logger.info(
+ String.format(
+ "Listing all %ss (pattern: %s)...",
+ objectTypeName, pattern == null ? "" : pattern));
+ Page<T> firstPage = listAllOperation.get();
+ objects = firstPage.iterateAll();
+ logger.fine(
+ String.format(
+ "Retrieved initial %s list, iterating & filtering if needed...", objectTypeName));
+
+ } else {
+ logger.info(String.format("Getting specific %s: '%s'", objectTypeName, pattern));
+ T specificObject = getSpecificOperation.apply(pattern);
+ objects =
+ (specificObject == null)
+ ? Collections.