diff --git a/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java b/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java
index 8929087467..4fb248dcba 100644
--- a/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java
+++ b/src/main/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaData.java
@@ -892,21 +892,6 @@ public boolean supportsSharding() throws SQLException {
return false;
}
- /**
- * Builds the result set for stored procedures metadata.
- *
- *
The result set structure is defined based on the JDBC driver specifications to ensure
- * consistency. The following columns are included in the result set:
- *
- *
- * PROCEDURE_CAT: The catalog of the procedure (String)
- * PROCEDURE_SCHEM: The schema of the procedure (String)
- * PROCEDURE_NAME: The name of the procedure (String)
- * REMARKS: A description or remarks about the procedure (String)
- * PROCEDURE_TYPE: The type of procedure (e.g., FUNCTION, PROCEDURE) (String)
- * SPECIFIC_NAME: The specific name for the procedure (String)
- *
- */
@Override
public ResultSet getProcedures(String catalog, String schemaPattern, String procedureNamePattern)
throws SQLException {
@@ -916,44 +901,14 @@ public ResultSet getProcedures(String catalog, String schemaPattern, String proc
schemaPattern,
procedureNamePattern);
throwExceptionIfConnectionIsClosed();
- return new DatabricksResultSet(
- new StatementStatus().setState(StatementState.SUCCEEDED),
- new StatementId("getprocedures-metadata"),
- Arrays.asList(
- "PROCEDURE_CAT",
- "PROCEDURE_SCHEM",
- "PROCEDURE_NAME",
- "NUM_INPUT_PARAMS",
- "NUM_OUTPUT_PARAMS",
- "NUM_RESULT_SETS",
- "REMARKS",
- "PROCEDURE_TYPE",
- "SPECIFIC_NAME"),
- Arrays.asList(
- "VARCHAR",
- "VARCHAR",
- "VARCHAR",
- "INTEGER",
- "INTEGER",
- "INTEGER",
- "VARCHAR",
- "SMALLINT",
- "VARCHAR"),
- new int[] {
- Types.VARCHAR,
- Types.VARCHAR,
- Types.VARCHAR,
- Types.INTEGER,
- Types.INTEGER,
- Types.INTEGER,
- Types.VARCHAR,
- Types.SMALLINT,
- Types.VARCHAR
- },
- new int[] {128, 128, 128, 10, 10, 10, 254, 5, 128},
- new int[] {1, 1, 0, 1, 1, 1, 1, 1, 0},
- new Object[0][0],
- StatementType.METADATA);
+ try {
+ return session
+ .getDatabricksMetadataClient()
+ .listProcedures(session, catalog, schemaPattern, procedureNamePattern);
+ } catch (Exception e) {
+ LOGGER.error(e, "Unable to fetch procedures, returning empty result set");
+ return metadataResultSetBuilder.getProceduresResult(new ArrayList<>());
+ }
}
@Override
@@ -967,12 +922,15 @@ public ResultSet getProcedureColumns(
procedureNamePattern,
columnNamePattern);
throwExceptionIfConnectionIsClosed();
-
- return metadataResultSetBuilder.getResultSetWithGivenRowsAndColumns(
- PROCEDURE_COLUMNS_COLUMNS,
- new ArrayList<>(),
- METADATA_STATEMENT_ID,
- CommandName.GET_PROCEDURES_COLUMNS);
+ try {
+ return session
+ .getDatabricksMetadataClient()
+ .listProcedureColumns(
+ session, catalog, schemaPattern, procedureNamePattern, columnNamePattern);
+ } catch (Exception e) {
+ LOGGER.error(e, "Unable to fetch procedure columns, returning empty result set");
+ return metadataResultSetBuilder.getProcedureColumnsResult(new ArrayList<>());
+ }
}
@Override
diff --git a/src/main/java/com/databricks/jdbc/common/CommandName.java b/src/main/java/com/databricks/jdbc/common/CommandName.java
index 3e4b3a85ea..6a9527ebca 100644
--- a/src/main/java/com/databricks/jdbc/common/CommandName.java
+++ b/src/main/java/com/databricks/jdbc/common/CommandName.java
@@ -22,7 +22,8 @@ public enum CommandName {
GET_TABLE_PRIVILEGES,
GET_VERSION_COLUMNS,
GET_SUPER_TYPES,
- GET_PROCEDURES_COLUMNS,
+ LIST_PROCEDURES,
+ LIST_PROCEDURE_COLUMNS,
GET_INDEX_INFO,
GET_SUPER_TABLES,
GET_FUNCTION_COLUMNS,
diff --git a/src/main/java/com/databricks/jdbc/common/MetadataOperationType.java b/src/main/java/com/databricks/jdbc/common/MetadataOperationType.java
index 861deba5cd..fdadfe5ee3 100644
--- a/src/main/java/com/databricks/jdbc/common/MetadataOperationType.java
+++ b/src/main/java/com/databricks/jdbc/common/MetadataOperationType.java
@@ -11,7 +11,9 @@ public enum MetadataOperationType {
GET_COLUMNS("GetColumns"),
GET_FUNCTIONS("GetFunctions"),
GET_PRIMARY_KEYS("GetPrimaryKeys"),
- GET_CROSS_REFERENCE("GetCrossReference");
+ GET_CROSS_REFERENCE("GetCrossReference"),
+ GET_PROCEDURES("GetProcedures"),
+ GET_PROCEDURE_COLUMNS("GetProcedureColumns");
private final String headerValue;
diff --git a/src/main/java/com/databricks/jdbc/common/MetadataResultConstants.java b/src/main/java/com/databricks/jdbc/common/MetadataResultConstants.java
index 6adc4d9b3f..f53ad7d1de 100644
--- a/src/main/java/com/databricks/jdbc/common/MetadataResultConstants.java
+++ b/src/main/java/com/databricks/jdbc/common/MetadataResultConstants.java
@@ -195,6 +195,14 @@ public class MetadataResultConstants {
private static final ResultColumn RADIX = new ResultColumn("RADIX", "radix", Types.SMALLINT);
private static final ResultColumn NULLABLE_SHORT =
new ResultColumn("NULLABLE", "nullable", Types.SMALLINT);
+ private static final ResultColumn NUM_INPUT_PARAMS =
+ new ResultColumn("NUM_INPUT_PARAMS", "numInputParams", Types.INTEGER);
+ private static final ResultColumn NUM_OUTPUT_PARAMS =
+ new ResultColumn("NUM_OUTPUT_PARAMS", "numOutputParams", Types.INTEGER);
+ private static final ResultColumn NUM_RESULT_SETS =
+ new ResultColumn("NUM_RESULT_SETS", "numResultSets", Types.INTEGER);
+ private static final ResultColumn PROCEDURE_TYPE =
+ new ResultColumn("PROCEDURE_TYPE", "procedureType", Types.SMALLINT);
private static final ResultColumn NON_UNIQUE =
new ResultColumn("NON_UNIQUE", "nonUnique", Types.BOOLEAN);
private static final ResultColumn INDEX_QUALIFIER =
@@ -225,6 +233,18 @@ public class MetadataResultConstants {
FUNCTION_TYPE_COLUMN,
SPECIFIC_NAME_COLUMN);
+  public static final List<ResultColumn> PROCEDURES_COLUMNS =
+ List.of(
+ PROCEDURE_CAT,
+ PROCEDURE_SCHEM,
+ PROCEDURE_NAME,
+ NUM_INPUT_PARAMS,
+ NUM_OUTPUT_PARAMS,
+ NUM_RESULT_SETS,
+ REMARKS_COLUMN,
+ PROCEDURE_TYPE,
+ SPECIFIC_NAME_COLUMN);
+
public static List COLUMN_COLUMNS =
List.of(
CATALOG_COLUMN,
@@ -618,8 +638,9 @@ public class MetadataResultConstants {
CommandName.GET_VERSION_COLUMNS,
List.of(SCOPE, COL_NAME_COLUMN, DATA_TYPE_COLUMN, TYPE_NAME_COLUMN, PSEUDO_COLUMN));
put(CommandName.GET_SUPER_TYPES, List.of(TYPE_NAME_COLUMN, SUPERTYPE_NAME));
+ put(CommandName.LIST_PROCEDURES, List.of(PROCEDURE_NAME, SPECIFIC_NAME_COLUMN));
put(
- CommandName.GET_PROCEDURES_COLUMNS,
+ CommandName.LIST_PROCEDURE_COLUMNS,
List.of(
PROCEDURE_NAME,
COLUMN_NAME_COLUMN,
diff --git a/src/main/java/com/databricks/jdbc/dbclient/IDatabricksMetadataClient.java b/src/main/java/com/databricks/jdbc/dbclient/IDatabricksMetadataClient.java
index ed01dd44de..d8929f9db8 100644
--- a/src/main/java/com/databricks/jdbc/dbclient/IDatabricksMetadataClient.java
+++ b/src/main/java/com/databricks/jdbc/dbclient/IDatabricksMetadataClient.java
@@ -132,6 +132,42 @@ DatabricksResultSet listImportedKeys(
DatabricksResultSet listExportedKeys(
IDatabricksSession session, String catalog, String schema, String table) throws SQLException;
+ /**
+ * Returns the list of stored procedures
+ *
+ * @param session underlying session
+ * @param catalog catalogName; null means use system catalog
+ * @param schemaNamePattern schema name pattern (can be a LIKE pattern)
+ * @param procedureNamePattern procedure name pattern (can be a LIKE pattern)
+ * @return a DatabricksResultSet representing list of procedures
+ */
+ @DatabricksMetricsTimed
+ DatabricksResultSet listProcedures(
+ IDatabricksSession session,
+ String catalog,
+ String schemaNamePattern,
+ String procedureNamePattern)
+ throws SQLException;
+
+ /**
+ * Returns the list of stored procedure columns/parameters
+ *
+ * @param session underlying session
+ * @param catalog catalogName; null means use system catalog
+ * @param schemaNamePattern schema name pattern (can be a LIKE pattern)
+ * @param procedureNamePattern procedure name pattern (can be a LIKE pattern)
+ * @param columnNamePattern column/parameter name pattern (can be a LIKE pattern)
+ * @return a DatabricksResultSet representing list of procedure columns
+ */
+ @DatabricksMetricsTimed
+ DatabricksResultSet listProcedureColumns(
+ IDatabricksSession session,
+ String catalog,
+ String schemaNamePattern,
+ String procedureNamePattern,
+ String columnNamePattern)
+ throws SQLException;
+
/**
* Returns the list of cross references between a parent table and a foreign table
*
diff --git a/src/main/java/com/databricks/jdbc/dbclient/impl/common/CommandConstants.java b/src/main/java/com/databricks/jdbc/dbclient/impl/common/CommandConstants.java
index be80667c46..f51e39cf7f 100644
--- a/src/main/java/com/databricks/jdbc/dbclient/impl/common/CommandConstants.java
+++ b/src/main/java/com/databricks/jdbc/dbclient/impl/common/CommandConstants.java
@@ -6,6 +6,8 @@ public class CommandConstants {
public static final String GET_CATALOGS_STATEMENT_ID = "getcatalogs-metadata";
public static final String GET_TABLE_TYPE_STATEMENT_ID = "gettabletype-metadata";
public static final String GET_FUNCTIONS_STATEMENT_ID = "getfunctions-metadata";
+ public static final String GET_PROCEDURES_STATEMENT_ID = "getprocedures-metadata";
+ public static final String GET_PROCEDURE_COLUMNS_STATEMENT_ID = "getprocedurecolumns-metadata";
public static final String SHOW_CATALOGS_SQL = "SHOW CATALOGS";
public static final String SHOW_TABLE_TYPES_SQL = "SHOW TABLE_TYPES";
public static final String IN_CATALOG_SQL = " IN CATALOG `%s`";
@@ -26,4 +28,75 @@ public class CommandConstants {
"SHOW KEYS" + IN_CATALOG_SQL + IN_ABSOLUTE_SCHEMA_SQL + IN_ABSOLUTE_TABLE_SQL;
public static final String SHOW_FOREIGN_KEYS_SQL =
"SHOW FOREIGN KEYS" + IN_CATALOG_SQL + IN_ABSOLUTE_SCHEMA_SQL + IN_ABSOLUTE_TABLE_SQL;
+
+ private static final String INFORMATION_SCHEMA_ROUTINES = "information_schema.routines";
+ private static final String INFORMATION_SCHEMA_PARAMETERS = "information_schema.parameters";
+ private static final String PROCEDURE_TYPE_FILTER = "routine_type = 'PROCEDURE'";
+
+ private static final String ROUTINES_SELECT_COLUMNS =
+ "routine_catalog, routine_schema, routine_name, comment, specific_name";
+
+ private static final String PARAMETERS_SELECT_COLUMNS =
+ "p.specific_catalog, p.specific_schema, p.specific_name,"
+ + " p.parameter_name, p.parameter_mode, p.is_result,"
+ + " p.data_type, p.full_data_type,"
+ + " p.numeric_precision, p.numeric_precision_radix, p.numeric_scale,"
+ + " p.character_maximum_length, p.character_octet_length,"
+ + " p.ordinal_position, p.parameter_default, p.comment";
+
+ public static String buildProceduresSQL(
+ String catalog, String schemaPattern, String procedureNamePattern) {
+ String catalogPrefix = getCatalogPrefix(catalog);
+ String routinesTable = catalogPrefix + "." + INFORMATION_SCHEMA_ROUTINES;
+
+ StringBuilder sql = new StringBuilder();
+ sql.append("SELECT ").append(ROUTINES_SELECT_COLUMNS);
+ sql.append(" FROM ").append(routinesTable);
+ sql.append(" WHERE ").append(PROCEDURE_TYPE_FILTER);
+ if (schemaPattern != null) {
+ sql.append(" AND routine_schema LIKE '").append(escapeSql(schemaPattern)).append("'");
+ }
+ if (procedureNamePattern != null) {
+ sql.append(" AND routine_name LIKE '").append(escapeSql(procedureNamePattern)).append("'");
+ }
+ sql.append(" ORDER BY routine_catalog, routine_schema, routine_name");
+ return sql.toString();
+ }
+
+ public static String buildProcedureColumnsSQL(
+ String catalog, String schemaPattern, String procedureNamePattern, String columnNamePattern) {
+ String catalogPrefix = getCatalogPrefix(catalog);
+ String parametersTable = catalogPrefix + "." + INFORMATION_SCHEMA_PARAMETERS + " p";
+ String routinesTable = catalogPrefix + "." + INFORMATION_SCHEMA_ROUTINES + " r";
+
+ StringBuilder sql = new StringBuilder();
+ sql.append("SELECT ").append(PARAMETERS_SELECT_COLUMNS);
+ sql.append(" FROM ").append(parametersTable);
+ sql.append(" JOIN ").append(routinesTable);
+ sql.append(" ON p.specific_catalog = r.specific_catalog");
+ sql.append(" AND p.specific_schema = r.specific_schema");
+ sql.append(" AND p.specific_name = r.specific_name");
+ sql.append(" WHERE r.").append(PROCEDURE_TYPE_FILTER);
+ if (schemaPattern != null) {
+ sql.append(" AND p.specific_schema LIKE '").append(escapeSql(schemaPattern)).append("'");
+ }
+ if (procedureNamePattern != null) {
+ sql.append(" AND p.specific_name LIKE '").append(escapeSql(procedureNamePattern)).append("'");
+ }
+ if (columnNamePattern != null) {
+ sql.append(" AND p.parameter_name LIKE '").append(escapeSql(columnNamePattern)).append("'");
+ }
+ sql.append(
+ " ORDER BY p.specific_catalog, p.specific_schema, p.specific_name, p.ordinal_position");
+ return sql.toString();
+ }
+
+ /** Escapes single quotes in SQL string literals to prevent SQL injection. */
+ private static String escapeSql(String value) {
+ return value.replace("'", "''");
+ }
+
+  /** Quotes the catalog identifier, escaping embedded backticks to prevent identifier injection. */
+  private static String getCatalogPrefix(String catalog) {
+    return (catalog == null) ? "system" : "`" + catalog.replace("`", "``") + "`";
+  }
}
diff --git a/src/main/java/com/databricks/jdbc/dbclient/impl/common/MetadataResultSetBuilder.java b/src/main/java/com/databricks/jdbc/dbclient/impl/common/MetadataResultSetBuilder.java
index f54e1375a4..68f202afc9 100644
--- a/src/main/java/com/databricks/jdbc/dbclient/impl/common/MetadataResultSetBuilder.java
+++ b/src/main/java/com/databricks/jdbc/dbclient/impl/common/MetadataResultSetBuilder.java
@@ -510,6 +510,44 @@ public DatabricksResultSet getFunctionsResult(DatabricksResultSet resultSet, Str
CommandName.LIST_FUNCTIONS);
}
+ public DatabricksResultSet getProceduresResult(DatabricksResultSet resultSet)
+ throws SQLException {
+    List<List<Object>> rows = getRowsForProcedures(resultSet);
+ return buildResultSet(
+ PROCEDURES_COLUMNS,
+ rows,
+ GET_PROCEDURES_STATEMENT_ID,
+ resultSet.getMetaData(),
+ CommandName.LIST_PROCEDURES);
+ }
+
+  public DatabricksResultSet getProceduresResult(List<List<Object>> rows) {
+ return buildResultSet(
+ PROCEDURES_COLUMNS,
+ rows != null ? rows : new ArrayList<>(),
+ GET_PROCEDURES_STATEMENT_ID,
+ CommandName.LIST_PROCEDURES);
+ }
+
+ public DatabricksResultSet getProcedureColumnsResult(DatabricksResultSet resultSet)
+ throws SQLException {
+    List<List<Object>> rows = getRowsForProcedureColumns(resultSet);
+ return buildResultSet(
+ PROCEDURE_COLUMNS_COLUMNS,
+ rows,
+ GET_PROCEDURE_COLUMNS_STATEMENT_ID,
+ resultSet.getMetaData(),
+ CommandName.LIST_PROCEDURE_COLUMNS);
+ }
+
+  public DatabricksResultSet getProcedureColumnsResult(List<List<Object>> rows) {
+ return buildResultSet(
+ PROCEDURE_COLUMNS_COLUMNS,
+ rows != null ? rows : new ArrayList<>(),
+ GET_PROCEDURE_COLUMNS_STATEMENT_ID,
+ CommandName.LIST_PROCEDURE_COLUMNS);
+ }
+
public DatabricksResultSet getColumnsResult(DatabricksResultSet resultSet) throws SQLException {
List> rows = getRows(resultSet, COLUMN_COLUMNS, defaultAdapter);
return buildResultSet(
@@ -1129,6 +1167,118 @@ private List> getRowsForFunctions(
return rows;
}
+  private List<List<Object>> getRowsForProcedures(DatabricksResultSet resultSet)
+      throws SQLException {
+    List<List<Object>> rows = new ArrayList<>();
+ while (resultSet.next()) {
+      List<Object> row = new ArrayList<>();
+ row.add(getStringOrNull(resultSet, "routine_catalog")); // PROCEDURE_CAT
+ row.add(getStringOrNull(resultSet, "routine_schema")); // PROCEDURE_SCHEM
+ row.add(getStringOrNull(resultSet, "routine_name")); // PROCEDURE_NAME
+ row.add(null); // NUM_INPUT_PARAMS (reserved)
+ row.add(null); // NUM_OUTPUT_PARAMS (reserved)
+ row.add(null); // NUM_RESULT_SETS (reserved)
+ row.add(getStringOrNull(resultSet, "comment")); // REMARKS
+      row.add((short) procedureResultUnknown); // PROCEDURE_TYPE (result presence not determinable here)
+ row.add(getStringOrNull(resultSet, "specific_name")); // SPECIFIC_NAME
+ rows.add(row);
+ }
+ return rows;
+ }
+
+  private List<List<Object>> getRowsForProcedureColumns(DatabricksResultSet resultSet)
+      throws SQLException {
+    List<List<Object>> rows = new ArrayList<>();
+ while (resultSet.next()) {
+ String dataType = getStringOrNull(resultSet, "data_type");
+ String parameterMode = getStringOrNull(resultSet, "parameter_mode");
+ String isResult = getStringOrNull(resultSet, "is_result");
+
+      List<Object> row = new ArrayList<>();
+ row.add(getStringOrNull(resultSet, "specific_catalog")); // PROCEDURE_CAT
+ row.add(getStringOrNull(resultSet, "specific_schema")); // PROCEDURE_SCHEM
+ row.add(getStringOrNull(resultSet, "specific_name")); // PROCEDURE_NAME
+ row.add(getStringOrNull(resultSet, "parameter_name")); // COLUMN_NAME
+ row.add(mapParameterModeToColumnType(parameterMode, isResult)); // COLUMN_TYPE
+ row.add(
+ dataType != null
+ ? getCode(stripBaseTypeName(dataType.toUpperCase()))
+ : null); // DATA_TYPE
+ row.add(dataType != null ? dataType.toUpperCase() : null); // TYPE_NAME
+ Integer numericPrecision = getIntOrNull(resultSet, "numeric_precision");
+ Integer charMaxLength = getIntOrNull(resultSet, "character_maximum_length");
+ Integer charOctetLength = getIntOrNull(resultSet, "character_octet_length");
+ row.add(numericPrecision != null ? numericPrecision : charMaxLength); // PRECISION
+ row.add(charOctetLength != null ? charOctetLength : numericPrecision); // LENGTH
+ row.add(getShortOrNull(resultSet, "numeric_scale")); // SCALE
+ row.add(getShortOrNull(resultSet, "numeric_precision_radix")); // RADIX
+ row.add((short) procedureNullableUnknown); // NULLABLE
+ row.add(getStringOrNull(resultSet, "comment")); // REMARKS
+ row.add(getStringOrNull(resultSet, "parameter_default")); // COLUMN_DEF
+ row.add(null); // SQL_DATA_TYPE (reserved)
+ row.add(null); // SQL_DATETIME_SUB (reserved)
+ row.add(getIntOrNull(resultSet, "character_octet_length")); // CHAR_OCTET_LENGTH
+ row.add(getIntOrNull(resultSet, "ordinal_position")); // ORDINAL_POSITION
+ row.add(""); // IS_NULLABLE (unknown)
+ row.add(getStringOrNull(resultSet, "specific_name")); // SPECIFIC_NAME
+ rows.add(row);
+ }
+ return rows;
+ }
+
+ private static short mapParameterModeToColumnType(String parameterMode, String isResult) {
+ if ("YES".equalsIgnoreCase(isResult)) {
+ return (short) procedureColumnReturn;
+ }
+ if (parameterMode == null) {
+ return (short) procedureColumnUnknown;
+ }
+ switch (parameterMode.toUpperCase()) {
+ case "IN":
+ return (short) procedureColumnIn;
+ case "INOUT":
+ return (short) procedureColumnInOut;
+ case "OUT":
+ return (short) procedureColumnOut;
+ default:
+ return (short) procedureColumnUnknown;
+ }
+ }
+
+ private static String getStringOrNull(DatabricksResultSet resultSet, String columnName)
+ throws SQLException {
+ try {
+ Object val = resultSet.getObject(columnName);
+ return val != null ? val.toString() : null;
+ } catch (SQLException e) {
+ return null;
+ }
+ }
+
+ private static Integer getIntOrNull(DatabricksResultSet resultSet, String columnName)
+ throws SQLException {
+ try {
+ Object val = resultSet.getObject(columnName);
+ if (val == null) return null;
+ if (val instanceof Number) return ((Number) val).intValue();
+ return Integer.parseInt(val.toString());
+ } catch (SQLException | NumberFormatException e) {
+ return null;
+ }
+ }
+
+ private static Short getShortOrNull(DatabricksResultSet resultSet, String columnName)
+ throws SQLException {
+ try {
+ Object val = resultSet.getObject(columnName);
+ if (val == null) return null;
+ if (val instanceof Number) return ((Number) val).shortValue();
+ return Short.parseShort(val.toString());
+ } catch (SQLException | NumberFormatException e) {
+ return null;
+ }
+ }
+
private List> getRowsForSchemas(
DatabricksResultSet resultSet,
List columns,
diff --git a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksEmptyMetadataClient.java b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksEmptyMetadataClient.java
index 048142dff8..8993838bce 100644
--- a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksEmptyMetadataClient.java
+++ b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksEmptyMetadataClient.java
@@ -81,6 +81,29 @@ public DatabricksResultSet listFunctions(
return metadataResultSetBuilder.getFunctionsResult("", null);
}
+ @Override
+ public DatabricksResultSet listProcedures(
+ IDatabricksSession session,
+ String catalog,
+ String schemaNamePattern,
+ String procedureNamePattern)
+ throws SQLException {
+ LOGGER.warn("Empty metadata implementation for listProcedures.");
+ return metadataResultSetBuilder.getProceduresResult(new ArrayList<>());
+ }
+
+ @Override
+ public DatabricksResultSet listProcedureColumns(
+ IDatabricksSession session,
+ String catalog,
+ String schemaNamePattern,
+ String procedureNamePattern,
+ String columnNamePattern)
+ throws SQLException {
+ LOGGER.warn("Empty metadata implementation for listProcedureColumns.");
+ return metadataResultSetBuilder.getProcedureColumnsResult(new ArrayList<>());
+ }
+
@Override
public DatabricksResultSet listPrimaryKeys(
IDatabricksSession session, String catalog, String schema, String table) throws SQLException {
diff --git a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java
index 68cffee45b..caa031374a 100644
--- a/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java
+++ b/src/main/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClient.java
@@ -4,6 +4,7 @@
import static com.databricks.jdbc.dbclient.impl.common.CommandConstants.METADATA_STATEMENT_ID;
import com.databricks.jdbc.api.impl.DatabricksResultSet;
+import com.databricks.jdbc.api.impl.ImmutableSqlParameter;
import com.databricks.jdbc.api.internal.IDatabricksSession;
import com.databricks.jdbc.common.MetadataOperationType;
import com.databricks.jdbc.common.StatementType;
@@ -11,6 +12,7 @@
import com.databricks.jdbc.common.util.WildcardUtil;
import com.databricks.jdbc.dbclient.IDatabricksClient;
import com.databricks.jdbc.dbclient.IDatabricksMetadataClient;
+import com.databricks.jdbc.dbclient.impl.common.CommandConstants;
import com.databricks.jdbc.dbclient.impl.common.MetadataResultSetBuilder;
import com.databricks.jdbc.log.JdbcLogger;
import com.databricks.jdbc.log.JdbcLoggerFactory;
@@ -20,6 +22,7 @@
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@@ -239,6 +242,51 @@ public DatabricksResultSet listFunctions(
getResultSet(SQL, session, MetadataOperationType.GET_FUNCTIONS), catalog);
}
+ @Override
+ public DatabricksResultSet listProcedures(
+ IDatabricksSession session,
+ String catalog,
+ String schemaNamePattern,
+ String procedureNamePattern)
+ throws SQLException {
+ String currentCatalog = isMultipleCatalogSupportDisabled() ? session.getCurrentCatalog() : null;
+ if (!metadataResultSetBuilder.shouldAllowCatalogAccess(catalog, currentCatalog, session)) {
+ return metadataResultSetBuilder.getProceduresResult(new ArrayList<>());
+ }
+
+ catalog = autoFillCatalog(catalog, currentCatalog);
+    Map<Integer, ImmutableSqlParameter> params = new HashMap<>();
+    String SQL =
+        CommandConstants.buildProceduresSQL(catalog, schemaNamePattern, procedureNamePattern);
+ LOGGER.debug("SQL command to fetch procedures: {}", SQL);
+ return metadataResultSetBuilder.getProceduresResult(
+ getResultSet(SQL, params, session, MetadataOperationType.GET_PROCEDURES));
+ }
+
+ @Override
+ public DatabricksResultSet listProcedureColumns(
+ IDatabricksSession session,
+ String catalog,
+ String schemaNamePattern,
+ String procedureNamePattern,
+ String columnNamePattern)
+ throws SQLException {
+ String currentCatalog = isMultipleCatalogSupportDisabled() ? session.getCurrentCatalog() : null;
+ if (!metadataResultSetBuilder.shouldAllowCatalogAccess(catalog, currentCatalog, session)) {
+ return metadataResultSetBuilder.getProcedureColumnsResult(new ArrayList<>());
+ }
+
+ catalog = autoFillCatalog(catalog, currentCatalog);
+    Map<Integer, ImmutableSqlParameter> params = new HashMap<>();
+    String SQL =
+        CommandConstants.buildProcedureColumnsSQL(
+            catalog, schemaNamePattern, procedureNamePattern, columnNamePattern);
+ LOGGER.debug("SQL command to fetch procedure columns: {}", SQL);
+ return metadataResultSetBuilder.getProcedureColumnsResult(
+ getResultSet(SQL, params, session, MetadataOperationType.GET_PROCEDURE_COLUMNS));
+ }
+
@Override
public DatabricksResultSet listPrimaryKeys(
IDatabricksSession session, String catalog, String schema, String table) throws SQLException {
@@ -407,10 +455,19 @@ private String autoFillCatalog(String catalog, String currentCatalog) {
private DatabricksResultSet getResultSet(
String SQL, IDatabricksSession session, MetadataOperationType metadataOperationType)
throws SQLException {
+ return getResultSet(SQL, new HashMap<>(), session, metadataOperationType);
+ }
+
+ private DatabricksResultSet getResultSet(
+ String SQL,
+ Map params,
+ IDatabricksSession session,
+ MetadataOperationType metadataOperationType)
+ throws SQLException {
return queryExecutionClient.executeStatement(
SQL,
session.getComputeResource(),
- new HashMap<>(),
+ params,
StatementType.METADATA,
session,
null /* parentStatement */,
diff --git a/src/main/java/com/databricks/jdbc/dbclient/impl/thrift/DatabricksThriftServiceClient.java b/src/main/java/com/databricks/jdbc/dbclient/impl/thrift/DatabricksThriftServiceClient.java
index ff2e349056..a919ac7825 100644
--- a/src/main/java/com/databricks/jdbc/dbclient/impl/thrift/DatabricksThriftServiceClient.java
+++ b/src/main/java/com/databricks/jdbc/dbclient/impl/thrift/DatabricksThriftServiceClient.java
@@ -21,6 +21,7 @@
import com.databricks.jdbc.common.util.WildcardUtil;
import com.databricks.jdbc.dbclient.IDatabricksClient;
import com.databricks.jdbc.dbclient.IDatabricksMetadataClient;
+import com.databricks.jdbc.dbclient.impl.common.CommandConstants;
import com.databricks.jdbc.dbclient.impl.common.MetadataResultSetBuilder;
import com.databricks.jdbc.dbclient.impl.common.StatementId;
import com.databricks.jdbc.dbclient.impl.sqlexec.CommandBuilder;
@@ -602,6 +603,76 @@ public DatabricksResultSet listFunctions(
catalog, extractRowsFromColumnar(response.getResults()));
}
+ @Override
+ public DatabricksResultSet listProcedures(
+ IDatabricksSession session,
+ String catalog,
+ String schemaNamePattern,
+ String procedureNamePattern)
+ throws SQLException {
+ LOGGER.debug(
+ "Fetching procedures using SQL via Thrift client. Session {}, catalog {}, schemaPattern {}, procedureNamePattern {}.",
+ session.toString(),
+ catalog,
+ schemaNamePattern,
+ procedureNamePattern);
+ DatabricksThreadContextHolder.setSessionId(session.getSessionId());
+
+ if (!metadataResultSetBuilder.shouldAllowCatalogAccess(catalog, null, session)) {
+ return metadataResultSetBuilder.getProceduresResult(new ArrayList<>());
+ }
+
+    Map<Integer, ImmutableSqlParameter> params = new HashMap<>();
+    String sql =
+        CommandConstants.buildProceduresSQL(catalog, schemaNamePattern, procedureNamePattern);
+ return metadataResultSetBuilder.getProceduresResult(
+ executeStatement(
+ sql,
+ session.getComputeResource(),
+ params,
+ StatementType.METADATA,
+ session,
+ null,
+ MetadataOperationType.GET_PROCEDURES));
+ }
+
+ @Override
+ public DatabricksResultSet listProcedureColumns(
+ IDatabricksSession session,
+ String catalog,
+ String schemaNamePattern,
+ String procedureNamePattern,
+ String columnNamePattern)
+ throws SQLException {
+ LOGGER.debug(
+ "Fetching procedure columns using SQL via Thrift client. Session {}, catalog {}, schemaPattern {}, procedureNamePattern {}, columnNamePattern {}.",
+ session.toString(),
+ catalog,
+ schemaNamePattern,
+ procedureNamePattern,
+ columnNamePattern);
+ DatabricksThreadContextHolder.setSessionId(session.getSessionId());
+
+ if (!metadataResultSetBuilder.shouldAllowCatalogAccess(catalog, null, session)) {
+ return metadataResultSetBuilder.getProcedureColumnsResult(new ArrayList<>());
+ }
+
+    Map<Integer, ImmutableSqlParameter> params = new HashMap<>();
+    String sql =
+        CommandConstants.buildProcedureColumnsSQL(
+            catalog, schemaNamePattern, procedureNamePattern, columnNamePattern);
+ return metadataResultSetBuilder.getProcedureColumnsResult(
+ executeStatement(
+ sql,
+ session.getComputeResource(),
+ params,
+ StatementType.METADATA,
+ session,
+ null,
+ MetadataOperationType.GET_PROCEDURE_COLUMNS));
+ }
+
@Override
public DatabricksResultSet listPrimaryKeys(
IDatabricksSession session, String catalog, String schema, String table) throws SQLException {
diff --git a/src/main/java/org/apache/arrow/vector/util/DecimalUtility.java b/src/main/java/org/apache/arrow/vector/util/DecimalUtility.java
index ae76865c6a..d59d033029 100644
--- a/src/main/java/org/apache/arrow/vector/util/DecimalUtility.java
+++ b/src/main/java/org/apache/arrow/vector/util/DecimalUtility.java
@@ -33,215 +33,215 @@
/** Utility methods for configurable precision Decimal values (e.g. {@link BigDecimal}). */
public class DecimalUtility {
- private DecimalUtility() {}
-
- public static final byte[] zeroes =
- new byte[] {
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
- };
- public static final byte[] minus_one =
- new byte[] {
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
- -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1
- };
- private static final boolean LITTLE_ENDIAN = ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN;
-
- /**
- * Read an ArrowType.Decimal at the given value index in the ArrowBuf and convert to a BigDecimal
- * with the given scale.
- */
- public static BigDecimal getBigDecimalFromArrowBuf(
- ArrowBuf bytebuf, int index, int scale, int byteWidth) {
- byte[] value = new byte[byteWidth];
- byte temp;
- final long startIndex = (long) index * byteWidth;
-
- bytebuf.getBytes(startIndex, value, 0, byteWidth);
- if (LITTLE_ENDIAN) {
- // Decimal stored as native endian, need to swap bytes to make BigDecimal if native endian is
- // LE
- int stop = byteWidth / 2;
- for (int i = 0, j; i < stop; i++) {
- temp = value[i];
- j = (byteWidth - 1) - i;
- value[i] = value[j];
- value[j] = temp;
- }
- }
- BigInteger unscaledValue = new BigInteger(value);
- return new BigDecimal(unscaledValue, scale);
+ private DecimalUtility() {}
+
+ public static final byte[] zeroes =
+ new byte[] {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ public static final byte[] minus_one =
+ new byte[] {
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1
+ };
+ private static final boolean LITTLE_ENDIAN = ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN;
+
+ /**
+ * Read an ArrowType.Decimal at the given value index in the ArrowBuf and convert to a BigDecimal
+ * with the given scale.
+ */
+ public static BigDecimal getBigDecimalFromArrowBuf(
+ ArrowBuf bytebuf, int index, int scale, int byteWidth) {
+ byte[] value = new byte[byteWidth];
+ byte temp;
+ final long startIndex = (long) index * byteWidth;
+
+ bytebuf.getBytes(startIndex, value, 0, byteWidth);
+ if (LITTLE_ENDIAN) {
+ // Decimal stored as native endian, need to swap bytes to make BigDecimal if native endian is
+ // LE
+ int stop = byteWidth / 2;
+ for (int i = 0, j; i < stop; i++) {
+ temp = value[i];
+ j = (byteWidth - 1) - i;
+ value[i] = value[j];
+ value[j] = temp;
+ }
}
-
- /**
- * Read an ArrowType.Decimal from the ByteBuffer and convert to a BigDecimal with the given scale.
- */
- public static BigDecimal getBigDecimalFromByteBuffer(
- ByteBuffer bytebuf, int scale, int byteWidth) {
- byte[] value = new byte[byteWidth];
- bytebuf.get(value);
- BigInteger unscaledValue = new BigInteger(value);
- return new BigDecimal(unscaledValue, scale);
+ BigInteger unscaledValue = new BigInteger(value);
+ return new BigDecimal(unscaledValue, scale);
+ }
+
+ /**
+ * Read an ArrowType.Decimal from the ByteBuffer and convert to a BigDecimal with the given scale.
+ */
+ public static BigDecimal getBigDecimalFromByteBuffer(
+ ByteBuffer bytebuf, int scale, int byteWidth) {
+ byte[] value = new byte[byteWidth];
+ bytebuf.get(value);
+ BigInteger unscaledValue = new BigInteger(value);
+ return new BigDecimal(unscaledValue, scale);
+ }
+
+ /**
+ * Read an ArrowType.Decimal from the ArrowBuf at the given value index and return it as a byte
+ * array.
+ */
+ public static byte[] getByteArrayFromArrowBuf(ArrowBuf bytebuf, int index, int byteWidth) {
+ final byte[] value = new byte[byteWidth];
+ final long startIndex = (long) index * byteWidth;
+ bytebuf.getBytes(startIndex, value, 0, byteWidth);
+ return value;
+ }
+
+ /**
+ * Check that the BigDecimal scale equals the vectorScale and that the BigDecimal precision is
+ * less than or equal to the vectorPrecision. If not, then an UnsupportedOperationException is
+ * thrown, otherwise returns true.
+ */
+ public static boolean checkPrecisionAndScale(
+ BigDecimal value, int vectorPrecision, int vectorScale) {
+ if (value.scale() != vectorScale) {
+ throw new UnsupportedOperationException(
+ "BigDecimal scale must equal that in the Arrow vector: "
+ + value.scale()
+ + " != "
+ + vectorScale);
}
-
- /**
- * Read an ArrowType.Decimal from the ArrowBuf at the given value index and return it as a byte
- * array.
- */
- public static byte[] getByteArrayFromArrowBuf(ArrowBuf bytebuf, int index, int byteWidth) {
- final byte[] value = new byte[byteWidth];
- final long startIndex = (long) index * byteWidth;
- bytebuf.getBytes(startIndex, value, 0, byteWidth);
- return value;
+ if (value.precision() > vectorPrecision) {
+ throw new UnsupportedOperationException(
+ "BigDecimal precision cannot be greater than that in the Arrow "
+ + "vector: "
+ + value.precision()
+ + " > "
+ + vectorPrecision);
}
-
- /**
- * Check that the BigDecimal scale equals the vectorScale and that the BigDecimal precision is
- * less than or equal to the vectorPrecision. If not, then an UnsupportedOperationException is
- * thrown, otherwise returns true.
- */
- public static boolean checkPrecisionAndScale(
- BigDecimal value, int vectorPrecision, int vectorScale) {
- if (value.scale() != vectorScale) {
- throw new UnsupportedOperationException(
- "BigDecimal scale must equal that in the Arrow vector: "
- + value.scale()
- + " != "
- + vectorScale);
- }
- if (value.precision() > vectorPrecision) {
- throw new UnsupportedOperationException(
- "BigDecimal precision cannot be greater than that in the Arrow "
- + "vector: "
- + value.precision()
- + " > "
- + vectorPrecision);
- }
- return true;
+ return true;
+ }
+
+ /**
+ * Check that the BigDecimal scale equals the vectorScale and that the BigDecimal precision is
+ * less than or equal to the vectorPrecision. Return true if so, otherwise return false.
+ */
+ public static boolean checkPrecisionAndScaleNoThrow(
+ BigDecimal value, int vectorPrecision, int vectorScale) {
+    return value.scale() == vectorScale && value.precision() <= vectorPrecision;
+ }
+
+ /**
+ * Check that the decimal scale equals the vectorScale and that the decimal precision is less than
+ * or equal to the vectorPrecision. If not, then an UnsupportedOperationException is thrown,
+ * otherwise returns true.
+ */
+ public static boolean checkPrecisionAndScale(
+ int decimalPrecision, int decimalScale, int vectorPrecision, int vectorScale) {
+ if (decimalScale != vectorScale) {
+ throw new UnsupportedOperationException(
+ "BigDecimal scale must equal that in the Arrow vector: "
+ + decimalScale
+ + " != "
+ + vectorScale);
}
-
- /**
- * Check that the BigDecimal scale equals the vectorScale and that the BigDecimal precision is
- * less than or equal to the vectorPrecision. Return true if so, otherwise return false.
- */
- public static boolean checkPrecisionAndScaleNoThrow(
- BigDecimal value, int vectorPrecision, int vectorScale) {
- return value.scale() == vectorScale && value.precision() < vectorPrecision;
+ if (decimalPrecision > vectorPrecision) {
+ throw new UnsupportedOperationException(
+ "BigDecimal precision cannot be greater than that in the Arrow "
+ + "vector: "
+ + decimalPrecision
+ + " > "
+ + vectorPrecision);
}
-
- /**
- * Check that the decimal scale equals the vectorScale and that the decimal precision is less than
- * or equal to the vectorPrecision. If not, then an UnsupportedOperationException is thrown,
- * otherwise returns true.
- */
- public static boolean checkPrecisionAndScale(
- int decimalPrecision, int decimalScale, int vectorPrecision, int vectorScale) {
- if (decimalScale != vectorScale) {
- throw new UnsupportedOperationException(
- "BigDecimal scale must equal that in the Arrow vector: "
- + decimalScale
- + " != "
- + vectorScale);
+ return true;
+ }
+
+ /**
+ * Write the given BigDecimal to the ArrowBuf at the given value index. Will throw an
+ * UnsupportedOperationException if the decimal size is greater than the Decimal vector byte
+ * width.
+ */
+ public static void writeBigDecimalToArrowBuf(
+ BigDecimal value, ArrowBuf bytebuf, int index, int byteWidth) {
+ final byte[] bytes = value.unscaledValue().toByteArray();
+ writeByteArrayToArrowBufHelper(bytes, bytebuf, index, byteWidth);
+ }
+
+ /**
+ * Write the given long to the ArrowBuf at the given value index. This routine extends the
+ * original sign bit to a new upper area in 128-bit or 256-bit.
+ */
+ public static void writeLongToArrowBuf(long value, ArrowBuf bytebuf, int index, int byteWidth) {
+ if (byteWidth != 16 && byteWidth != 32) {
+ throw new UnsupportedOperationException(
+ "DecimalUtility.writeLongToArrowBuf() currently supports "
+ + "128-bit or 256-bit width data");
+ }
+ final long padValue = Long.signum(value) == -1 ? -1L : 0L;
+
+ // ---- Databricks patch start ----
+ if (bytebuf instanceof DatabricksArrowBuf) {
+ DatabricksArrowBuf buf = (DatabricksArrowBuf) bytebuf;
+      final long startIdx = (long) index * byteWidth;
+ if (LITTLE_ENDIAN) {
+ buf.setLong(startIdx, value);
+ for (int i = 1; i <= (byteWidth - 8) / 8; i++) {
+ buf.setLong(startIdx + Long.BYTES * i, padValue);
}
- if (decimalPrecision > vectorPrecision) {
- throw new UnsupportedOperationException(
- "BigDecimal precision cannot be greater than that in the Arrow "
- + "vector: "
- + decimalPrecision
- + " > "
- + vectorPrecision);
+ } else {
+ for (int i = 0; i < (byteWidth - 8) / 8; i++) {
+          buf.setLong(startIdx + Long.BYTES * i, padValue);
}
- return true;
- }
-
- /**
- * Write the given BigDecimal to the ArrowBuf at the given value index. Will throw an
- * UnsupportedOperationException if the decimal size is greater than the Decimal vector byte
- * width.
- */
- public static void writeBigDecimalToArrowBuf(
- BigDecimal value, ArrowBuf bytebuf, int index, int byteWidth) {
- final byte[] bytes = value.unscaledValue().toByteArray();
- writeByteArrayToArrowBufHelper(bytes, bytebuf, index, byteWidth);
- }
-
- /**
- * Write the given long to the ArrowBuf at the given value index. This routine extends the
- * original sign bit to a new upper area in 128-bit or 256-bit.
- */
- public static void writeLongToArrowBuf(long value, ArrowBuf bytebuf, int index, int byteWidth) {
- if (byteWidth != 16 && byteWidth != 32) {
- throw new UnsupportedOperationException(
- "DecimalUtility.writeLongToArrowBuf() currently supports "
- + "128-bit or 256-bit width data");
+ buf.setLong(startIdx + Long.BYTES * (byteWidth - 8) / 8, value);
+ }
+ } else {
+ final long addressOfValue = bytebuf.memoryAddress() + (long) index * byteWidth;
+ if (LITTLE_ENDIAN) {
+ MemoryUtil.putLong(addressOfValue, value);
+ for (int i = 1; i <= (byteWidth - 8) / 8; i++) {
+ MemoryUtil.putLong(addressOfValue + Long.BYTES * i, padValue);
}
- final long padValue = Long.signum(value) == -1 ? -1L : 0L;
-
- // ---- Databricks patch start ----
- if (bytebuf instanceof DatabricksArrowBuf) {
- DatabricksArrowBuf buf = (DatabricksArrowBuf) bytebuf;
- final int startIdx = index * byteWidth;
- if (LITTLE_ENDIAN) {
- buf.setLong(startIdx, value);
- for (int i = 1; i <= (byteWidth - 8) / 8; i++) {
- buf.setLong(startIdx + Long.BYTES * i, padValue);
- }
- } else {
- for (int i = 0; i < (byteWidth - 8) / 8; i++) {
- MemoryUtil.putLong(startIdx + Long.BYTES * i, padValue);
- }
- buf.setLong(startIdx + Long.BYTES * (byteWidth - 8) / 8, value);
- }
- } else {
- final long addressOfValue = bytebuf.memoryAddress() + (long) index * byteWidth;
- if (LITTLE_ENDIAN) {
- MemoryUtil.putLong(addressOfValue, value);
- for (int i = 1; i <= (byteWidth - 8) / 8; i++) {
- MemoryUtil.putLong(addressOfValue + Long.BYTES * i, padValue);
- }
- } else {
- for (int i = 0; i < (byteWidth - 8) / 8; i++) {
- MemoryUtil.putLong(addressOfValue + Long.BYTES * i, padValue);
- }
- MemoryUtil.putLong(addressOfValue + Long.BYTES * (byteWidth - 8) / 8, value);
- }
+ } else {
+ for (int i = 0; i < (byteWidth - 8) / 8; i++) {
+ MemoryUtil.putLong(addressOfValue + Long.BYTES * i, padValue);
}
- // ---- Databricks patch end ----
+ MemoryUtil.putLong(addressOfValue + Long.BYTES * (byteWidth - 8) / 8, value);
+ }
}
-
- /**
- * Write the given byte array to the ArrowBuf at the given value index. Will throw an
- * UnsupportedOperationException if the decimal size is greater than the Decimal vector byte
- * width.
- */
- public static void writeByteArrayToArrowBuf(
- byte[] bytes, ArrowBuf bytebuf, int index, int byteWidth) {
- writeByteArrayToArrowBufHelper(bytes, bytebuf, index, byteWidth);
+ // ---- Databricks patch end ----
+ }
+
+ /**
+ * Write the given byte array to the ArrowBuf at the given value index. Will throw an
+ * UnsupportedOperationException if the decimal size is greater than the Decimal vector byte
+ * width.
+ */
+ public static void writeByteArrayToArrowBuf(
+ byte[] bytes, ArrowBuf bytebuf, int index, int byteWidth) {
+ writeByteArrayToArrowBufHelper(bytes, bytebuf, index, byteWidth);
+ }
+
+ private static void writeByteArrayToArrowBufHelper(
+ byte[] bytes, ArrowBuf bytebuf, int index, int byteWidth) {
+ final long startIndex = (long) index * byteWidth;
+ if (bytes.length > byteWidth) {
+ throw new UnsupportedOperationException(
+ "Decimal size greater than " + byteWidth + " bytes: " + bytes.length);
}
- private static void writeByteArrayToArrowBufHelper(
- byte[] bytes, ArrowBuf bytebuf, int index, int byteWidth) {
- final long startIndex = (long) index * byteWidth;
- if (bytes.length > byteWidth) {
- throw new UnsupportedOperationException(
- "Decimal size greater than " + byteWidth + " bytes: " + bytes.length);
- }
-
- byte[] padBytes = bytes[0] < 0 ? minus_one : zeroes;
- if (LITTLE_ENDIAN) {
- // Decimal stored as native-endian, need to swap data bytes before writing to ArrowBuf if LE
- byte[] bytesLE = new byte[bytes.length];
- for (int i = 0; i < bytes.length; i++) {
- bytesLE[i] = bytes[bytes.length - 1 - i];
- }
-
- // Write LE data
- bytebuf.setBytes(startIndex, bytesLE, 0, bytes.length);
- bytebuf.setBytes(startIndex + bytes.length, padBytes, 0, byteWidth - bytes.length);
- } else {
- // Write BE data
- bytebuf.setBytes(startIndex + byteWidth - bytes.length, bytes, 0, bytes.length);
- bytebuf.setBytes(startIndex, padBytes, 0, byteWidth - bytes.length);
- }
+ byte[] padBytes = bytes[0] < 0 ? minus_one : zeroes;
+ if (LITTLE_ENDIAN) {
+ // Decimal stored as native-endian, need to swap data bytes before writing to ArrowBuf if LE
+ byte[] bytesLE = new byte[bytes.length];
+ for (int i = 0; i < bytes.length; i++) {
+ bytesLE[i] = bytes[bytes.length - 1 - i];
+ }
+
+ // Write LE data
+ bytebuf.setBytes(startIndex, bytesLE, 0, bytes.length);
+ bytebuf.setBytes(startIndex + bytes.length, padBytes, 0, byteWidth - bytes.length);
+ } else {
+ // Write BE data
+ bytebuf.setBytes(startIndex + byteWidth - bytes.length, bytes, 0, bytes.length);
+ bytebuf.setBytes(startIndex, padBytes, 0, byteWidth - bytes.length);
}
+ }
}
diff --git a/src/test/java/com/databricks/jdbc/TestConstants.java b/src/test/java/com/databricks/jdbc/TestConstants.java
index e4381a83d2..5e51a4ab81 100644
--- a/src/test/java/com/databricks/jdbc/TestConstants.java
+++ b/src/test/java/com/databricks/jdbc/TestConstants.java
@@ -31,6 +31,8 @@ public class TestConstants {
public static final String TEST_FOREIGN_SCHEMA = "foreignSchema";
public static final String TEST_FOREIGN_TABLE = "foreignTable";
public static final String TEST_FUNCTION_PATTERN = "functionPattern";
+ public static final String TEST_PROCEDURE_PATTERN = "procedurePattern";
+ public static final String TEST_COLUMN_PATTERN = "columnPattern";
public static final String TEST_STRING = "test";
public static final String TEST_STRING_2 = "test2";
public static final String TEST_USER = "testUser";
diff --git a/src/test/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaDataTest.java b/src/test/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaDataTest.java
index 39798d5a1a..c82d840788 100644
--- a/src/test/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaDataTest.java
+++ b/src/test/java/com/databricks/jdbc/api/impl/DatabricksDatabaseMetaDataTest.java
@@ -13,6 +13,7 @@
import com.databricks.jdbc.dbclient.IDatabricksMetadataClient;
import com.databricks.jdbc.exception.DatabricksSQLException;
import java.sql.*;
+import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.stream.Stream;
@@ -68,6 +69,11 @@ public void setup() throws SQLException {
.thenReturn(Mockito.mock(DatabricksResultSet.class));
when(metadataClient.listCrossReferences(any(), any(), any(), any(), any(), any(), any()))
.thenReturn(Mockito.mock(DatabricksResultSet.class));
+ when(metadataClient.listProcedures(any(), any(), any(), any()))
+ .thenAnswer(invocation -> metadataResultSetBuilder.getProceduresResult(new ArrayList<>()));
+ when(metadataClient.listProcedureColumns(any(), any(), any(), any(), any()))
+ .thenAnswer(
+ invocation -> metadataResultSetBuilder.getProcedureColumnsResult(new ArrayList<>()));
when(connection.getConnection()).thenReturn(Mockito.mock(Connection.class));
when(session.isOpen()).thenReturn(true);
}
diff --git a/src/test/java/com/databricks/jdbc/common/MetadataOperationTypeTest.java b/src/test/java/com/databricks/jdbc/common/MetadataOperationTypeTest.java
index fe4009db1e..613f1f70af 100644
--- a/src/test/java/com/databricks/jdbc/common/MetadataOperationTypeTest.java
+++ b/src/test/java/com/databricks/jdbc/common/MetadataOperationTypeTest.java
@@ -13,7 +13,7 @@ public class MetadataOperationTypeTest {
@Test
void testAllEnumValuesExist() {
// Verify all expected enum values exist
- assertEquals(7, MetadataOperationType.values().length);
+ assertEquals(9, MetadataOperationType.values().length);
assertNotNull(MetadataOperationType.GET_CATALOGS);
assertNotNull(MetadataOperationType.GET_SCHEMAS);
assertNotNull(MetadataOperationType.GET_TABLES);
@@ -21,6 +21,8 @@ void testAllEnumValuesExist() {
assertNotNull(MetadataOperationType.GET_FUNCTIONS);
assertNotNull(MetadataOperationType.GET_PRIMARY_KEYS);
assertNotNull(MetadataOperationType.GET_CROSS_REFERENCE);
+ assertNotNull(MetadataOperationType.GET_PROCEDURES);
+ assertNotNull(MetadataOperationType.GET_PROCEDURE_COLUMNS);
}
@ParameterizedTest
@@ -31,7 +33,9 @@ void testAllEnumValuesExist() {
"GET_COLUMNS, GetColumns",
"GET_FUNCTIONS, GetFunctions",
"GET_PRIMARY_KEYS, GetPrimaryKeys",
- "GET_CROSS_REFERENCE, GetCrossReference"
+ "GET_CROSS_REFERENCE, GetCrossReference",
+ "GET_PROCEDURES, GetProcedures",
+ "GET_PROCEDURE_COLUMNS, GetProcedureColumns"
})
void testHeaderValues(String enumName, String expectedHeaderValue) {
MetadataOperationType operationType = MetadataOperationType.valueOf(enumName);
@@ -72,4 +76,15 @@ void testGetPrimaryKeysHeaderValue() {
void testGetCrossReferenceHeaderValue() {
assertEquals("GetCrossReference", MetadataOperationType.GET_CROSS_REFERENCE.getHeaderValue());
}
+
+ @Test
+ void testGetProceduresHeaderValue() {
+ assertEquals("GetProcedures", MetadataOperationType.GET_PROCEDURES.getHeaderValue());
+ }
+
+ @Test
+ void testGetProcedureColumnsHeaderValue() {
+ assertEquals(
+ "GetProcedureColumns", MetadataOperationType.GET_PROCEDURE_COLUMNS.getHeaderValue());
+ }
}
diff --git a/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java b/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java
index 8174708c23..e7e641e2c8 100644
--- a/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java
+++ b/src/test/java/com/databricks/jdbc/dbclient/impl/sqlexec/DatabricksMetadataQueryClientTest.java
@@ -1286,4 +1286,199 @@ void testNoUnnecessaryGetCurrentCatalogCallWhenSupportEnabled() throws SQLExcept
// Verify getCurrentCatalog was NEVER called when support is enabled
verify(session, never()).getCurrentCatalog();
}
+
+ // ==================== listProcedures tests ====================
+
+ private static Stream listProceduresTestParams() {
+ return Stream.of(
+ Arguments.of(
+ "SELECT routine_catalog, routine_schema, routine_name, comment, specific_name"
+ + " FROM `catalog1`.information_schema.routines"
+ + " WHERE routine_type = 'PROCEDURE'"
+ + " AND routine_schema LIKE ?"
+ + " AND routine_name LIKE ?"
+ + " ORDER BY routine_catalog, routine_schema, routine_name",
+ TEST_CATALOG,
+ TEST_SCHEMA,
+ TEST_PROCEDURE_PATTERN,
+ "test for get procedures with catalog, schema and name pattern"),
+ Arguments.of(
+ "SELECT routine_catalog, routine_schema, routine_name, comment, specific_name"
+ + " FROM `catalog1`.information_schema.routines"
+ + " WHERE routine_type = 'PROCEDURE'"
+ + " AND routine_name LIKE ?"
+ + " ORDER BY routine_catalog, routine_schema, routine_name",
+ TEST_CATALOG,
+ null,
+ TEST_PROCEDURE_PATTERN,
+ "test for get procedures without schema"),
+ Arguments.of(
+ "SELECT routine_catalog, routine_schema, routine_name, comment, specific_name"
+ + " FROM `catalog1`.information_schema.routines"
+ + " WHERE routine_type = 'PROCEDURE'"
+ + " AND routine_schema LIKE ?"
+ + " ORDER BY routine_catalog, routine_schema, routine_name",
+ TEST_CATALOG,
+ TEST_SCHEMA,
+ null,
+ "test for get procedures without name pattern"),
+ Arguments.of(
+ "SELECT routine_catalog, routine_schema, routine_name, comment, specific_name"
+ + " FROM system.information_schema.routines"
+ + " WHERE routine_type = 'PROCEDURE'"
+ + " ORDER BY routine_catalog, routine_schema, routine_name",
+ null,
+ null,
+ null,
+ "test for get procedures with null catalog"));
+ }
+
+ @ParameterizedTest
+ @MethodSource("listProceduresTestParams")
+ void testListProcedures(
+ String sql, String catalog, String schema, String procedurePattern, String description)
+ throws SQLException {
+ when(session.getComputeResource()).thenReturn(WAREHOUSE_COMPUTE);
+ DatabricksMetadataSdkClient metadataClient = new DatabricksMetadataSdkClient(mockClient);
+ when(mockClient.executeStatement(
+ eq(sql),
+ eq(WAREHOUSE_COMPUTE),
+ any(),
+ eq(StatementType.METADATA),
+ eq(session),
+ any(),
+ eq(MetadataOperationType.GET_PROCEDURES)))
+ .thenReturn(mockedResultSet);
+ when(mockedResultSet.next()).thenReturn(true, false);
+ when(mockedResultSet.getObject("routine_catalog")).thenReturn("main");
+ when(mockedResultSet.getObject("routine_schema")).thenReturn("default");
+ when(mockedResultSet.getObject("routine_name")).thenReturn("test_proc");
+ when(mockedResultSet.getObject("comment")).thenReturn(null);
+ when(mockedResultSet.getObject("specific_name")).thenReturn("test_proc");
+ doReturn(5).when(mockedMetaData).getColumnCount();
+ when(mockedResultSet.getMetaData()).thenReturn(mockedMetaData);
+ DatabricksResultSet actualResult =
+ metadataClient.listProcedures(session, catalog, schema, procedurePattern);
+ assertEquals(
+ StatementState.SUCCEEDED, actualResult.getStatementStatus().getState(), description);
+ assertEquals(GET_PROCEDURES_STATEMENT_ID, actualResult.getStatementId(), description);
+ assertEquals(
+ 1, ((DatabricksResultSetMetaData) actualResult.getMetaData()).getTotalRows(), description);
+ }
+
+ // ==================== listProcedureColumns tests ====================
+
+ private static Stream listProcedureColumnsTestParams() {
+ return Stream.of(
+ Arguments.of(
+ "SELECT p.specific_catalog, p.specific_schema, p.specific_name,"
+ + " p.parameter_name, p.parameter_mode, p.is_result,"
+ + " p.data_type, p.full_data_type,"
+ + " p.numeric_precision, p.numeric_precision_radix, p.numeric_scale,"
+ + " p.character_maximum_length, p.character_octet_length,"
+ + " p.ordinal_position, p.parameter_default, p.comment"
+ + " FROM `catalog1`.information_schema.parameters p"
+ + " JOIN `catalog1`.information_schema.routines r"
+ + " ON p.specific_catalog = r.specific_catalog"
+ + " AND p.specific_schema = r.specific_schema"
+ + " AND p.specific_name = r.specific_name"
+ + " WHERE r.routine_type = 'PROCEDURE'"
+ + " AND p.specific_schema LIKE ?"
+ + " AND p.specific_name LIKE ?"
+ + " AND p.parameter_name LIKE ?"
+ + " ORDER BY p.specific_catalog, p.specific_schema, p.specific_name, p.ordinal_position",
+ TEST_CATALOG,
+ TEST_SCHEMA,
+ TEST_PROCEDURE_PATTERN,
+ TEST_COLUMN_PATTERN,
+ "test for get procedure columns with all filters"),
+ Arguments.of(
+ "SELECT p.specific_catalog, p.specific_schema, p.specific_name,"
+ + " p.parameter_name, p.parameter_mode, p.is_result,"
+ + " p.data_type, p.full_data_type,"
+ + " p.numeric_precision, p.numeric_precision_radix, p.numeric_scale,"
+ + " p.character_maximum_length, p.character_octet_length,"
+ + " p.ordinal_position, p.parameter_default, p.comment"
+ + " FROM `catalog1`.information_schema.parameters p"
+ + " JOIN `catalog1`.information_schema.routines r"
+ + " ON p.specific_catalog = r.specific_catalog"
+ + " AND p.specific_schema = r.specific_schema"
+ + " AND p.specific_name = r.specific_name"
+ + " WHERE r.routine_type = 'PROCEDURE'"
+ + " AND p.specific_name LIKE ?"
+ + " ORDER BY p.specific_catalog, p.specific_schema, p.specific_name, p.ordinal_position",
+ TEST_CATALOG,
+ null,
+ TEST_PROCEDURE_PATTERN,
+ null,
+ "test for get procedure columns without schema and column pattern"),
+ Arguments.of(
+ "SELECT p.specific_catalog, p.specific_schema, p.specific_name,"
+ + " p.parameter_name, p.parameter_mode, p.is_result,"
+ + " p.data_type, p.full_data_type,"
+ + " p.numeric_precision, p.numeric_precision_radix, p.numeric_scale,"
+ + " p.character_maximum_length, p.character_octet_length,"
+ + " p.ordinal_position, p.parameter_default, p.comment"
+ + " FROM system.information_schema.parameters p"
+ + " JOIN system.information_schema.routines r"
+ + " ON p.specific_catalog = r.specific_catalog"
+ + " AND p.specific_schema = r.specific_schema"
+ + " AND p.specific_name = r.specific_name"
+ + " WHERE r.routine_type = 'PROCEDURE'"
+ + " ORDER BY p.specific_catalog, p.specific_schema, p.specific_name, p.ordinal_position",
+ null,
+ null,
+ null,
+ null,
+ "test for get procedure columns with null catalog and no filters"));
+ }
+
+ @ParameterizedTest
+ @MethodSource("listProcedureColumnsTestParams")
+ void testListProcedureColumns(
+ String sql,
+ String catalog,
+ String schema,
+ String procedurePattern,
+ String columnPattern,
+ String description)
+ throws SQLException {
+ when(session.getComputeResource()).thenReturn(WAREHOUSE_COMPUTE);
+ DatabricksMetadataSdkClient metadataClient = new DatabricksMetadataSdkClient(mockClient);
+ when(mockClient.executeStatement(
+ eq(sql),
+ eq(WAREHOUSE_COMPUTE),
+ any(),
+ eq(StatementType.METADATA),
+ eq(session),
+ any(),
+ eq(MetadataOperationType.GET_PROCEDURE_COLUMNS)))
+ .thenReturn(mockedResultSet);
+ when(mockedResultSet.next()).thenReturn(true, false);
+ when(mockedResultSet.getObject("specific_catalog")).thenReturn("main");
+ when(mockedResultSet.getObject("specific_schema")).thenReturn("default");
+ when(mockedResultSet.getObject("specific_name")).thenReturn("test_proc");
+ when(mockedResultSet.getObject("parameter_name")).thenReturn("x");
+ when(mockedResultSet.getObject("parameter_mode")).thenReturn("IN");
+ when(mockedResultSet.getObject("is_result")).thenReturn("NO");
+ when(mockedResultSet.getObject("data_type")).thenReturn("INT");
+ when(mockedResultSet.getObject("numeric_precision")).thenReturn(null);
+ when(mockedResultSet.getObject("numeric_precision_radix")).thenReturn(2);
+ when(mockedResultSet.getObject("numeric_scale")).thenReturn(null);
+ when(mockedResultSet.getObject("character_maximum_length")).thenReturn(null);
+ when(mockedResultSet.getObject("character_octet_length")).thenReturn(null);
+ when(mockedResultSet.getObject("ordinal_position")).thenReturn(0);
+ when(mockedResultSet.getObject("parameter_default")).thenReturn(null);
+ when(mockedResultSet.getObject("comment")).thenReturn(null);
+ doReturn(16).when(mockedMetaData).getColumnCount();
+ when(mockedResultSet.getMetaData()).thenReturn(mockedMetaData);
+ DatabricksResultSet actualResult =
+ metadataClient.listProcedureColumns(
+ session, catalog, schema, procedurePattern, columnPattern);
+ assertEquals(
+ StatementState.SUCCEEDED, actualResult.getStatementStatus().getState(), description);
+ assertEquals(GET_PROCEDURE_COLUMNS_STATEMENT_ID, actualResult.getStatementId(), description);
+ assertEquals(
+ 1, ((DatabricksResultSetMetaData) actualResult.getMetaData()).getTotalRows(), description);
+ }
}
diff --git a/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataIntegrationTests.java b/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataIntegrationTests.java
index 9cadf72515..d87a7edf2e 100644
--- a/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataIntegrationTests.java
+++ b/src/test/java/com/databricks/jdbc/integration/fakeservice/tests/MetadataIntegrationTests.java
@@ -438,6 +438,87 @@ void testParameterMetaData_NoParameters() throws SQLException {
pstmt.close();
}
+ @Test
+ void testGetProceduresAndProcedureColumns() throws SQLException {
+ assumeTrue(isSqlExecSdkClient(), "This test only runs for SQL Execution API");
+
+ DatabaseMetaData metaData = connection.getMetaData();
+ String catalog = "main";
+ String schema = getDatabricksSchema();
+ String procName = "jdbc_test_compute_area";
+
+ // Create a test procedure with IN and OUT parameters
+ executeSQL(
+ connection,
+ "CREATE OR REPLACE PROCEDURE "
+ + catalog
+ + "."
+ + schema
+ + "."
+ + procName
+ + "(x DOUBLE, y DOUBLE, OUT area DOUBLE)\n"
+ + "LANGUAGE SQL\n"
+ + "SQL SECURITY INVOKER\n"
+ + "COMMENT 'Test procedure for JDBC integration tests'\n"
+ + "AS BEGIN\n"
+ + " SET area = x * y;\n"
+ + "END");
+
+ try {
+ // Test getProcedures - find our procedure by exact name
+ try (ResultSet procedures = metaData.getProcedures(catalog, schema, procName)) {
+ assertTrue(procedures.next(), "Should find the created procedure");
+ assertEquals(catalog, procedures.getString("PROCEDURE_CAT"));
+ assertEquals(schema, procedures.getString("PROCEDURE_SCHEM"));
+ assertEquals(procName, procedures.getString("PROCEDURE_NAME"));
+ assertEquals("Test procedure for JDBC integration tests", procedures.getString("REMARKS"));
+ assertEquals(1, procedures.getShort("PROCEDURE_TYPE"), "Should be SQL_PT_PROCEDURE");
+ assertEquals(procName, procedures.getString("SPECIFIC_NAME"));
+ assertFalse(procedures.next(), "Should be exactly one match");
+ }
+
+ // Test getProcedures - pattern matching
+ try (ResultSet procedures = metaData.getProcedures(catalog, schema, "jdbc_test_%")) {
+ assertTrue(procedures.next(), "Pattern should match our procedure");
+ }
+
+ // Test getProcedureColumns - all parameters
+ try (ResultSet columns = metaData.getProcedureColumns(catalog, schema, procName, "%")) {
+ // Parameter x (IN, DOUBLE)
+ assertTrue(columns.next(), "Should have parameter x");
+ assertEquals(procName, columns.getString("PROCEDURE_NAME"));
+ assertEquals("x", columns.getString("COLUMN_NAME"));
+ assertEquals(1, columns.getShort("COLUMN_TYPE"), "x should be SQL_PARAM_INPUT");
+ assertEquals(8, columns.getInt("DATA_TYPE"), "DOUBLE maps to SQL type code 8");
+ assertEquals("DOUBLE", columns.getString("TYPE_NAME"));
+ assertEquals(0, columns.getInt("ORDINAL_POSITION"));
+
+ // Parameter y (IN, DOUBLE)
+ assertTrue(columns.next(), "Should have parameter y");
+ assertEquals("y", columns.getString("COLUMN_NAME"));
+ assertEquals(1, columns.getShort("COLUMN_TYPE"), "y should be SQL_PARAM_INPUT");
+
+ // Parameter area (OUT, DOUBLE)
+ assertTrue(columns.next(), "Should have parameter area");
+ assertEquals("area", columns.getString("COLUMN_NAME"));
+ assertEquals(4, columns.getShort("COLUMN_TYPE"), "area should be SQL_PARAM_OUTPUT");
+
+ assertFalse(columns.next(), "Should have exactly 3 parameters");
+ }
+
+ // Test getProcedureColumns - filter by column name
+ try (ResultSet columns = metaData.getProcedureColumns(catalog, schema, procName, "area")) {
+ assertTrue(columns.next(), "Should find the 'area' parameter");
+ assertEquals("area", columns.getString("COLUMN_NAME"));
+ assertEquals(4, columns.getShort("COLUMN_TYPE"), "area should be SQL_PARAM_OUTPUT");
+ assertFalse(columns.next(), "Should be exactly one match");
+ }
+
+ } finally {
+ executeSQL(connection, "DROP PROCEDURE IF EXISTS " + catalog + "." + schema + "." + procName);
+ }
+ }
+
@Test
void testMetadataOperationsWithHyphenatedIdentifiers() throws SQLException {
assumeTrue(isSqlExecSdkClient(), "This test only runs for SQL Execution API");
diff --git a/src/test/resources/cloudfetchapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164916z_689e65a4-0eb8-4021-a1b9-ba57027605dc-ff17bdaf-78a9-4a58-8d34-ce77f1589736.json b/src/test/resources/cloudfetchapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164916z_689e65a4-0eb8-4021-a1b9-ba57027605dc-ff17bdaf-78a9-4a58-8d34-ce77f1589736.json
new file mode 100644
index 0000000000..090df9a6f7
--- /dev/null
+++ b/src/test/resources/cloudfetchapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164916z_689e65a4-0eb8-4021-a1b9-ba57027605dc-ff17bdaf-78a9-4a58-8d34-ce77f1589736.json
@@ -0,0 +1,25 @@
+{
+ "id" : "ff17bdaf-78a9-4a58-8d34-ce77f1589736",
+ "name" : "oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164916z_689e65a4-0eb8-4021-a1b9-ba57027605dc",
+ "request" : {
+ "url" : "/oregon-staging/6051921418418893.jobs/sql/extended/results_2026-02-28T16%3A49%3A16Z_689e65a4-0eb8-4021-a1b9-ba57027605dc?[REDACTED]X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20260227T154916Z&X-Amz-SignedHeaders=host&X-Amz-Expires=899&[REDACTED]X-Amz-Signature=591a240a91d37cd1d8851014c0bb3685a56dd09b3acefdbe5a0387c53f65bce7",
+ "method" : "GET"
+ },
+ "response" : {
+ "status" : 200,
+ "base64Body" : "BCJNGHRwjvsAAACg/////4gBAAAQAAEAsAoADgAGAA0ACAAKDwAiAAQYAKEBCgAMAAAACAAEGAARCAwAASEA8QMABQAAABABAADAAAAAhAAAAFA2AKQAABb///8UAAAABAAAKwACZAACAgD1EAT///8NAAAAc3BlY2lmaWNfbmFtZQASABgAFAATABJwABcSRAAATABIAAAFAUgA/wJM////BwAAAGNvbW1lbnQAjngADEB8////zABycm91dGluZXcATwAAAMY4AAxXtP///w44AGBzY2hlbWGQAAK0AD8AABO0AAIQGCMAMAAABSIABAIAEQQCADcAAA9QAIBjYXRhbG9nACmPNjgAAAAA7SBirAQiTRh0cI5RAQAAov////+YAQAAFAABANAMABYABgAFAAgADAAMEwCAAwQAGAAAAMgMAAECAPEICgAYAAwABAAIAAoAAAAMAQAAEAAAAAEcAAICABIPBwABAgAEGAARCA4ABggAADQAAAIAEAQFACEAAGQAAAIABDAAECANAAcwABEoDgAGOAAQOA0AB3gAEUAOAAYwABBIDQBBAAAAFgYAMAAAYAUABzAAEWgOAAYwABBwDQBBAAAAKQYAMAAAoAUABzAAEagOAAYwABCwDQAHYAAAAgAXBTAABAIABNAABAIADxAAKQAsAUBtYWluTAAISAAATAH4AWpkYmNfdGVzdF9zY2hlbWEgAAAEAQYgAMpjb21wdXRlX2FyZWFIAAD8APAVVGVzdCBwcm9jZWR1cmUgZm9yIEpEQkMgaW50ZWdyYXRpb24gZwAQc4UAC/AAD2gABFByZWEAABFJ+a8AAAAA/dhIQgQiTRh0cI4IAACA/////wAAAACGhpIIAAAAAIaGkgg=",
+ "headers" : {
+ "Accept-Ranges" : "bytes",
+ "Server" : "AmazonS3",
+ "ETag" : "\"f98ed378fcc39e30de450313a7aae5c9\"",
+ "Last-Modified" : "Fri, 27 Feb 2026 15:49:17 GMT",
+ "x-amz-request-id" : "BRH8HHKD18H7J65A",
+ "x-amz-server-side-encryption" : "AES256",
+ "x-amz-id-2" : "TmCoacHSFDKAtP+eKHWXsyHz9w4vxg8+QMxhCwRd2Jr5ydHyW7BUMRWbKABVwVqrqu10zqmAjmON3ZjHrWAgeaexEDwDF62N4JIL4olh6dw=",
+ "Date" : "Fri, 27 Feb 2026 15:49:18 GMT",
+ "Content-Type" : "binary/octet-stream"
+ }
+ },
+ "uuid" : "ff17bdaf-78a9-4a58-8d34-ce77f1589736",
+ "insertionIndex" : 4
+}
\ No newline at end of file
diff --git a/src/test/resources/cloudfetchapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164920z_07f2c854-dc4a-4f97-ba88-49725454824a-71fda4a6-6195-4ecb-b6e0-1294445d3f5c.json b/src/test/resources/cloudfetchapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164920z_07f2c854-dc4a-4f97-ba88-49725454824a-71fda4a6-6195-4ecb-b6e0-1294445d3f5c.json
new file mode 100644
index 0000000000..31204d3ef5
--- /dev/null
+++ b/src/test/resources/cloudfetchapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164920z_07f2c854-dc4a-4f97-ba88-49725454824a-71fda4a6-6195-4ecb-b6e0-1294445d3f5c.json
@@ -0,0 +1,25 @@
+{
+ "id" : "71fda4a6-6195-4ecb-b6e0-1294445d3f5c",
+ "name" : "oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164920z_07f2c854-dc4a-4f97-ba88-49725454824a",
+ "request" : {
+ "url" : "/oregon-staging/6051921418418893.jobs/sql/extended/results_2026-02-28T16%3A49%3A20Z_07f2c854-dc4a-4f97-ba88-49725454824a?[REDACTED]X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20260227T154920Z&X-Amz-SignedHeaders=host&X-Amz-Expires=899&[REDACTED]X-Amz-Signature=3540b7cdf950b82e6811ce1ebe78f6440a0db384d1970f21747d4c0ddbfdd049",
+ "method" : "GET"
+ },
+ "response" : {
+ "status" : 200,
+ "base64Body" : "BCJNGHRwjvsAAACg/////4gBAAAQAAEAsAoADgAGAA0ACAAKDwAiAAQYAKEBCgAMAAAACAAEGAARCAwAASEA8QMABQAAABABAADAAAAAhAAAAFA2AKQAABb///8UAAAABAAAKwACZAACAgD1EAT///8NAAAAc3BlY2lmaWNfbmFtZQASABgAFAATABJwABcSRAAATABIAAAFAUgA/wJM////BwAAAGNvbW1lbnQAjngADEB8////zABycm91dGluZXcATwAAAMY4AAxXtP///w44AGBzY2hlbWGQAAK0AD8AABO0AAIQGCMAMAAABSIABAIAEQQCADcAAA9QAIBjYXRhbG9nACmPNjgAAAAA7SBirAQiTRh0cI5RAQAAov////+YAQAAFAABANAMABYABgAFAAgADAAMEwCAAwQAGAAAAMgMAAECAPEICgAYAAwABAAIAAoAAAAMAQAAEAAAAAEcAAICABIPBwABAgAEGAARCA4ABggAADQAAAIAEAQFACEAAGQAAAIABDAAECANAAcwABEoDgAGOAAQOA0AB3gAEUAOAAYwABBIDQBBAAAAFgYAMAAAYAUABzAAEWgOAAYwABBwDQBBAAAAKQYAMAAAoAUABzAAEagOAAYwABCwDQAHYAAAAgAXBTAABAIABNAABAIADxAAKQAsAUBtYWluTAAISAAATAH4AWpkYmNfdGVzdF9zY2hlbWEgAAAEAQYgAMpjb21wdXRlX2FyZWFIAAD8APAVVGVzdCBwcm9jZWR1cmUgZm9yIEpEQkMgaW50ZWdyYXRpb24gZwAQc4UAC/AAD2gABFByZWEAABFJ+a8AAAAA/dhIQgQiTRh0cI4IAACA/////wAAAACGhpIIAAAAAIaGkgg=",
+ "headers" : {
+ "Accept-Ranges" : "bytes",
+ "Server" : "AmazonS3",
+ "ETag" : "\"f98ed378fcc39e30de450313a7aae5c9\"",
+ "Last-Modified" : "Fri, 27 Feb 2026 15:49:21 GMT",
+ "x-amz-request-id" : "RRJXP0WGS9DKF1K5",
+ "x-amz-server-side-encryption" : "AES256",
+ "x-amz-id-2" : "GziQFfta7apLDHiGjI15nC0Z80n0ik167S0zuTNlmhsamX6/LIZigq8e2/JX9DRXq1zQqdovdfEVt8Mu3Jtai/SCRy2LchgjDNRQpkTlyfc=",
+ "Date" : "Fri, 27 Feb 2026 15:49:22 GMT",
+ "Content-Type" : "binary/octet-stream"
+ }
+ },
+ "uuid" : "71fda4a6-6195-4ecb-b6e0-1294445d3f5c",
+ "insertionIndex" : 3
+}
\ No newline at end of file
diff --git a/src/test/resources/cloudfetchapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164925z_4eeb4f54-014b-4555-a062-1fe9c646c5ae-8b56451a-af84-4083-a0ed-cd195cb8da96.json b/src/test/resources/cloudfetchapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164925z_4eeb4f54-014b-4555-a062-1fe9c646c5ae-8b56451a-af84-4083-a0ed-cd195cb8da96.json
new file mode 100644
index 0000000000..58cda8f1ac
--- /dev/null
+++ b/src/test/resources/cloudfetchapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164925z_4eeb4f54-014b-4555-a062-1fe9c646c5ae-8b56451a-af84-4083-a0ed-cd195cb8da96.json
@@ -0,0 +1,25 @@
+{
+ "id" : "8b56451a-af84-4083-a0ed-cd195cb8da96",
+ "name" : "oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164925z_4eeb4f54-014b-4555-a062-1fe9c646c5ae",
+ "request" : {
+ "url" : "/oregon-staging/6051921418418893.jobs/sql/extended/results_2026-02-28T16%3A49%3A25Z_4eeb4f54-014b-4555-a062-1fe9c646c5ae?[REDACTED]X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20260227T154925Z&X-Amz-SignedHeaders=host&X-Amz-Expires=899&[REDACTED]X-Amz-Signature=b3fe17c3e20d0c625c1e647f6279aa837fc469168af242261e9d8b7fdddc1db4",
+ "method" : "GET"
+ },
+ "response" : {
+ "status" : 200,
+ "base64Body" : "BCJNGHRwjoECAACg/////4AEAAAQAAEAsAoADgAGAA0ACAAKDwAiAAQYAKEBCgAMAAAACAAEGAARCAwAASEAAiAA8SkEAACwAwAAdAMAADgDAADoAgAAsAIAAHgCAAA8AgAA7AEAAKABAABcAQAADAEAAMAAAAB4AAAAOGIApAAADv3//xQAAAAEAEIAAAUBkAACAgD/AkD8//8HAAAAY29tbWVudAA+MAAM/A9w/P//EQAAAHBhcmFtZXRlcl9kZWZhdWx0AAAAvvxsADIAAhhnAAECAJOg/v//AAAAASDcAPABb3JkaW5hbF9wb3NpdGlvbiUAHb6AACgCAUQAE+REAFBAAAAAFrgAUGhhcmFjiAD8AW9jdGV0X2xlbmd0aAAABv6MAApIACIs/4wAAEgAAGAABkgAdW1heGltdW1KAD8AAFJMAAwTeEwAANgA/wYNAAAAbnVtZXJpY19zY2FsZQAAAJJAAAwXuEAAFxdAAPcCcHJlY2lzaW9uX3JhZGl4ANpIABAcmQAhAAJMAQQCAHQIAAwACAAHQAIBHAAArAEEkAAFUABHAAAAakwAABwCSAAAAAUYAv8KWP7//w4AAABmdWxsX2RhdGFfdHlwZQAAojgADIeQ/v//CQAAADMALwDWNAAME8Q0AGJpc19yZXNMAiwK/8ABCqAAE/igAAaIAjBtb2RtAaUAEgAYABQAEwASWAMXEgADAuwACgQDIkT/7AAGTAB/bmFtZQAAjoQADEB8////8AGDc3BlY2lmaWM3AC8AxjgADFi0////DzgAZXNjaGVtYbwAPwAAE7wAAgCoAgAIAQAiAAQCABEEAgACZAQFiADwAGNhdGFsb2cAAAAAAAAAAC4TbCgAAAAANUB0CQQiTRh0cI6dAgAAov/////4AwAAFAABANAMABYABgAFAAgADAAMEwCQAwQAGAAAAGACDQAAAgDwCAoAGAAMAAQACAAKAAAAvAIAABAAAAADGwADAgATKggAAAIAEAEFAEEAAAAIBgACLAAAAgAAVAAAAgABZABBAAAAKAYABjAAEDANAAcwABFADgAGGAAQcA0ABzAAEXgOAAYwABCIDQBBAAAAQgYAMAAA0AUABzAAEdgOAAYwABDoDQBBAAAABgYAMAAA8AUABzAAEfgOAAYwABMI2QAQBxUAAKgAAxAAAwcAIwAYCQAAHAEAAgATKBAABGAAEzAQAAMHACMAOAkABDAAE0gQABASPQAAgAEDEAADBwAjAGgJAAQwABN4EAAEMAATkBAAAwcAIwCYCQAEcAETqBAAAwcAIwCwCQAEIAATwBAAAwcAIwDICQAEIAAT2BAAAwcAIwDgCQAE2AET+BAAAwcAIwAAQAIEIAATGBAABCAAEyAQAARgABMwEAAEIAATOBAABPAAE0gQAAQCAAQQAAQwABNQIAAEMAAGwAIGAgAAHAAIuAIAAgAPEABlBHgABAgABAIABBAADwgAHQQCAA84AA0EwAIAAgATBKgDAIQBRG1haW4EAAAcAAgoAAA4ARMgjAP/AWpkYmNfdGVzdF9zY2hlbWEQAA0ISABTFgAAACykAwY4ALdjb21wdXRlX2FyZU4ADxYADwCmAAqoAAAMAgAHAgB0AyB4eVAACiAAABwAAPQAAJQAcUlOSU5PVVQMAAQCAAQgAABAACBOTwIAAiwABAIAABgAACwBAHQDaERPVUJMRQYAAiYACFAACDAAaGRvdWJsZQYAAjAADwIABQRIAACUAAQEAA8CAEkEcAAAAgAESAEPAgAcUAAAAAAAdJiwWQAAAADp3QkHBCJNGHRwjggAAID/////AAAAAIaGkggAAAAAhoaSCA==",
+ "headers" : {
+ "Accept-Ranges" : "bytes",
+ "Server" : "AmazonS3",
+ "ETag" : "\"0b6b057178d123f5dbede6d7f15c5f60\"",
+ "Last-Modified" : "Fri, 27 Feb 2026 15:49:26 GMT",
+ "x-amz-request-id" : "9F8Q9H3HK013WBDD",
+ "x-amz-server-side-encryption" : "AES256",
+ "x-amz-id-2" : "dqNec4B2Urx3HaW9Af2xj8QB0I/Z6CkDjy3kED9X6AKjjVw+A6NblRk32vNCx6f4EYvD+v6QORus/08CxnsvAyDySyT7qYjeYM2lZ8gGUAY=",
+ "Date" : "Fri, 27 Feb 2026 15:49:27 GMT",
+ "Content-Type" : "binary/octet-stream"
+ }
+ },
+ "uuid" : "8b56451a-af84-4083-a0ed-cd195cb8da96",
+ "insertionIndex" : 2
+}
\ No newline at end of file
diff --git a/src/test/resources/cloudfetchapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164928z_569c2bc1-2c43-4d5e-bb58-3a12eaa119eb-34a44455-4998-432f-a241-3c63efcd1770.json b/src/test/resources/cloudfetchapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164928z_569c2bc1-2c43-4d5e-bb58-3a12eaa119eb-34a44455-4998-432f-a241-3c63efcd1770.json
new file mode 100644
index 0000000000..48b4866ae7
--- /dev/null
+++ b/src/test/resources/cloudfetchapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164928z_569c2bc1-2c43-4d5e-bb58-3a12eaa119eb-34a44455-4998-432f-a241-3c63efcd1770.json
@@ -0,0 +1,25 @@
+{
+ "id" : "34a44455-4998-432f-a241-3c63efcd1770",
+ "name" : "oregon-staging_6051921418418893.jobs_sql_extended_results_2026-02-28t164928z_569c2bc1-2c43-4d5e-bb58-3a12eaa119eb",
+ "request" : {
+ "url" : "/oregon-staging/6051921418418893.jobs/sql/extended/results_2026-02-28T16%3A49%3A28Z_569c2bc1-2c43-4d5e-bb58-3a12eaa119eb?[REDACTED]X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20260227T154928Z&X-Amz-SignedHeaders=host&X-Amz-Expires=899&[REDACTED]X-Amz-Signature=9c1bb47ff0c7fe781e9a98827ca3a744537d0685705235ca96fa902a4d680cfb",
+ "method" : "GET"
+ },
+ "response" : {
+ "status" : 200,
+ "base64Body" : "BCJNGHRwjoECAACg/////4AEAAAQAAEAsAoADgAGAA0ACAAKDwAiAAQYAKEBCgAMAAAACAAEGAARCAwAASEAAiAA8SkEAACwAwAAdAMAADgDAADoAgAAsAIAAHgCAAA8AgAA7AEAAKABAABcAQAADAEAAMAAAAB4AAAAOGIApAAADv3//xQAAAAEAEIAAAUBkAACAgD/AkD8//8HAAAAY29tbWVudAA+MAAM/A9w/P//EQAAAHBhcmFtZXRlcl9kZWZhdWx0AAAAvvxsADIAAhhnAAECAJOg/v//AAAAASDcAPABb3JkaW5hbF9wb3NpdGlvbiUAHb6AACgCAUQAE+REAFBAAAAAFrgAUGhhcmFjiAD8AW9jdGV0X2xlbmd0aAAABv6MAApIACIs/4wAAEgAAGAABkgAdW1heGltdW1KAD8AAFJMAAwTeEwAANgA/wYNAAAAbnVtZXJpY19zY2FsZQAAAJJAAAwXuEAAFxdAAPcCcHJlY2lzaW9uX3JhZGl4ANpIABAcmQAhAAJMAQQCAHQIAAwACAAHQAIBHAAArAEEkAAFUABHAAAAakwAABwCSAAAAAUYAv8KWP7//w4AAABmdWxsX2RhdGFfdHlwZQAAojgADIeQ/v//CQAAADMALwDWNAAME8Q0AGJpc19yZXNMAiwK/8ABCqAAE/igAAaIAjBtb2RtAaUAEgAYABQAEwASWAMXEgADAuwACgQDIkT/7AAGTAB/bmFtZQAAjoQADEB8////8AGDc3BlY2lmaWM3AC8AxjgADFi0////DzgAZXNjaGVtYbwAPwAAE7wAAgCoAgAIAQAiAAQCABEEAgACZAQFiADwAGNhdGFsb2cAAAAAAAAAAC4TbCgAAAAANUB0CQQiTRh0cI5HAgAAov/////4AwAAFAABANAMABYABgAFAAgADAAMEwCQAwQAGAAAAFgBDQAAAgD1BwoAGAAMAAQACAAKAAAAvAIAABAAAAAfAEEAAAAqBgACAgAEGAASCA8ABQgAADQAAAIAEAQFACEAAGQAAAIABDAAECANAAcwABEoDgAGOAAQOA0AB3gAEUAOAAZoABBIDQBBAAAAFgYAMAAAYAUABzAAEWgOAAYwABBwDQAHkAAReA4ABpAAEIANAAeQABGIDgAwAAADBQBBAAAAkAYABjAAEJgNAAcwABGgDgAwAAACBQBBAAAAqAYABjAAELANAAcwABG4DgAwAAAGBQBBAAAAwAYABjAAEMgNAAcwABHQDgAGMAAQ2A0AB/AAEeAOAAbgABDoDQAHIAAR8A4ABiAAEPgNAAcgAAThAQSwARMIqQEDBwAjABAJAATgARMYEAADBwAjACAJAAQgABMoEAADBwAjADAJAARgABM4EAADBwAjAEAJAARAABNIEAAEAgAEEAAEFwATUAkABDAABsACBgIAAIQCAycABQIADxAAZQR4AAQIAAQCAAQQAA8IAB0EAgAPOAAVAAIAAHwBQG1haW4MAAQ4AAACAAAoAfgBamRiY190ZXN0X3NjaGVtYSAAAGQDBiAAymNvbXB1dGVfYXJlYUgAAGAAAhYAChgAAEQDME9VVBcABGcCAQIAACwDIU5PCwAJGAAAFANqRE9VQkxFSAAAGABiZG91YmxlNAAIAgAEsAAAVAAPAgAhD0AAGFAAAAAAADQL8GQAAAAAcAK/FwQiTRh0cI4IAACA/////wAAAACGhpIIAAAAAIaGkgg=",
+ "headers" : {
+ "Accept-Ranges" : "bytes",
+ "Server" : "AmazonS3",
+ "ETag" : "\"e1a6ffae5c9ba5fe05912be97c14514c\"",
+ "Last-Modified" : "Fri, 27 Feb 2026 15:49:29 GMT",
+ "x-amz-request-id" : "EMFYTXP3GDFT3R69",
+ "x-amz-server-side-encryption" : "AES256",
+ "x-amz-id-2" : "FUHA6THhdYua+jhnAWwiiP74AtG5tWPeHtTwaihlG7bujhmsDAbG7djjle1VUK+SQVxLkya4+4v9VvTEoNulDUOvRlRwFQKZIPtXBMp12Wk=",
+ "Date" : "Fri, 27 Feb 2026 15:49:31 GMT",
+ "Content-Type" : "binary/octet-stream"
+ }
+ },
+ "uuid" : "34a44455-4998-432f-a241-3c63efcd1770",
+ "insertionIndex" : 1
+}
\ No newline at end of file
diff --git a/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_sessions-b9f97ecd-5477-47de-ad83-6eaa1a00f57c.json b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_sessions-b9f97ecd-5477-47de-ad83-6eaa1a00f57c.json
new file mode 100644
index 0000000000..da05110f36
--- /dev/null
+++ b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_sessions-b9f97ecd-5477-47de-ad83-6eaa1a00f57c.json
@@ -0,0 +1,38 @@
+{
+ "id" : "b9f97ecd-5477-47de-ad83-6eaa1a00f57c",
+ "name" : "api_2.0_sql_sessions",
+ "request" : {
+ "url" : "/api/2.0/sql/sessions/",
+ "method" : "POST",
+ "bodyPatterns" : [ {
+ "equalToJson" : "{\"warehouse_id\":\"dd43ee29fedd958d\",\"schema\":\"default\",\"catalog\":\"SPARK\"}",
+ "ignoreArrayOrder" : true,
+ "ignoreExtraElements" : true
+ } ]
+ },
+ "response" : {
+ "status" : 200,
+ "body" : "{\"session_id\":\"01f113f3-dbfa-1457-b8fa-f01e05a73cd0\"}",
+ "headers" : {
+ "x-request-id" : "3b5476e7-1b0b-428b-a3cd-39cf990b0f31",
+ "date" : "Fri, 27 Feb 2026 15:49:11 GMT",
+ "server" : "databricks",
+ "x-databricks-popp-response-code-details" : "via_upstream",
+ "x-databricks-shard-debug" : "oregon-staging",
+ "vary" : "Accept-Encoding",
+ "x-databricks-popp-fast-path-routing-reason" : "not_eligible",
+ "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false",
+ "x-databricks-upstream-cluster" : "oregon-staging-h2",
+ "x-databricks-org-id" : "6051921418418893",
+ "strict-transport-security" : "max-age=31536000; includeSubDomains; preload",
+ "x-content-type-options" : "nosniff",
+ "x-databricks-popp-routing-reason" : "deployment-name",
+ "content-type" : "application/json",
+ "server-timing" : "request_id;dur=0;desc=\"3b5476e7-1b0b-428b-a3cd-39cf990b0f31\", client_protocol;dur=0;desc=\"HTTP/1.1\"",
+ "alt-svc" : "h3=\":5443\"; ma=86400, h3-29=\":5443\"; ma=86400",
+ "x-databricks-apiproxy-response-code-details" : "via_upstream"
+ }
+ },
+ "uuid" : "b9f97ecd-5477-47de-ad83-6eaa1a00f57c",
+ "insertionIndex" : 10
+}
\ No newline at end of file
diff --git a/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_sessions_01f113f3-dbfa-1457-b8fa-f01e05a73cd0-0e1eaea3-fa89-4465-8a41-1e25e840dfb7.json b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_sessions_01f113f3-dbfa-1457-b8fa-f01e05a73cd0-0e1eaea3-fa89-4465-8a41-1e25e840dfb7.json
new file mode 100644
index 0000000000..f314206379
--- /dev/null
+++ b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_sessions_01f113f3-dbfa-1457-b8fa-f01e05a73cd0-0e1eaea3-fa89-4465-8a41-1e25e840dfb7.json
@@ -0,0 +1,32 @@
+{
+ "id" : "0e1eaea3-fa89-4465-8a41-1e25e840dfb7",
+ "name" : "api_2.0_sql_sessions_01f113f3-dbfa-1457-b8fa-f01e05a73cd0",
+ "request" : {
+ "url" : "/api/2.0/sql/sessions/01f113f3-dbfa-1457-b8fa-f01e05a73cd0?warehouse_id=dd43ee29fedd958d",
+ "method" : "DELETE"
+ },
+ "response" : {
+ "status" : 200,
+ "body" : "{}",
+ "headers" : {
+ "x-request-id" : "20c98752-6ca1-4fed-bada-6d5e16d645f9",
+ "date" : "Fri, 27 Feb 2026 15:49:35 GMT",
+ "server" : "databricks",
+ "x-databricks-popp-response-code-details" : "via_upstream",
+ "x-databricks-shard-debug" : "oregon-staging",
+ "x-databricks-popp-fast-path-routing-reason" : "not_eligible",
+ "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false",
+ "x-databricks-upstream-cluster" : "oregon-staging-h2",
+ "x-databricks-org-id" : "6051921418418893",
+ "strict-transport-security" : "max-age=31536000; includeSubDomains; preload",
+ "x-content-type-options" : "nosniff",
+ "x-databricks-popp-routing-reason" : "deployment-name",
+ "content-type" : "application/json",
+ "server-timing" : "request_id;dur=0;desc=\"20c98752-6ca1-4fed-bada-6d5e16d645f9\", client_protocol;dur=0;desc=\"HTTP/1.1\"",
+ "alt-svc" : "h3=\":5443\"; ma=86400, h3-29=\":5443\"; ma=86400",
+ "x-databricks-apiproxy-response-code-details" : "via_upstream"
+ }
+ },
+ "uuid" : "0e1eaea3-fa89-4465-8a41-1e25e840dfb7",
+ "insertionIndex" : 1
+}
\ No newline at end of file
diff --git a/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-4ecc7065-13f6-4a73-975f-f41897b1bc4f.json b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-4ecc7065-13f6-4a73-975f-f41897b1bc4f.json
new file mode 100644
index 0000000000..6b0b6aa505
--- /dev/null
+++ b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-4ecc7065-13f6-4a73-975f-f41897b1bc4f.json
@@ -0,0 +1,38 @@
+{
+ "id" : "4ecc7065-13f6-4a73-975f-f41897b1bc4f",
+ "name" : "api_2.0_sql_statements",
+ "request" : {
+ "url" : "/api/2.0/sql/statements/",
+ "method" : "POST",
+ "bodyPatterns" : [ {
+ "equalToJson" : "{\"statement\":\"CREATE OR REPLACE PROCEDURE main.jdbc_test_schema.jdbc_test_compute_area(x DOUBLE, y DOUBLE, OUT area DOUBLE)\\nLANGUAGE SQL\\nSQL SECURITY INVOKER\\nCOMMENT 'Test procedure for JDBC integration tests'\\nAS BEGIN\\n SET area = x * y;\\nEND\",\"warehouse_id\":\"dd43ee29fedd958d\",\"session_id\":\"01f113f3-dbfa-1457-b8fa-f01e05a73cd0\",\"disposition\":\"EXTERNAL_LINKS\",\"format\":\"ARROW_STREAM\",\"on_wait_timeout\":\"CONTINUE\",\"parameters\":[],\"result_compression\":\"LZ4_FRAME\"}",
+ "ignoreArrayOrder" : true,
+ "ignoreExtraElements" : true
+ } ]
+ },
+ "response" : {
+ "status" : 200,
+ "body" : "{\"statement_id\":\"01f113f3-dcc3-1501-987b-3a6cc9773eee\",\"status\":{\"state\":\"SUCCEEDED\"},\"manifest\":{\"format\":\"ARROW_STREAM\",\"schema\":{\"column_count\":0},\"total_chunk_count\":0,\"total_row_count\":0,\"total_byte_count\":0,\"truncated\":false,\"result_compression\":\"LZ4_FRAME\"},\"result\":{}}",
+ "headers" : {
+ "x-request-id" : "7a28c72b-a138-44e7-8c2a-000d5049bbbd",
+ "date" : "Fri, 27 Feb 2026 15:49:13 GMT",
+ "server" : "databricks",
+ "x-databricks-popp-response-code-details" : "via_upstream",
+ "x-databricks-shard-debug" : "oregon-staging",
+ "vary" : "Accept-Encoding",
+ "x-databricks-popp-fast-path-routing-reason" : "not_eligible",
+ "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false",
+ "x-databricks-upstream-cluster" : "oregon-staging-h2",
+ "x-databricks-org-id" : "6051921418418893",
+ "strict-transport-security" : "max-age=31536000; includeSubDomains; preload",
+ "x-content-type-options" : "nosniff",
+ "x-databricks-popp-routing-reason" : "deployment-name",
+ "content-type" : "application/json",
+ "server-timing" : "request_id;dur=0;desc=\"7a28c72b-a138-44e7-8c2a-000d5049bbbd\", client_protocol;dur=0;desc=\"HTTP/1.1\"",
+ "alt-svc" : "h3=\":5443\"; ma=86400, h3-29=\":5443\"; ma=86400",
+ "x-databricks-apiproxy-response-code-details" : "via_upstream"
+ }
+ },
+ "uuid" : "4ecc7065-13f6-4a73-975f-f41897b1bc4f",
+ "insertionIndex" : 9
+}
\ No newline at end of file
diff --git a/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-508838af-30ec-411f-86e2-830b55318032.json b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-508838af-30ec-411f-86e2-830b55318032.json
new file mode 100644
index 0000000000..70ebfc3a51
--- /dev/null
+++ b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-508838af-30ec-411f-86e2-830b55318032.json
@@ -0,0 +1,38 @@
+{
+ "id" : "508838af-30ec-411f-86e2-830b55318032",
+ "name" : "api_2.0_sql_statements",
+ "request" : {
+ "url" : "/api/2.0/sql/statements/",
+ "method" : "POST",
+ "bodyPatterns" : [ {
+ "equalToJson" : "{\"statement\":\"SELECT routine_catalog, routine_schema, routine_name, comment, specific_name FROM `main`.information_schema.routines WHERE routine_type = 'PROCEDURE' AND routine_schema LIKE 'jdbc_test_schema' AND routine_name LIKE 'jdbc_test_compute_area' ORDER BY routine_catalog, routine_schema, routine_name\",\"warehouse_id\":\"dd43ee29fedd958d\",\"session_id\":\"01f113f3-dbfa-1457-b8fa-f01e05a73cd0\",\"disposition\":\"EXTERNAL_LINKS\",\"format\":\"ARROW_STREAM\",\"on_wait_timeout\":\"CONTINUE\",\"parameters\":[],\"result_compression\":\"LZ4_FRAME\"}",
+ "ignoreArrayOrder" : true,
+ "ignoreExtraElements" : true
+ } ]
+ },
+ "response" : {
+ "status" : 200,
+ "body" : "{\"statement_id\":\"01f113f3-de1f-148b-bfcf-c5ae0ae13505\",\"status\":{\"state\":\"SUCCEEDED\"},\"manifest\":{\"format\":\"ARROW_STREAM\",\"schema\":{\"column_count\":5,\"columns\":[{\"name\":\"routine_catalog\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":0},{\"name\":\"routine_schema\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":1},{\"name\":\"routine_name\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":2},{\"name\":\"comment\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":3},{\"name\":\"specific_name\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":4}]},\"total_chunk_count\":1,\"chunks\":[{\"chunk_index\":0,\"row_offset\":0,\"row_count\":1,\"byte_count\":1024}],\"total_row_count\":1,\"total_byte_count\":1024,\"truncated\":false,\"result_compression\":\"LZ4_FRAME\"},\"result\":{\"external_links\":[{\"chunk_index\":0,\"row_offset\":0,\"row_count\":1,\"byte_count\":665,\"external_link\":\"https://e2-dogfood-core.s3.us-west-2.amazonaws.com/oregon-staging/6051921418418893.jobs/sql/extended/results_2026-02-28T16%3A49%3A16Z_689e65a4-0eb8-4021-a1b9-ba57027605dc?[REDACTED]X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20260227T154916Z&X-Amz-SignedHeaders=host&X-Amz-Expires=899&[REDACTED]X-Amz-Signature=591a240a91d37cd1d8851014c0bb3685a56dd09b3acefdbe5a0387c53f65bce7\",\"expiration\":\"2026-02-27T16:04:16.633Z\"}]}}",
+ "headers" : {
+ "x-request-id" : "73961c44-3cbc-4a11-861e-d0f7ae5e5bd4",
+ "date" : "Fri, 27 Feb 2026 15:49:16 GMT",
+ "server" : "databricks",
+ "x-databricks-popp-response-code-details" : "via_upstream",
+ "x-databricks-shard-debug" : "oregon-staging",
+ "vary" : "Accept-Encoding",
+ "x-databricks-popp-fast-path-routing-reason" : "not_eligible",
+ "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false",
+ "x-databricks-upstream-cluster" : "oregon-staging-h2",
+ "x-databricks-org-id" : "6051921418418893",
+ "strict-transport-security" : "max-age=31536000; includeSubDomains; preload",
+ "x-content-type-options" : "nosniff",
+ "x-databricks-popp-routing-reason" : "deployment-name",
+ "content-type" : "application/json",
+ "server-timing" : "request_id;dur=0;desc=\"73961c44-3cbc-4a11-861e-d0f7ae5e5bd4\", client_protocol;dur=0;desc=\"HTTP/1.1\"",
+ "alt-svc" : "h3=\":5443\"; ma=86400, h3-29=\":5443\"; ma=86400",
+ "x-databricks-apiproxy-response-code-details" : "via_upstream"
+ }
+ },
+ "uuid" : "508838af-30ec-411f-86e2-830b55318032",
+ "insertionIndex" : 8
+}
\ No newline at end of file
diff --git a/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-870d642a-3155-4d9d-8eca-93f608f9daa5.json b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-870d642a-3155-4d9d-8eca-93f608f9daa5.json
new file mode 100644
index 0000000000..e2f2c350eb
--- /dev/null
+++ b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-870d642a-3155-4d9d-8eca-93f608f9daa5.json
@@ -0,0 +1,38 @@
+{
+ "id" : "870d642a-3155-4d9d-8eca-93f608f9daa5",
+ "name" : "api_2.0_sql_statements",
+ "request" : {
+ "url" : "/api/2.0/sql/statements/",
+ "method" : "POST",
+ "bodyPatterns" : [ {
+ "equalToJson" : "{\"statement\":\"SELECT routine_catalog, routine_schema, routine_name, comment, specific_name FROM `main`.information_schema.routines WHERE routine_type = 'PROCEDURE' AND routine_schema LIKE 'jdbc_test_schema' AND routine_name LIKE 'jdbc_test_%' ORDER BY routine_catalog, routine_schema, routine_name\",\"warehouse_id\":\"dd43ee29fedd958d\",\"session_id\":\"01f113f3-dbfa-1457-b8fa-f01e05a73cd0\",\"disposition\":\"EXTERNAL_LINKS\",\"format\":\"ARROW_STREAM\",\"on_wait_timeout\":\"CONTINUE\",\"parameters\":[],\"result_compression\":\"LZ4_FRAME\"}",
+ "ignoreArrayOrder" : true,
+ "ignoreExtraElements" : true
+ } ]
+ },
+ "response" : {
+ "status" : 200,
+ "body" : "{\"statement_id\":\"01f113f3-e0c8-18ab-87b0-6d84889a7540\",\"status\":{\"state\":\"SUCCEEDED\"},\"manifest\":{\"format\":\"ARROW_STREAM\",\"schema\":{\"column_count\":5,\"columns\":[{\"name\":\"routine_catalog\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":0},{\"name\":\"routine_schema\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":1},{\"name\":\"routine_name\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":2},{\"name\":\"comment\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":3},{\"name\":\"specific_name\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":4}]},\"total_chunk_count\":1,\"chunks\":[{\"chunk_index\":0,\"row_offset\":0,\"row_count\":1,\"byte_count\":1024}],\"total_row_count\":1,\"total_byte_count\":1024,\"truncated\":false,\"result_compression\":\"LZ4_FRAME\"},\"result\":{\"external_links\":[{\"chunk_index\":0,\"row_offset\":0,\"row_count\":1,\"byte_count\":665,\"external_link\":\"https://e2-dogfood-core.s3.us-west-2.amazonaws.com/oregon-staging/6051921418418893.jobs/sql/extended/results_2026-02-28T16%3A49%3A20Z_07f2c854-dc4a-4f97-ba88-49725454824a?[REDACTED]X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20260227T154920Z&X-Amz-SignedHeaders=host&X-Amz-Expires=899&[REDACTED]X-Amz-Signature=3540b7cdf950b82e6811ce1ebe78f6440a0db384d1970f21747d4c0ddbfdd049\",\"expiration\":\"2026-02-27T16:04:20.478Z\"}]}}",
+ "headers" : {
+ "x-request-id" : "858a2818-a9e3-4611-a393-ad1d0891fed3",
+ "date" : "Fri, 27 Feb 2026 15:49:20 GMT",
+ "server" : "databricks",
+ "x-databricks-popp-response-code-details" : "via_upstream",
+ "x-databricks-shard-debug" : "oregon-staging",
+ "vary" : "Accept-Encoding",
+ "x-databricks-popp-fast-path-routing-reason" : "not_eligible",
+ "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false",
+ "x-databricks-upstream-cluster" : "oregon-staging-h2",
+ "x-databricks-org-id" : "6051921418418893",
+ "strict-transport-security" : "max-age=31536000; includeSubDomains; preload",
+ "x-content-type-options" : "nosniff",
+ "x-databricks-popp-routing-reason" : "deployment-name",
+ "content-type" : "application/json",
+ "server-timing" : "request_id;dur=0;desc=\"858a2818-a9e3-4611-a393-ad1d0891fed3\", client_protocol;dur=0;desc=\"HTTP/1.1\"",
+ "alt-svc" : "h3=\":5443\"; ma=86400, h3-29=\":5443\"; ma=86400",
+ "x-databricks-apiproxy-response-code-details" : "via_upstream"
+ }
+ },
+ "uuid" : "870d642a-3155-4d9d-8eca-93f608f9daa5",
+ "insertionIndex" : 7
+}
\ No newline at end of file
diff --git a/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-b21e9430-296e-4023-8328-f28887f37db8.json b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-b21e9430-296e-4023-8328-f28887f37db8.json
new file mode 100644
index 0000000000..4561bf13db
--- /dev/null
+++ b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-b21e9430-296e-4023-8328-f28887f37db8.json
@@ -0,0 +1,38 @@
+{
+ "id" : "b21e9430-296e-4023-8328-f28887f37db8",
+ "name" : "api_2.0_sql_statements",
+ "request" : {
+ "url" : "/api/2.0/sql/statements/",
+ "method" : "POST",
+ "bodyPatterns" : [ {
+ "equalToJson" : "{\"statement\":\"DROP PROCEDURE IF EXISTS main.jdbc_test_schema.jdbc_test_compute_area\",\"warehouse_id\":\"dd43ee29fedd958d\",\"session_id\":\"01f113f3-dbfa-1457-b8fa-f01e05a73cd0\",\"disposition\":\"EXTERNAL_LINKS\",\"format\":\"ARROW_STREAM\",\"on_wait_timeout\":\"CONTINUE\",\"parameters\":[],\"result_compression\":\"LZ4_FRAME\"}",
+ "ignoreArrayOrder" : true,
+ "ignoreExtraElements" : true
+ } ]
+ },
+ "response" : {
+ "status" : 200,
+ "body" : "{\"statement_id\":\"01f113f3-e7d6-14de-9e32-e020b45e7c9e\",\"status\":{\"state\":\"SUCCEEDED\"},\"manifest\":{\"format\":\"ARROW_STREAM\",\"schema\":{\"column_count\":0},\"total_chunk_count\":0,\"total_row_count\":0,\"total_byte_count\":0,\"truncated\":false,\"result_compression\":\"LZ4_FRAME\"},\"result\":{}}",
+ "headers" : {
+ "x-request-id" : "41702296-c1ca-409a-aeae-4a9469b95059",
+ "date" : "Fri, 27 Feb 2026 15:49:31 GMT",
+ "server" : "databricks",
+ "x-databricks-popp-response-code-details" : "via_upstream",
+ "x-databricks-shard-debug" : "oregon-staging",
+ "vary" : "Accept-Encoding",
+ "x-databricks-popp-fast-path-routing-reason" : "not_eligible",
+ "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false",
+ "x-databricks-upstream-cluster" : "oregon-staging-h2",
+ "x-databricks-org-id" : "6051921418418893",
+ "strict-transport-security" : "max-age=31536000; includeSubDomains; preload",
+ "x-content-type-options" : "nosniff",
+ "x-databricks-popp-routing-reason" : "deployment-name",
+ "content-type" : "application/json",
+ "server-timing" : "request_id;dur=0;desc=\"41702296-c1ca-409a-aeae-4a9469b95059\", client_protocol;dur=0;desc=\"HTTP/1.1\"",
+ "alt-svc" : "h3=\":5443\"; ma=86400, h3-29=\":5443\"; ma=86400",
+ "x-databricks-apiproxy-response-code-details" : "via_upstream"
+ }
+ },
+ "uuid" : "b21e9430-296e-4023-8328-f28887f37db8",
+ "insertionIndex" : 4
+}
\ No newline at end of file
diff --git a/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-eca6b1d3-04fe-4462-a75f-e1b2ff30bb16.json b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-eca6b1d3-04fe-4462-a75f-e1b2ff30bb16.json
new file mode 100644
index 0000000000..2074b0a337
--- /dev/null
+++ b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-eca6b1d3-04fe-4462-a75f-e1b2ff30bb16.json
@@ -0,0 +1,38 @@
+{
+ "id" : "eca6b1d3-04fe-4462-a75f-e1b2ff30bb16",
+ "name" : "api_2.0_sql_statements",
+ "request" : {
+ "url" : "/api/2.0/sql/statements/",
+ "method" : "POST",
+ "bodyPatterns" : [ {
+ "equalToJson" : "{\"statement\":\"SELECT p.specific_catalog, p.specific_schema, p.specific_name, p.parameter_name, p.parameter_mode, p.is_result, p.data_type, p.full_data_type, p.numeric_precision, p.numeric_precision_radix, p.numeric_scale, p.character_maximum_length, p.character_octet_length, p.ordinal_position, p.parameter_default, p.comment FROM `main`.information_schema.parameters p JOIN `main`.information_schema.routines r ON p.specific_catalog = r.specific_catalog AND p.specific_schema = r.specific_schema AND p.specific_name = r.specific_name WHERE r.routine_type = 'PROCEDURE' AND p.specific_schema LIKE 'jdbc_test_schema' AND p.specific_name LIKE 'jdbc_test_compute_area' AND p.parameter_name LIKE '%' ORDER BY p.specific_catalog, p.specific_schema, p.specific_name, p.ordinal_position\",\"warehouse_id\":\"dd43ee29fedd958d\",\"session_id\":\"01f113f3-dbfa-1457-b8fa-f01e05a73cd0\",\"disposition\":\"EXTERNAL_LINKS\",\"format\":\"ARROW_STREAM\",\"on_wait_timeout\":\"CONTINUE\",\"parameters\":[],\"result_compression\":\"LZ4_FRAME\"}",
+ "ignoreArrayOrder" : true,
+ "ignoreExtraElements" : true
+ } ]
+ },
+ "response" : {
+ "status" : 200,
+ "body" : "{\"statement_id\":\"01f113f3-e2f7-166d-b972-fb1f4edf28d0\",\"status\":{\"state\":\"SUCCEEDED\"},\"manifest\":{\"format\":\"ARROW_STREAM\",\"schema\":{\"column_count\":16,\"columns\":[{\"name\":\"specific_catalog\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":0},{\"name\":\"specific_schema\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":1},{\"name\":\"specific_name\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":2},{\"name\":\"parameter_name\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":3},{\"name\":\"parameter_mode\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":4},{\"name\":\"is_result\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":5},{\"name\":\"data_type\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":6},{\"name\":\"full_data_type\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":7},{\"name\":\"numeric_precision\",\"type_text\":\"INT\",\"type_name\":\"INT\",\"position\":8},{\"name\":\"numeric_precision_radix\",\"type_text\":\"INT\",\"type_name\":\"INT\",\"position\":9},{\"name\":\"numeric_scale\",\"type_text\":\"INT\",\"type_name\":\"INT\",\"position\":10},{\"name\":\"character_maximum_length\",\"type_text\":\"BIGINT\",\"type_name\":\"LONG\",\"position\":11},{\"name\":\"character_octet_length\",\"type_text\":\"BIGINT\",\"type_name\":\"LONG\",\"position\":12},{\"name\":\"ordinal_position\",\"type_text\":\"INT\",\"type_name\":\"INT\",\"position\":13},{\"name\":\"parameter_default\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":14},{\"name\":\"comment\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":15}]},\"total_chunk_count\":1,\"chunks\":[{\"chunk_index\":0,\"row_offset\":0,\"row_count\":3,\"byte_count\":2800}],\"total_row_count\":3,\"total_byte_count\":2800,\"truncated\":false,\"result_compression\":\"LZ4_FRAME\"},\"result\":{\"external_links\":[{\"chunk_index\":0,\"row_offset\":0,\"
row_count\":3,\"byte_count\":1387,\"external_link\":\"https://e2-dogfood-core.s3.us-west-2.amazonaws.com/oregon-staging/6051921418418893.jobs/sql/extended/results_2026-02-28T16%3A49%3A25Z_4eeb4f54-014b-4555-a062-1fe9c646c5ae?[REDACTED]X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20260227T154925Z&X-Amz-SignedHeaders=host&X-Amz-Expires=899&[REDACTED]X-Amz-Signature=b3fe17c3e20d0c625c1e647f6279aa837fc469168af242261e9d8b7fdddc1db4\",\"expiration\":\"2026-02-27T16:04:25.164Z\"}]}}",
+ "headers" : {
+ "x-request-id" : "8985fa71-7f9d-4f52-8f46-033939bf59d6",
+ "date" : "Fri, 27 Feb 2026 15:49:25 GMT",
+ "server" : "databricks",
+ "x-databricks-popp-response-code-details" : "via_upstream",
+ "x-databricks-shard-debug" : "oregon-staging",
+ "vary" : "Accept-Encoding",
+ "x-databricks-popp-fast-path-routing-reason" : "not_eligible",
+ "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false",
+ "x-databricks-upstream-cluster" : "oregon-staging-h2",
+ "x-databricks-org-id" : "6051921418418893",
+ "strict-transport-security" : "max-age=31536000; includeSubDomains; preload",
+ "x-content-type-options" : "nosniff",
+ "x-databricks-popp-routing-reason" : "deployment-name",
+ "content-type" : "application/json",
+ "server-timing" : "request_id;dur=0;desc=\"8985fa71-7f9d-4f52-8f46-033939bf59d6\", client_protocol;dur=0;desc=\"HTTP/1.1\"",
+ "alt-svc" : "h3=\":5443\"; ma=86400, h3-29=\":5443\"; ma=86400",
+ "x-databricks-apiproxy-response-code-details" : "via_upstream"
+ }
+ },
+ "uuid" : "eca6b1d3-04fe-4462-a75f-e1b2ff30bb16",
+ "insertionIndex" : 6
+}
\ No newline at end of file
diff --git a/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-f16fdfd9-2ade-477e-a5e4-247ed86f8041.json b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-f16fdfd9-2ade-477e-a5e4-247ed86f8041.json
new file mode 100644
index 0000000000..01a81d4ecb
--- /dev/null
+++ b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements-f16fdfd9-2ade-477e-a5e4-247ed86f8041.json
@@ -0,0 +1,38 @@
+{
+ "id" : "f16fdfd9-2ade-477e-a5e4-247ed86f8041",
+ "name" : "api_2.0_sql_statements",
+ "request" : {
+ "url" : "/api/2.0/sql/statements/",
+ "method" : "POST",
+ "bodyPatterns" : [ {
+ "equalToJson" : "{\"statement\":\"SELECT p.specific_catalog, p.specific_schema, p.specific_name, p.parameter_name, p.parameter_mode, p.is_result, p.data_type, p.full_data_type, p.numeric_precision, p.numeric_precision_radix, p.numeric_scale, p.character_maximum_length, p.character_octet_length, p.ordinal_position, p.parameter_default, p.comment FROM `main`.information_schema.parameters p JOIN `main`.information_schema.routines r ON p.specific_catalog = r.specific_catalog AND p.specific_schema = r.specific_schema AND p.specific_name = r.specific_name WHERE r.routine_type = 'PROCEDURE' AND p.specific_schema LIKE 'jdbc_test_schema' AND p.specific_name LIKE 'jdbc_test_compute_area' AND p.parameter_name LIKE 'area' ORDER BY p.specific_catalog, p.specific_schema, p.specific_name, p.ordinal_position\",\"warehouse_id\":\"dd43ee29fedd958d\",\"session_id\":\"01f113f3-dbfa-1457-b8fa-f01e05a73cd0\",\"disposition\":\"EXTERNAL_LINKS\",\"format\":\"ARROW_STREAM\",\"on_wait_timeout\":\"CONTINUE\",\"parameters\":[],\"result_compression\":\"LZ4_FRAME\"}",
+ "ignoreArrayOrder" : true,
+ "ignoreExtraElements" : true
+ } ]
+ },
+ "response" : {
+ "status" : 200,
+ "body" : "{\"statement_id\":\"01f113f3-e5a9-1a2d-bafa-5a491450cee1\",\"status\":{\"state\":\"SUCCEEDED\"},\"manifest\":{\"format\":\"ARROW_STREAM\",\"schema\":{\"column_count\":16,\"columns\":[{\"name\":\"specific_catalog\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":0},{\"name\":\"specific_schema\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":1},{\"name\":\"specific_name\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":2},{\"name\":\"parameter_name\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":3},{\"name\":\"parameter_mode\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":4},{\"name\":\"is_result\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":5},{\"name\":\"data_type\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":6},{\"name\":\"full_data_type\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":7},{\"name\":\"numeric_precision\",\"type_text\":\"INT\",\"type_name\":\"INT\",\"position\":8},{\"name\":\"numeric_precision_radix\",\"type_text\":\"INT\",\"type_name\":\"INT\",\"position\":9},{\"name\":\"numeric_scale\",\"type_text\":\"INT\",\"type_name\":\"INT\",\"position\":10},{\"name\":\"character_maximum_length\",\"type_text\":\"BIGINT\",\"type_name\":\"LONG\",\"position\":11},{\"name\":\"character_octet_length\",\"type_text\":\"BIGINT\",\"type_name\":\"LONG\",\"position\":12},{\"name\":\"ordinal_position\",\"type_text\":\"INT\",\"type_name\":\"INT\",\"position\":13},{\"name\":\"parameter_default\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":14},{\"name\":\"comment\",\"type_text\":\"STRING\",\"type_name\":\"STRING\",\"position\":15}]},\"total_chunk_count\":1,\"chunks\":[{\"chunk_index\":0,\"row_offset\":0,\"row_count\":1,\"byte_count\":2536}],\"total_row_count\":1,\"total_byte_count\":2536,\"truncated\":false,\"result_compression\":\"LZ4_FRAME\"},\"result\":{\"external_links\":[{\"chunk_index\":0,\"row_offset\":0,\"
row_count\":1,\"byte_count\":1301,\"external_link\":\"https://e2-dogfood-core.s3.us-west-2.amazonaws.com/oregon-staging/6051921418418893.jobs/sql/extended/results_2026-02-28T16%3A49%3A28Z_569c2bc1-2c43-4d5e-bb58-3a12eaa119eb?[REDACTED]X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20260227T154928Z&X-Amz-SignedHeaders=host&X-Amz-Expires=899&[REDACTED]X-Amz-Signature=9c1bb47ff0c7fe781e9a98827ca3a744537d0685705235ca96fa902a4d680cfb\",\"expiration\":\"2026-02-27T16:04:28.870Z\"}]}}",
+ "headers" : {
+ "x-request-id" : "5a22ded3-320c-4b5b-810d-62b870e38ec6",
+ "date" : "Fri, 27 Feb 2026 15:49:28 GMT",
+ "server" : "databricks",
+ "x-databricks-popp-response-code-details" : "via_upstream",
+ "x-databricks-shard-debug" : "oregon-staging",
+ "vary" : "Accept-Encoding",
+ "x-databricks-popp-fast-path-routing-reason" : "not_eligible",
+ "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false",
+ "x-databricks-upstream-cluster" : "oregon-staging-h2",
+ "x-databricks-org-id" : "6051921418418893",
+ "strict-transport-security" : "max-age=31536000; includeSubDomains; preload",
+ "x-content-type-options" : "nosniff",
+ "x-databricks-popp-routing-reason" : "deployment-name",
+ "content-type" : "application/json",
+ "server-timing" : "request_id;dur=0;desc=\"5a22ded3-320c-4b5b-810d-62b870e38ec6\", client_protocol;dur=0;desc=\"HTTP/1.1\"",
+ "alt-svc" : "h3=\":5443\"; ma=86400, h3-29=\":5443\"; ma=86400",
+ "x-databricks-apiproxy-response-code-details" : "via_upstream"
+ }
+ },
+ "uuid" : "f16fdfd9-2ade-477e-a5e4-247ed86f8041",
+ "insertionIndex" : 5
+}
\ No newline at end of file
diff --git a/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements_01f113f3-dcc3-1501-987b-3a6cc9773eee-153bfd8f-90e1-4df4-824d-9b12a426bb50.json b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements_01f113f3-dcc3-1501-987b-3a6cc9773eee-153bfd8f-90e1-4df4-824d-9b12a426bb50.json
new file mode 100644
index 0000000000..90d9bd93c7
--- /dev/null
+++ b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements_01f113f3-dcc3-1501-987b-3a6cc9773eee-153bfd8f-90e1-4df4-824d-9b12a426bb50.json
@@ -0,0 +1,32 @@
+{
+ "id" : "153bfd8f-90e1-4df4-824d-9b12a426bb50",
+ "name" : "api_2.0_sql_statements_01f113f3-dcc3-1501-987b-3a6cc9773eee",
+ "request" : {
+ "url" : "/api/2.0/sql/statements/01f113f3-dcc3-1501-987b-3a6cc9773eee",
+ "method" : "DELETE"
+ },
+ "response" : {
+ "status" : 200,
+ "body" : "{}",
+ "headers" : {
+ "x-request-id" : "501a94b7-8615-4551-8c9a-fce4ea976ae1",
+ "date" : "Fri, 27 Feb 2026 15:49:34 GMT",
+ "server" : "databricks",
+ "x-databricks-popp-response-code-details" : "via_upstream",
+ "x-databricks-shard-debug" : "oregon-staging",
+ "x-databricks-popp-fast-path-routing-reason" : "not_eligible",
+ "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false",
+ "x-databricks-upstream-cluster" : "oregon-staging-h2",
+ "x-databricks-org-id" : "6051921418418893",
+ "strict-transport-security" : "max-age=31536000; includeSubDomains; preload",
+ "x-content-type-options" : "nosniff",
+ "x-databricks-popp-routing-reason" : "deployment-name",
+ "content-type" : "application/json",
+ "server-timing" : "request_id;dur=0;desc=\"501a94b7-8615-4551-8c9a-fce4ea976ae1\", client_protocol;dur=0;desc=\"HTTP/1.1\"",
+ "alt-svc" : "h3=\":5443\"; ma=86400, h3-29=\":5443\"; ma=86400",
+ "x-databricks-apiproxy-response-code-details" : "via_upstream"
+ }
+ },
+ "uuid" : "153bfd8f-90e1-4df4-824d-9b12a426bb50",
+ "insertionIndex" : 2
+}
\ No newline at end of file
diff --git a/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements_01f113f3-e7d6-14de-9e32-e020b45e7c9e-f747f76f-4540-4278-8a5a-fcd039748a17.json b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements_01f113f3-e7d6-14de-9e32-e020b45e7c9e-f747f76f-4540-4278-8a5a-fcd039748a17.json
new file mode 100644
index 0000000000..6637699544
--- /dev/null
+++ b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/api_2.0_sql_statements_01f113f3-e7d6-14de-9e32-e020b45e7c9e-f747f76f-4540-4278-8a5a-fcd039748a17.json
@@ -0,0 +1,32 @@
+{
+ "id" : "f747f76f-4540-4278-8a5a-fcd039748a17",
+ "name" : "api_2.0_sql_statements_01f113f3-e7d6-14de-9e32-e020b45e7c9e",
+ "request" : {
+ "url" : "/api/2.0/sql/statements/01f113f3-e7d6-14de-9e32-e020b45e7c9e",
+ "method" : "DELETE"
+ },
+ "response" : {
+ "status" : 200,
+ "body" : "{}",
+ "headers" : {
+ "x-request-id" : "721c87d3-ecd2-400f-acf3-bd411f6eb86f",
+ "date" : "Fri, 27 Feb 2026 15:49:32 GMT",
+ "server" : "databricks",
+ "x-databricks-popp-response-code-details" : "via_upstream",
+ "x-databricks-shard-debug" : "oregon-staging",
+ "x-databricks-popp-fast-path-routing-reason" : "not_eligible",
+ "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false",
+ "x-databricks-upstream-cluster" : "oregon-staging-h2",
+ "x-databricks-org-id" : "6051921418418893",
+ "strict-transport-security" : "max-age=31536000; includeSubDomains; preload",
+ "x-content-type-options" : "nosniff",
+ "x-databricks-popp-routing-reason" : "deployment-name",
+ "content-type" : "application/json",
+ "server-timing" : "request_id;dur=0;desc=\"721c87d3-ecd2-400f-acf3-bd411f6eb86f\", client_protocol;dur=0;desc=\"HTTP/1.1\"",
+ "alt-svc" : "h3=\":5443\"; ma=86400, h3-29=\":5443\"; ma=86400",
+ "x-databricks-apiproxy-response-code-details" : "via_upstream"
+ }
+ },
+ "uuid" : "f747f76f-4540-4278-8a5a-fcd039748a17",
+ "insertionIndex" : 3
+}
\ No newline at end of file
diff --git a/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/oidc_.well-known_oauth-authorization-server-be7e8572-6195-4608-b385-9344c6b044f6.json b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/oidc_.well-known_oauth-authorization-server-be7e8572-6195-4608-b385-9344c6b044f6.json
new file mode 100644
index 0000000000..5a5e99cb27
--- /dev/null
+++ b/src/test/resources/sqlexecapi/metadataintegrationtests/testgetproceduresandprocedurecolumns/mappings/oidc_.well-known_oauth-authorization-server-be7e8572-6195-4608-b385-9344c6b044f6.json
@@ -0,0 +1,33 @@
+{
+ "id" : "be7e8572-6195-4608-b385-9344c6b044f6",
+ "name" : "oidc_.well-known_oauth-authorization-server",
+ "request" : {
+ "url" : "/oidc/.well-known/oauth-authorization-server",
+ "method" : "GET"
+ },
+ "response" : {
+ "status" : 200,
+ "body" : "{\"authorization_endpoint\":\"https:\\/\\/e2-dogfood.staging.cloud.databricks.com\\/oidc\\/v1\\/authorize\",\"token_endpoint\":\"https:\\/\\/e2-dogfood.staging.cloud.databricks.com\\/oidc\\/v1\\/token\",\"issuer\":\"https:\\/\\/e2-dogfood.staging.cloud.databricks.com\\/oidc\",\"jwks_uri\":\"https:\\/\\/oregon.staging.cloud.databricks.com\\/oidc\\/jwks.json\",\"scopes_supported\":[\"access-management\",\"alerts\",\"all-apis\",\"apps\",\"authentication\",\"billing\",\"cleanrooms\",\"clusters\",\"command-execution\",\"custom-llms\",\"dashboards\",\"dataclassification\",\"dataquality\",\"email\",\"environments\",\"files\",\"forecasting\",\"genie\",\"global-init-scripts\",\"identity\",\"instance-pools\",\"instance-profiles\",\"jobs\",\"libraries\",\"marketplace\",\"mlflow\",\"model-serving\",\"networking\",\"notifications\",\"offline_access\",\"openid\",\"pipelines\",\"postgres\",\"profile\",\"provisioning\",\"qualitymonitor\",\"query-history\",\"scim\",\"secrets\",\"settings\",\"sharing\",\"sql\",\"tags\",\"unity-catalog\",\"vector-search\",\"workspace\"],\"response_types_supported\":[\"code\",\"id_token\"],\"response_modes_supported\":[\"query\",\"fragment\",\"form_post\"],\"grant_types_supported\":[\"client_credentials\",\"authorization_code\",\"refresh_token\"],\"code_challenge_methods_supported\":[\"S256\"],\"token_endpoint_auth_methods_supported\":[\"client_secret_basic\",\"client_secret_post\",\"none\"],\"subject_types_supported\":[\"public\"],\"id_token_signing_alg_values_supported\":[\"RS256\"],\"claims_supported\":[\"iss\",\"sub\",\"aud\",\"iat\",\"exp\",\"jti\",\"name\",\"family_name\",\"given_name\",\"preferred_username\"],\"request_uri_parameter_supported\":false}",
+ "headers" : {
+ "x-request-id" : "478cb8ea-5845-4df0-b820-c6fd6fc122e6",
+ "date" : "Fri, 27 Feb 2026 15:49:09 GMT",
+ "server" : "databricks",
+ "x-databricks-popp-response-code-details" : "via_upstream",
+ "x-databricks-shard-debug" : "oregon-staging",
+ "vary" : "Accept-Encoding",
+ "x-databricks-popp-fast-path-routing-reason" : "not_eligible",
+ "x-databricks-popp-shadow-routing-reason" : "spog-domain-checker-false",
+ "x-databricks-upstream-cluster" : "oregon-staging-h2",
+ "x-databricks-org-id" : "6051921418418893",
+ "strict-transport-security" : "max-age=31536000; includeSubDomains; preload",
+ "x-content-type-options" : "nosniff",
+ "x-databricks-popp-routing-reason" : "deployment-name",
+ "content-type" : "application/json; charset=UTF-8",
+ "server-timing" : "request_id;dur=0;desc=\"478cb8ea-5845-4df0-b820-c6fd6fc122e6\", client_protocol;dur=0;desc=\"HTTP/1.1\"",
+ "alt-svc" : "h3=\":5443\"; ma=86400, h3-29=\":5443\"; ma=86400",
+ "x-databricks-apiproxy-response-code-details" : "via_upstream"
+ }
+ },
+ "uuid" : "be7e8572-6195-4608-b385-9344c6b044f6",
+ "insertionIndex" : 11
+}
\ No newline at end of file