Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -892,21 +892,6 @@ public boolean supportsSharding() throws SQLException {
return false;
}

/**
* Builds the result set for stored procedures metadata.
*
* <p>The result set structure is defined based on the JDBC driver specifications to ensure
* consistency. The following columns are included in the result set:
*
* <ul>
* <li>PROCEDURE_CAT: The catalog of the procedure (String)
* <li>PROCEDURE_SCHEM: The schema of the procedure (String)
* <li>PROCEDURE_NAME: The name of the procedure (String)
* <li>REMARKS: A description or remarks about the procedure (String)
* <li>PROCEDURE_TYPE: The type of procedure (e.g., FUNCTION, PROCEDURE) (String)
* <li>SPECIFIC_NAME: The specific name for the procedure (String)
* </ul>
*/
@Override
public ResultSet getProcedures(String catalog, String schemaPattern, String procedureNamePattern)
throws SQLException {
Expand All @@ -916,44 +901,14 @@ public ResultSet getProcedures(String catalog, String schemaPattern, String proc
schemaPattern,
procedureNamePattern);
throwExceptionIfConnectionIsClosed();
return new DatabricksResultSet(
new StatementStatus().setState(StatementState.SUCCEEDED),
new StatementId("getprocedures-metadata"),
Arrays.asList(
"PROCEDURE_CAT",
"PROCEDURE_SCHEM",
"PROCEDURE_NAME",
"NUM_INPUT_PARAMS",
"NUM_OUTPUT_PARAMS",
"NUM_RESULT_SETS",
"REMARKS",
"PROCEDURE_TYPE",
"SPECIFIC_NAME"),
Arrays.asList(
"VARCHAR",
"VARCHAR",
"VARCHAR",
"INTEGER",
"INTEGER",
"INTEGER",
"VARCHAR",
"SMALLINT",
"VARCHAR"),
new int[] {
Types.VARCHAR,
Types.VARCHAR,
Types.VARCHAR,
Types.INTEGER,
Types.INTEGER,
Types.INTEGER,
Types.VARCHAR,
Types.SMALLINT,
Types.VARCHAR
},
new int[] {128, 128, 128, 10, 10, 10, 254, 5, 128},
new int[] {1, 1, 0, 1, 1, 1, 1, 1, 0},
new Object[0][0],
StatementType.METADATA);
try {
return session
.getDatabricksMetadataClient()
.listProcedures(session, catalog, schemaPattern, procedureNamePattern);
} catch (Exception e) {
LOGGER.error(e, "Unable to fetch procedures, returning empty result set");
return metadataResultSetBuilder.getProceduresResult(new ArrayList<>());
}
}

@Override
Expand All @@ -967,12 +922,15 @@ public ResultSet getProcedureColumns(
procedureNamePattern,
columnNamePattern);
throwExceptionIfConnectionIsClosed();

return metadataResultSetBuilder.getResultSetWithGivenRowsAndColumns(
PROCEDURE_COLUMNS_COLUMNS,
new ArrayList<>(),
METADATA_STATEMENT_ID,
CommandName.GET_PROCEDURES_COLUMNS);
try {
return session
.getDatabricksMetadataClient()
.listProcedureColumns(
session, catalog, schemaPattern, procedureNamePattern, columnNamePattern);
} catch (Exception e) {
LOGGER.error(e, "Unable to fetch procedure columns, returning empty result set");
return metadataResultSetBuilder.getProcedureColumnsResult(new ArrayList<>());
}
}

@Override
Expand Down
3 changes: 2 additions & 1 deletion src/main/java/com/databricks/jdbc/common/CommandName.java
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,8 @@ public enum CommandName {
GET_TABLE_PRIVILEGES,
GET_VERSION_COLUMNS,
GET_SUPER_TYPES,
GET_PROCEDURES_COLUMNS,
LIST_PROCEDURES,
LIST_PROCEDURE_COLUMNS,
GET_INDEX_INFO,
GET_SUPER_TABLES,
GET_FUNCTION_COLUMNS,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,9 @@ public enum MetadataOperationType {
GET_COLUMNS("GetColumns"),
GET_FUNCTIONS("GetFunctions"),
GET_PRIMARY_KEYS("GetPrimaryKeys"),
GET_CROSS_REFERENCE("GetCrossReference");
GET_CROSS_REFERENCE("GetCrossReference"),
GET_PROCEDURES("GetProcedures"),
GET_PROCEDURE_COLUMNS("GetProcedureColumns");

private final String headerValue;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -195,6 +195,14 @@ public class MetadataResultConstants {
private static final ResultColumn RADIX = new ResultColumn("RADIX", "radix", Types.SMALLINT);
private static final ResultColumn NULLABLE_SHORT =
new ResultColumn("NULLABLE", "nullable", Types.SMALLINT);
private static final ResultColumn NUM_INPUT_PARAMS =
new ResultColumn("NUM_INPUT_PARAMS", "numInputParams", Types.INTEGER);
private static final ResultColumn NUM_OUTPUT_PARAMS =
new ResultColumn("NUM_OUTPUT_PARAMS", "numOutputParams", Types.INTEGER);
private static final ResultColumn NUM_RESULT_SETS =
new ResultColumn("NUM_RESULT_SETS", "numResultSets", Types.INTEGER);
private static final ResultColumn PROCEDURE_TYPE =
new ResultColumn("PROCEDURE_TYPE", "procedureType", Types.SMALLINT);
private static final ResultColumn NON_UNIQUE =
new ResultColumn("NON_UNIQUE", "nonUnique", Types.BOOLEAN);
private static final ResultColumn INDEX_QUALIFIER =
Expand Down Expand Up @@ -225,6 +233,18 @@ public class MetadataResultConstants {
FUNCTION_TYPE_COLUMN,
SPECIFIC_NAME_COLUMN);

public static final List<ResultColumn> PROCEDURES_COLUMNS =
List.of(
PROCEDURE_CAT,
PROCEDURE_SCHEM,
PROCEDURE_NAME,
NUM_INPUT_PARAMS,
NUM_OUTPUT_PARAMS,
NUM_RESULT_SETS,
REMARKS_COLUMN,
PROCEDURE_TYPE,
SPECIFIC_NAME_COLUMN);

public static List<ResultColumn> COLUMN_COLUMNS =
List.of(
CATALOG_COLUMN,
Expand Down Expand Up @@ -618,8 +638,9 @@ public class MetadataResultConstants {
CommandName.GET_VERSION_COLUMNS,
List.of(SCOPE, COL_NAME_COLUMN, DATA_TYPE_COLUMN, TYPE_NAME_COLUMN, PSEUDO_COLUMN));
put(CommandName.GET_SUPER_TYPES, List.of(TYPE_NAME_COLUMN, SUPERTYPE_NAME));
put(CommandName.LIST_PROCEDURES, List.of(PROCEDURE_NAME, SPECIFIC_NAME_COLUMN));
put(
CommandName.GET_PROCEDURES_COLUMNS,
CommandName.LIST_PROCEDURE_COLUMNS,
List.of(
PROCEDURE_NAME,
COLUMN_NAME_COLUMN,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,42 @@ DatabricksResultSet listImportedKeys(
DatabricksResultSet listExportedKeys(
IDatabricksSession session, String catalog, String schema, String table) throws SQLException;

/**
 * Returns the list of stored procedures matching the given catalog and patterns.
 *
 * <p>Pattern arguments are SQL LIKE patterns; a null pattern presumably means "match all"
 * (JDBC DatabaseMetaData convention) — TODO confirm against the implementation.
 *
 * @param session underlying session
 * @param catalog catalogName; null means use system catalog
 * @param schemaNamePattern schema name pattern (can be a LIKE pattern)
 * @param procedureNamePattern procedure name pattern (can be a LIKE pattern)
 * @return a DatabricksResultSet representing list of procedures
 * @throws SQLException if the procedure metadata cannot be fetched
 */
@DatabricksMetricsTimed
DatabricksResultSet listProcedures(
IDatabricksSession session,
String catalog,
String schemaNamePattern,
String procedureNamePattern)
throws SQLException;

/**
 * Returns the list of stored procedure columns/parameters matching the given catalog and
 * patterns.
 *
 * <p>Pattern arguments are SQL LIKE patterns; a null pattern presumably means "match all"
 * (JDBC DatabaseMetaData convention) — TODO confirm against the implementation.
 *
 * @param session underlying session
 * @param catalog catalogName; null means use system catalog
 * @param schemaNamePattern schema name pattern (can be a LIKE pattern)
 * @param procedureNamePattern procedure name pattern (can be a LIKE pattern)
 * @param columnNamePattern column/parameter name pattern (can be a LIKE pattern)
 * @return a DatabricksResultSet representing list of procedure columns
 * @throws SQLException if the procedure-column metadata cannot be fetched
 */
@DatabricksMetricsTimed
DatabricksResultSet listProcedureColumns(
IDatabricksSession session,
String catalog,
String schemaNamePattern,
String procedureNamePattern,
String columnNamePattern)
throws SQLException;

/**
* Returns the list of cross references between a parent table and a foreign table
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ public class CommandConstants {
public static final String GET_CATALOGS_STATEMENT_ID = "getcatalogs-metadata";
public static final String GET_TABLE_TYPE_STATEMENT_ID = "gettabletype-metadata";
public static final String GET_FUNCTIONS_STATEMENT_ID = "getfunctions-metadata";
public static final String GET_PROCEDURES_STATEMENT_ID = "getprocedures-metadata";
public static final String GET_PROCEDURE_COLUMNS_STATEMENT_ID = "getprocedurecolumns-metadata";
public static final String SHOW_CATALOGS_SQL = "SHOW CATALOGS";
public static final String SHOW_TABLE_TYPES_SQL = "SHOW TABLE_TYPES";
public static final String IN_CATALOG_SQL = " IN CATALOG `%s`";
Expand All @@ -26,4 +28,75 @@ public class CommandConstants {
"SHOW KEYS" + IN_CATALOG_SQL + IN_ABSOLUTE_SCHEMA_SQL + IN_ABSOLUTE_TABLE_SQL;
public static final String SHOW_FOREIGN_KEYS_SQL =
"SHOW FOREIGN KEYS" + IN_CATALOG_SQL + IN_ABSOLUTE_SCHEMA_SQL + IN_ABSOLUTE_TABLE_SQL;

private static final String INFORMATION_SCHEMA_ROUTINES = "information_schema.routines";
private static final String INFORMATION_SCHEMA_PARAMETERS = "information_schema.parameters";
private static final String PROCEDURE_TYPE_FILTER = "routine_type = 'PROCEDURE'";

private static final String ROUTINES_SELECT_COLUMNS =
"routine_catalog, routine_schema, routine_name, comment, specific_name";

private static final String PARAMETERS_SELECT_COLUMNS =
"p.specific_catalog, p.specific_schema, p.specific_name,"
+ " p.parameter_name, p.parameter_mode, p.is_result,"
+ " p.data_type, p.full_data_type,"
+ " p.numeric_precision, p.numeric_precision_radix, p.numeric_scale,"
+ " p.character_maximum_length, p.character_octet_length,"
+ " p.ordinal_position, p.parameter_default, p.comment";

/**
 * Builds the SQL statement used to list stored procedures from
 * {@code information_schema.routines}.
 *
 * <p>Null patterns add no filter; non-null patterns are embedded as SQL LIKE predicates with
 * quote characters escaped. Rows are ordered by catalog, schema, then procedure name.
 *
 * @param catalog catalog name; null means use the system catalog
 * @param schemaPattern schema name LIKE pattern, or null for all schemas
 * @param procedureNamePattern procedure name LIKE pattern, or null for all procedures
 * @return the SELECT statement text
 */
public static String buildProceduresSQL(
    String catalog, String schemaPattern, String procedureNamePattern) {
  String routinesTable = getCatalogPrefix(catalog) + "." + INFORMATION_SCHEMA_ROUTINES;
  String sql =
      "SELECT "
          + ROUTINES_SELECT_COLUMNS
          + " FROM "
          + routinesTable
          + " WHERE "
          + PROCEDURE_TYPE_FILTER;
  if (schemaPattern != null) {
    sql += " AND routine_schema LIKE '" + escapeSql(schemaPattern) + "'";
  }
  if (procedureNamePattern != null) {
    sql += " AND routine_name LIKE '" + escapeSql(procedureNamePattern) + "'";
  }
  return sql + " ORDER BY routine_catalog, routine_schema, routine_name";
}

/**
 * Builds the SQL statement used to list stored procedure parameters by joining
 * {@code information_schema.parameters} with {@code information_schema.routines}.
 *
 * <p>Null patterns add no filter; non-null patterns are embedded as SQL LIKE predicates with
 * quote characters escaped. Rows are ordered by catalog, schema, specific name, then ordinal
 * position.
 *
 * @param catalog catalog name; null means use the system catalog
 * @param schemaPattern schema name LIKE pattern, or null for all schemas
 * @param procedureNamePattern procedure (specific) name LIKE pattern, or null for all
 * @param columnNamePattern parameter name LIKE pattern, or null for all parameters
 * @return the SELECT statement text
 */
public static String buildProcedureColumnsSQL(
    String catalog, String schemaPattern, String procedureNamePattern, String columnNamePattern) {
  String prefix = getCatalogPrefix(catalog);
  String sql =
      "SELECT "
          + PARAMETERS_SELECT_COLUMNS
          + " FROM "
          + prefix
          + "."
          + INFORMATION_SCHEMA_PARAMETERS
          + " p"
          + " JOIN "
          + prefix
          + "."
          + INFORMATION_SCHEMA_ROUTINES
          + " r"
          + " ON p.specific_catalog = r.specific_catalog"
          + " AND p.specific_schema = r.specific_schema"
          + " AND p.specific_name = r.specific_name"
          + " WHERE r."
          + PROCEDURE_TYPE_FILTER;
  if (schemaPattern != null) {
    sql += " AND p.specific_schema LIKE '" + escapeSql(schemaPattern) + "'";
  }
  if (procedureNamePattern != null) {
    sql += " AND p.specific_name LIKE '" + escapeSql(procedureNamePattern) + "'";
  }
  if (columnNamePattern != null) {
    sql += " AND p.parameter_name LIKE '" + escapeSql(columnNamePattern) + "'";
  }
  return sql + " ORDER BY p.specific_catalog, p.specific_schema, p.specific_name, p.ordinal_position";
}

/**
 * Escapes characters that are special inside a Databricks SQL string literal, to prevent SQL
 * injection through the embedded LIKE patterns.
 *
 * <p>Backslashes are doubled first, then single quotes are doubled. Escaping only the quote is
 * insufficient: Databricks/Spark SQL treats backslash as an escape character inside string
 * literals, so a value ending in {@code \} would turn the doubled quote {@code ''} into an
 * escaped quote followed by a live quote, terminating the literal early. Doubling the
 * backslash also preserves JDBC-style escaped wildcards ({@code \%}, {@code \_}) instead of
 * having the literal parser consume the backslash.
 *
 * @param value raw value to embed between single quotes in generated SQL
 * @return the escaped value, safe to place inside a single-quoted literal
 */
private static String escapeSql(String value) {
  return value.replace("\\", "\\\\").replace("'", "''");
}

private static String getCatalogPrefix(String catalog) {
return (catalog == null) ? "system" : "`" + catalog + "`";
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

what if catalogName already contains a backtick character? Should we escape that?

}
}
Loading
Loading