Package org.apache.spark.sql.jdbc
Class NoopDialect
Object
org.apache.spark.sql.jdbc.NoopDialect
NOOP dialect object, always returning the neutral element.
- 
Constructor SummaryConstructors
- 
Method Summary
(One entry per line: modifier and type, then method. Parameter lists that were lost in extraction are marked with "(…)"; names of truncated entries were reconstructed from the Method Details section below, which lists the same members in alphabetical order.)
static String[] alterTable(String tableName, scala.collection.immutable.Seq<TableChange> changes, int dbMajorVersion)
static void beforeFetch(Connection connection, scala.collection.immutable.Map<String,String> properties)
static boolean canHandle(…)
static AnalysisException classifyException(String message, Throwable e)
static Throwable classifyException(Throwable e, String condition, scala.collection.immutable.Map<String,String> messageParameters, String description, boolean isRuntime)
static scala.Option<String> compileAggregate(AggregateFunc aggFunction)
static scala.Option<String> compileExpression(Expression expr)
static Object compileValue(Object value)
static Date convertJavaDateToDate(…)
static Timestamp convertJavaTimestampToTimestamp(…)
static LocalDateTime convertJavaTimestampToTimestampNTZ(…)
static Timestamp convertTimestampNTZToJavaTimestamp(…)
static scala.Function1<Object,Connection> createConnectionFactory(org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions options)
static String createIndex(String indexName, Identifier tableIdent, NamedReference[] columns, Map<NamedReference,Map<String,String>> columnsProperties, Map<String,String> properties)
static void createSchema(Statement statement, String schema, String comment)
static void createTable(Statement statement, String tableName, String strSchema, org.apache.spark.sql.execution.datasources.jdbc.JdbcOptionsInWrite options)
static String dropIndex(String indexName, Identifier tableIdent)
static String dropSchema(String schema, boolean cascade)
static String dropTable(…)
static scala.collection.immutable.Seq<scala.Tuple2<String,UnboundFunction>> functions(…)
static String getAddColumnQuery(String tableName, String columnName, String dataType)
static scala.Option<DataType> getCatalystType(int sqlType, String typeName, int size, MetadataBuilder md)
static long getDayTimeIntervalAsMicros(String daytimeStr)
static String getDeleteColumnQuery(String tableName, String columnName)
static String getFullyQualifiedQuotedTableName(…)
static JdbcSQLQueryBuilder getJdbcSQLQueryBuilder(org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions options)
static scala.Option<JdbcType> getJDBCType(DataType dt)
static String getLimitClause(Integer limit)
static String getOffsetClause(Integer offset)
static String getRenameColumnQuery(String tableName, String columnName, String newName, int dbMajorVersion)
static String getSchemaCommentQuery(String schema, String comment)
static String getSchemaQuery(String table)
static String getTableCommentQuery(String table, String comment)
static String getTableExistsQuery(String table)
static String getTableSample(org.apache.spark.sql.execution.datasources.v2.TableSampleInfo sample)
static String getTruncateQuery(String table)
static String getTruncateQuery(String table, scala.Option<Object> cascade)
static scala.Option<Object> getTruncateQuery$default$2(…)
static String getUpdateColumnNullabilityQuery(String tableName, String columnName, boolean isNullable)
static String getUpdateColumnTypeQuery(String tableName, String columnName, String newDataType)
static int getYearMonthIntervalAsMonths(String yearmonthStr)
static boolean indexExists(Connection conn, String indexName, Identifier tableIdent, org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions options)
static String insertIntoTable(String table, StructField[] fields)
static scala.Option<Object> isCascadingTruncateTable(…)
static boolean isObjectNotFoundException(…)
static boolean isSupportedFunction(String funcName)
static boolean isSyntaxErrorBestEffort(SQLException exception)
static TableIndex[] listIndexes(Connection conn, Identifier tableIdent, org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions options)
static String[][] listSchemas(Connection conn, org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions options)
static org.apache.spark.internal.Logging.LogStringContext LogStringContext(scala.StringContext sc)
static org.slf4j.Logger org$apache$spark$internal$Logging$$log_(…)
static void org$apache$spark$internal$Logging$$log__$eq(org.slf4j.Logger x$1)
static String quoteIdentifier(String colName)
static String removeSchemaCommentQuery(String schema)
static String renameTable(String oldTable, String newTable)
static String renameTable(Identifier oldTable, Identifier newTable)
static boolean schemasExists(Connection conn, org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions options, String schema)
static boolean supportsHint(…)
static boolean supportsJoin(…)
static boolean supportsLimit(…)
static boolean supportsOffset(…)
static boolean supportsTableSample(…)
static void updateExtraColumnMeta(Connection conn, ResultSetMetaData rsmd, int columnIdx, MetadataBuilder metadata)
- 
Constructor Details- 
NoopDialect
public NoopDialect()
 
- 
- 
Method Details- 
canHandle
- 
LogStringContext
public static org.apache.spark.internal.Logging.LogStringContext LogStringContext(scala.StringContext sc)
- 
getCatalystType
public static scala.Option<DataType> getCatalystType(int sqlType, String typeName, int size, MetadataBuilder md)
- 
getJDBCType
- 
convertJavaTimestampToTimestamp
- 
convertJavaDateToDate
- 
getYearMonthIntervalAsMonths
- 
getDayTimeIntervalAsMicros
- 
convertJavaTimestampToTimestampNTZ
- 
convertTimestampNTZToJavaTimestamp
- 
createConnectionFactory
public static scala.Function1<Object,Connection> createConnectionFactory(org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions options)
- 
quoteIdentifier
- 
createTable
- 
insertIntoTable
- 
getTableExistsQuery
- 
getSchemaQuery
- 
getTruncateQuery
- 
getTruncateQuery
- 
getTruncateQuery$default$2
- 
beforeFetch
public static void beforeFetch(Connection connection, scala.collection.immutable.Map<String, String> properties)
- 
compileValue
- 
isSupportedFunction
- 
compileExpression
- 
compileAggregate
- 
functions
- 
createSchema
- 
schemasExists
public static boolean schemasExists(Connection conn, org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions options, String schema)
- 
listSchemas
public static String[][] listSchemas(Connection conn, org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions options)
- 
isCascadingTruncateTable
- 
isSyntaxErrorBestEffort
- 
renameTable
- 
renameTable
- 
alterTable
public static String[] alterTable(String tableName, scala.collection.immutable.Seq<TableChange> changes, int dbMajorVersion)
- 
getAddColumnQuery
- 
getRenameColumnQuery
- 
getDeleteColumnQuery
- 
getUpdateColumnTypeQuery
- 
getUpdateColumnNullabilityQuery
- 
getTableCommentQuery
- 
getSchemaCommentQuery
- 
removeSchemaCommentQuery
- 
dropSchema
- 
dropTable
- 
createIndex
public static String createIndex(String indexName, Identifier tableIdent, NamedReference[] columns, Map<NamedReference, Map<String, String>> columnsProperties, Map<String, String> properties)
- 
indexExists
public static boolean indexExists(Connection conn, String indexName, Identifier tableIdent, org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions options)
- 
dropIndex
- 
listIndexes
public static TableIndex[] listIndexes(Connection conn, Identifier tableIdent, org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions options)
- 
isObjectNotFoundException
- 
classifyException
- 
classifyException
- 
getLimitClause
- 
getOffsetClause
- 
getJdbcSQLQueryBuilder
public static JdbcSQLQueryBuilder getJdbcSQLQueryBuilder(org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions options)
- 
supportsLimit
public static boolean supportsLimit()
- 
supportsOffset
public static boolean supportsOffset()
- 
supportsTableSample
public static boolean supportsTableSample()
- 
getTableSample
public static String getTableSample(org.apache.spark.sql.execution.datasources.v2.TableSampleInfo sample)
- 
supportsHint
public static boolean supportsHint()
- 
supportsJoin
public static boolean supportsJoin()
- 
getFullyQualifiedQuotedTableName
- 
updateExtraColumnMeta
public static void updateExtraColumnMeta(Connection conn, ResultSetMetaData rsmd, int columnIdx, MetadataBuilder metadata)
- 
org$apache$spark$internal$Logging$$log_
public static org.slf4j.Logger org$apache$spark$internal$Logging$$log_()
- 
org$apache$spark$internal$Logging$$log__$eq
public static void org$apache$spark$internal$Logging$$log__$eq(org.slf4j.Logger x$1)
 
-