Class QueryCompilationErrors

java.lang.Object
  org.apache.spark.sql.errors.QueryCompilationErrors

public class QueryCompilationErrors extends Object
Object for grouping error messages from exceptions thrown during query compilation. Because commands are executed eagerly, this also covers errors thrown while commands run, which surface to users immediately.
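Each static method below is a factory that builds the exception; the call site decides when to throw it. A minimal sketch of that pattern, using the documented groupingSizeTooLargeError(int sizeLimit) signature (the surrounding check, checkGroupingSize, and its parameters are illustrative and not part of Spark):

```scala
import org.apache.spark.sql.errors.QueryCompilationErrors

// Hypothetical analysis-time validation: reject grouping sets wider than the limit.
// Only the error-factory call mirrors the signature documented on this page.
def checkGroupingSize(numGroupingExprs: Int, sizeLimit: Int): Unit = {
  if (numGroupingExprs > sizeLimit) {
    // The factory returns a Throwable carrying the error class and message
    // parameters; the caller is responsible for actually throwing it.
    throw QueryCompilationErrors.groupingSizeTooLargeError(sizeLimit)
  }
}
```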
  • Constructor Details

    • QueryCompilationErrors

      public QueryCompilationErrors()
  • Method Details

    • unexpectedRequiredParameterInFunctionSignature

      public static Throwable unexpectedRequiredParameterInFunctionSignature(String functionName, org.apache.spark.sql.catalyst.plans.logical.FunctionSignature functionSignature)
    • namedArgumentsNotSupported

      public static Throwable namedArgumentsNotSupported(String functionName)
    • positionalAndNamedArgumentDoubleReference

      public static Throwable positionalAndNamedArgumentDoubleReference(String functionName, String parameterName)
    • doubleNamedArgumentReference

      public static Throwable doubleNamedArgumentReference(String functionName, String parameterName)
    • requiredParameterNotFound

      public static Throwable requiredParameterNotFound(String functionName, String parameterName, int index)
    • unrecognizedParameterName

      public static Throwable unrecognizedParameterName(String functionName, String argumentName, scala.collection.immutable.Seq<String> candidates)
    • unexpectedPositionalArgument

      public static Throwable unexpectedPositionalArgument(String functionName, String precedingNamedArgument)
    • groupingIDMismatchError

      public static Throwable groupingIDMismatchError(org.apache.spark.sql.catalyst.expressions.GroupingID groupingID, scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.Expression> groupByExprs)
    • groupingColInvalidError

      public static Throwable groupingColInvalidError(org.apache.spark.sql.catalyst.expressions.Expression groupingCol, scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.Expression> groupByExprs)
    • groupingSizeTooLargeError

      public static Throwable groupingSizeTooLargeError(int sizeLimit)
    • zeroArgumentIndexError

      public static Throwable zeroArgumentIndexError()
    • binaryFormatError

      public static Throwable binaryFormatError(String funcName, String invalidFormat)
    • nullArgumentError

      public static Throwable nullArgumentError(String funcName, String parameter)
    • unorderablePivotColError

      public static Throwable unorderablePivotColError(org.apache.spark.sql.catalyst.expressions.Expression pivotCol)
    • nonLiteralPivotValError

      public static Throwable nonLiteralPivotValError(org.apache.spark.sql.catalyst.expressions.Expression pivotVal)
    • pivotValDataTypeMismatchError

      public static Throwable pivotValDataTypeMismatchError(org.apache.spark.sql.catalyst.expressions.Expression pivotVal, org.apache.spark.sql.catalyst.expressions.Expression pivotCol)
    • unpivotRequiresAttributes

      public static Throwable unpivotRequiresAttributes(String given, String empty, scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression> expressions)
    • unpivotRequiresValueColumns

      public static Throwable unpivotRequiresValueColumns()
    • unpivotValueSizeMismatchError

      public static Throwable unpivotValueSizeMismatchError(int names)
    • unpivotValueDataTypeMismatchError

      public static Throwable unpivotValueDataTypeMismatchError(scala.collection.immutable.Seq<scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression>> values)
    • unsupportedIfNotExistsError

      public static Throwable unsupportedIfNotExistsError(String tableName)
    • nonPartitionColError

      public static Throwable nonPartitionColError(String partitionName)
    • missingStaticPartitionColumn

      public static Throwable missingStaticPartitionColumn(String staticName)
    • staticPartitionInUserSpecifiedColumnsError

      public static Throwable staticPartitionInUserSpecifiedColumnsError(String staticName)
    • nestedGeneratorError

      public static Throwable nestedGeneratorError(org.apache.spark.sql.catalyst.expressions.Expression trimmedNestedGenerator)
    • moreThanOneGeneratorError

      public static Throwable moreThanOneGeneratorError(scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.Expression> generators)
    • generatorOutsideSelectError

      public static Throwable generatorOutsideSelectError(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan)
    • legacyStoreAssignmentPolicyError

      public static Throwable legacyStoreAssignmentPolicyError()
    • namedArgumentsNotEnabledError

      public static Throwable namedArgumentsNotEnabledError(String functionName, String argumentName)
    • collationNotEnabledError

      public static Throwable collationNotEnabledError()
    • unresolvedUsingColForJoinError

      public static Throwable unresolvedUsingColForJoinError(String colName, String suggestion, String side)
    • unresolvedAttributeError

      public static Throwable unresolvedAttributeError(String errorClass, String colName, scala.collection.immutable.Seq<String> candidates, org.apache.spark.sql.catalyst.trees.Origin origin)
    • unresolvedColumnError

      public static Throwable unresolvedColumnError(String columnName, scala.collection.immutable.Seq<String> proposal)
    • unresolvedFieldError

      public static Throwable unresolvedFieldError(String fieldName, scala.collection.immutable.Seq<String> columnPath, scala.collection.immutable.Seq<String> proposal)
    • dataTypeMismatchForDeserializerError

      public static Throwable dataTypeMismatchForDeserializerError(DataType dataType, String desiredType)
    • fieldNumberMismatchForDeserializerError

      public static Throwable fieldNumberMismatchForDeserializerError(StructType schema, int maxOrdinal)
    • upCastFailureError

      public static Throwable upCastFailureError(String fromStr, org.apache.spark.sql.catalyst.expressions.Expression from, DataType to, scala.collection.immutable.Seq<String> walkedTypePath)
    • outerScopeFailureForNewInstanceError

      public static Throwable outerScopeFailureForNewInstanceError(String className)
    • referenceColNotFoundForAlterTableChangesError

      public static Throwable referenceColNotFoundForAlterTableChangesError(String fieldName, String[] fields)
    • windowSpecificationNotDefinedError

      public static Throwable windowSpecificationNotDefinedError(String windowName)
    • selectExprNotInGroupByError

      public static Throwable selectExprNotInGroupByError(org.apache.spark.sql.catalyst.expressions.Expression expr, scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.Alias> groupByAliases)
    • groupingMustWithGroupingSetsOrCubeOrRollupError

      public static Throwable groupingMustWithGroupingSetsOrCubeOrRollupError()
    • pandasUDFAggregateNotSupportedInPivotError

      public static Throwable pandasUDFAggregateNotSupportedInPivotError()
    • aggregateExpressionRequiredForPivotError

      public static Throwable aggregateExpressionRequiredForPivotError(String sql)
    • writeIntoTempViewNotAllowedError

      public static Throwable writeIntoTempViewNotAllowedError(String quoted)
    • readNonStreamingTempViewError

      public static Throwable readNonStreamingTempViewError(String quoted)
    • viewDepthExceedsMaxResolutionDepthError

      public static Throwable viewDepthExceedsMaxResolutionDepthError(org.apache.spark.sql.catalyst.TableIdentifier identifier, int maxNestedDepth, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
    • insertIntoViewNotAllowedError

      public static Throwable insertIntoViewNotAllowedError(org.apache.spark.sql.catalyst.TableIdentifier identifier, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
    • writeIntoViewNotAllowedError

      public static Throwable writeIntoViewNotAllowedError(org.apache.spark.sql.catalyst.TableIdentifier identifier, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
    • writeIntoV1TableNotAllowedError

      public static Throwable writeIntoV1TableNotAllowedError(org.apache.spark.sql.catalyst.TableIdentifier identifier, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
    • expectTableNotViewError

      public static Throwable expectTableNotViewError(scala.collection.immutable.Seq<String> nameParts, String cmd, boolean suggestAlternative, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
    • expectViewNotTableError

      public static Throwable expectViewNotTableError(scala.collection.immutable.Seq<String> nameParts, String cmd, boolean suggestAlternative, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
    • expectPermanentViewNotTempViewError

      public static Throwable expectPermanentViewNotTempViewError(scala.collection.immutable.Seq<String> nameParts, String cmd, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
    • expectPersistentFuncError

      public static Throwable expectPersistentFuncError(String name, String cmd, scala.Option<String> mismatchHint, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
    • permanentViewNotSupportedByStreamingReadingAPIError

      public static Throwable permanentViewNotSupportedByStreamingReadingAPIError(String quoted)
    • starNotAllowedWhenGroupByOrdinalPositionUsedError

      public static Throwable starNotAllowedWhenGroupByOrdinalPositionUsedError()
    • invalidStarUsageError

      public static Throwable invalidStarUsageError(String prettyName, scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.analysis.Star> stars)
    • singleTableStarInCountNotAllowedError

      public static Throwable singleTableStarInCountNotAllowedError(String targetString)
    • orderByPositionRangeError

      public static Throwable orderByPositionRangeError(int index, int size, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
    • groupByPositionRefersToAggregateFunctionError

      public static Throwable groupByPositionRefersToAggregateFunctionError(int index, org.apache.spark.sql.catalyst.expressions.Expression expr)
    • groupByPositionRangeError

      public static Throwable groupByPositionRangeError(int index, int size)
    • generatorNotExpectedError

      public static Throwable generatorNotExpectedError(org.apache.spark.sql.catalyst.FunctionIdentifier name, String classCanonicalName)
    • functionWithUnsupportedSyntaxError

      public static Throwable functionWithUnsupportedSyntaxError(String prettyName, String syntax)
    • subqueryExpressionInLambdaOrHigherOrderFunctionNotAllowedError

      public static Throwable subqueryExpressionInLambdaOrHigherOrderFunctionNotAllowedError()
    • nonDeterministicFilterInAggregateError

      public static Throwable nonDeterministicFilterInAggregateError(org.apache.spark.sql.catalyst.expressions.Expression filterExpr)
    • nonBooleanFilterInAggregateError

      public static Throwable nonBooleanFilterInAggregateError(org.apache.spark.sql.catalyst.expressions.Expression filterExpr)
    • aggregateInAggregateFilterError

      public static Throwable aggregateInAggregateFilterError(org.apache.spark.sql.catalyst.expressions.Expression filterExpr, org.apache.spark.sql.catalyst.expressions.Expression aggExpr)
    • windowFunctionInAggregateFilterError

      public static Throwable windowFunctionInAggregateFilterError(org.apache.spark.sql.catalyst.expressions.Expression filterExpr, org.apache.spark.sql.catalyst.expressions.Expression windowExpr)
    • distinctInverseDistributionFunctionUnsupportedError

      public static Throwable distinctInverseDistributionFunctionUnsupportedError(String funcName)
    • inverseDistributionFunctionMissingWithinGroupError

      public static Throwable inverseDistributionFunctionMissingWithinGroupError(String funcName)
    • wrongNumOrderingsForInverseDistributionFunctionError

      public static Throwable wrongNumOrderingsForInverseDistributionFunctionError(String funcName, int validOrderingsNumber, int actualOrderingsNumber)
    • aliasNumberNotMatchColumnNumberError

      public static Throwable aliasNumberNotMatchColumnNumberError(int columnSize, int outputSize, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
    • aliasesNumberNotMatchUDTFOutputError

      public static Throwable aliasesNumberNotMatchUDTFOutputError(int aliasesSize, String aliasesNames)
    • invalidSortOrderInUDTFOrderingColumnFromAnalyzeMethodHasAlias

      public static Throwable invalidSortOrderInUDTFOrderingColumnFromAnalyzeMethodHasAlias(String aliasName)
    • invalidUDTFSelectExpressionFromAnalyzeMethodNeedsAlias

      public static Throwable invalidUDTFSelectExpressionFromAnalyzeMethodNeedsAlias(String expression)
    • windowAggregateFunctionWithFilterNotSupportedError

      public static Throwable windowAggregateFunctionWithFilterNotSupportedError()
    • windowFunctionInsideAggregateFunctionNotAllowedError

      public static Throwable windowFunctionInsideAggregateFunctionNotAllowedError()
    • expressionWithoutWindowExpressionError

      public static Throwable expressionWithoutWindowExpressionError(org.apache.spark.sql.catalyst.expressions.NamedExpression expr)
    • expressionWithMultiWindowExpressionsError

      public static Throwable expressionWithMultiWindowExpressionsError(org.apache.spark.sql.catalyst.expressions.NamedExpression expr, scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.WindowSpecDefinition> distinctWindowSpec)
    • windowFunctionNotAllowedError

      public static Throwable windowFunctionNotAllowedError(String clauseName)
    • cannotSpecifyWindowFrameError

      public static Throwable cannotSpecifyWindowFrameError(String prettyName)
    • windowFrameNotMatchRequiredFrameError

      public static Throwable windowFrameNotMatchRequiredFrameError(org.apache.spark.sql.catalyst.expressions.SpecifiedWindowFrame f, org.apache.spark.sql.catalyst.expressions.WindowFrame required)
    • windowFunctionWithWindowFrameNotOrderedError

      public static Throwable windowFunctionWithWindowFrameNotOrderedError(org.apache.spark.sql.catalyst.expressions.WindowFunction wf)
    • multiTimeWindowExpressionsNotSupportedError

      public static Throwable multiTimeWindowExpressionsNotSupportedError(org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
    • sessionWindowGapDurationDataTypeError

      public static Throwable sessionWindowGapDurationDataTypeError(DataType dt)
    • unresolvedVariableError

      public static Throwable unresolvedVariableError(scala.collection.immutable.Seq<String> name, scala.collection.immutable.Seq<String> searchPath)
    • unresolvedVariableError

      public static Throwable unresolvedVariableError(scala.collection.immutable.Seq<String> name, scala.collection.immutable.Seq<String> searchPath, org.apache.spark.sql.catalyst.trees.Origin origin)
    • unresolvedRoutineError

      public static Throwable unresolvedRoutineError(org.apache.spark.sql.catalyst.FunctionIdentifier name, scala.collection.immutable.Seq<String> searchPath)
    • unresolvedRoutineError

      public static Throwable unresolvedRoutineError(scala.collection.immutable.Seq<String> nameParts, scala.collection.immutable.Seq<String> searchPath, org.apache.spark.sql.catalyst.trees.Origin context)
    • wrongNumArgsError

      public static Throwable wrongNumArgsError(String name, scala.collection.immutable.Seq<Object> validParametersCount, int actualNumber, String legacyNum, String legacyConfKey, String legacyConfValue)
    • alterV2TableSetLocationWithPartitionNotSupportedError

      public static Throwable alterV2TableSetLocationWithPartitionNotSupportedError()
    • joinStrategyHintParameterNotSupportedError

      public static Throwable joinStrategyHintParameterNotSupportedError(Object unsupported)
    • invalidHintParameterError

      public static Throwable invalidHintParameterError(String hintName, scala.collection.immutable.Seq<Object> invalidParams)
    • invalidCoalesceHintParameterError

      public static Throwable invalidCoalesceHintParameterError(String hintName)
    • starExpandDataTypeNotSupportedError

      public static Throwable starExpandDataTypeNotSupportedError(scala.collection.immutable.Seq<String> attributes)
    • cannotResolveStarExpandGivenInputColumnsError

      public static Throwable cannotResolveStarExpandGivenInputColumnsError(String targetString, String columns)
    • addColumnWithV1TableCannotSpecifyNotNullError

      public static Throwable addColumnWithV1TableCannotSpecifyNotNullError()
    • unsupportedTableOperationError

      public static Throwable unsupportedTableOperationError(CatalogPlugin catalog, Identifier ident, String operation)
    • unsupportedTableOperationError

      public static Throwable unsupportedTableOperationError(org.apache.spark.sql.catalyst.TableIdentifier ident, String operation)
    • unsupportedBatchReadError

      public static Throwable unsupportedBatchReadError(Table table)
    • unsupportedStreamingScanError

      public static Throwable unsupportedStreamingScanError(Table table)
    • unsupportedAppendInBatchModeError

      public static Throwable unsupportedAppendInBatchModeError(String name)
    • unsupportedDynamicOverwriteInBatchModeError

      public static Throwable unsupportedDynamicOverwriteInBatchModeError(Table table)
    • unsupportedTruncateInBatchModeError

      public static Throwable unsupportedTruncateInBatchModeError(Table table)
    • unsupportedOverwriteByFilterInBatchModeError

      public static Throwable unsupportedOverwriteByFilterInBatchModeError(String name)
    • catalogOperationNotSupported

      public static Throwable catalogOperationNotSupported(CatalogPlugin catalog, String operation)
    • alterColumnWithV1TableCannotSpecifyNotNullError

      public static Throwable alterColumnWithV1TableCannotSpecifyNotNullError()
    • alterColumnCannotFindColumnInV1TableError

      public static Throwable alterColumnCannotFindColumnInV1TableError(String colName, org.apache.spark.sql.connector.catalog.V1Table v1Table)
    • wrongCommandForObjectTypeError

      public static Throwable wrongCommandForObjectTypeError(String operation, String requiredType, String objectName, String foundType, String alternative)
    • showColumnsWithConflictDatabasesError

      public static Throwable showColumnsWithConflictDatabasesError(scala.collection.immutable.Seq<String> db, org.apache.spark.sql.catalyst.TableIdentifier v1TableName)
    • cannotCreateTableWithBothProviderAndSerdeError

      public static Throwable cannotCreateTableWithBothProviderAndSerdeError(scala.Option<String> provider, scala.Option<org.apache.spark.sql.catalyst.plans.logical.SerdeInfo> maybeSerdeInfo)
    • invalidFileFormatForStoredAsError

      public static Throwable invalidFileFormatForStoredAsError(org.apache.spark.sql.catalyst.plans.logical.SerdeInfo serdeInfo)
    • commandNotSupportNestedColumnError

      public static Throwable commandNotSupportNestedColumnError(String command, String quoted)
    • renameTempViewToExistingViewError

      public static Throwable renameTempViewToExistingViewError(String newName)
    • cannotDropNonemptyDatabaseError

      public static Throwable cannotDropNonemptyDatabaseError(String db)
    • cannotDropNonemptyNamespaceError

      public static Throwable cannotDropNonemptyNamespaceError(scala.collection.immutable.Seq<String> namespace)
    • invalidNameForTableOrDatabaseError

      public static Throwable invalidNameForTableOrDatabaseError(String name)
    • cannotCreateDatabaseWithSameNameAsPreservedDatabaseError

      public static Throwable cannotCreateDatabaseWithSameNameAsPreservedDatabaseError(String database)
    • cannotDropDefaultDatabaseError

      public static Throwable cannotDropDefaultDatabaseError(String database)
    • cannotUsePreservedDatabaseAsCurrentDatabaseError

      public static Throwable cannotUsePreservedDatabaseAsCurrentDatabaseError(String database)
    • createExternalTableWithoutLocationError

      public static Throwable createExternalTableWithoutLocationError()
    • dropNonExistentColumnsNotSupportedError

      public static Throwable dropNonExistentColumnsNotSupportedError(scala.collection.immutable.Seq<String> nonExistentColumnNames)
    • cannotRetrieveTableOrViewNotInSameDatabaseError

      public static Throwable cannotRetrieveTableOrViewNotInSameDatabaseError(scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.QualifiedTableName> qualifiedTableNames)
    • renameTableSourceAndDestinationMismatchError

      public static Throwable renameTableSourceAndDestinationMismatchError(String db, String newDb)
    • cannotRenameTempViewWithDatabaseSpecifiedError

      public static Throwable cannotRenameTempViewWithDatabaseSpecifiedError(org.apache.spark.sql.catalyst.TableIdentifier oldName, org.apache.spark.sql.catalyst.TableIdentifier newName)
    • cannotRenameTempViewToExistingTableError

      public static Throwable cannotRenameTempViewToExistingTableError(org.apache.spark.sql.catalyst.TableIdentifier newName)
    • invalidPartitionSpecError

      public static Throwable invalidPartitionSpecError(String details)
    • functionAlreadyExistsError

      public static Throwable functionAlreadyExistsError(org.apache.spark.sql.catalyst.FunctionIdentifier func)
    • cannotLoadClassWhenRegisteringFunctionError

      public static Throwable cannotLoadClassWhenRegisteringFunctionError(String className, org.apache.spark.sql.catalyst.FunctionIdentifier func)
    • resourceTypeNotSupportedError

      public static Throwable resourceTypeNotSupportedError(String resourceType)
    • tableNotSpecifyDatabaseError

      public static Throwable tableNotSpecifyDatabaseError(org.apache.spark.sql.catalyst.TableIdentifier identifier)
    • tableNotSpecifyLocationUriError

      public static Throwable tableNotSpecifyLocationUriError(org.apache.spark.sql.catalyst.TableIdentifier identifier)
    • partitionNotSpecifyLocationUriError

      public static Throwable partitionNotSpecifyLocationUriError(String specString)
    • invalidBucketNumberError

      public static Throwable invalidBucketNumberError(int bucketingMaxBuckets, int numBuckets)
    • corruptedTableNameContextInCatalogError

      public static Throwable corruptedTableNameContextInCatalogError(int numParts, int index)
    • corruptedViewSQLConfigsInCatalogError

      public static Throwable corruptedViewSQLConfigsInCatalogError(Exception e)
    • corruptedViewQueryOutputColumnsInCatalogError

      public static Throwable corruptedViewQueryOutputColumnsInCatalogError(String numCols, int index)
    • corruptedViewReferredTempViewInCatalogError

      public static Throwable corruptedViewReferredTempViewInCatalogError(Exception e)
    • corruptedViewReferredTempFunctionsInCatalogError

      public static Throwable corruptedViewReferredTempFunctionsInCatalogError(Exception e)
    • columnStatisticsDeserializationNotSupportedError

      public static Throwable columnStatisticsDeserializationNotSupportedError(String name, DataType dataType)
    • columnStatisticsSerializationNotSupportedError

      public static Throwable columnStatisticsSerializationNotSupportedError(String colName, DataType dataType)
    • insufficientTablePropertyError

      public static Throwable insufficientTablePropertyError(String key)
    • insufficientTablePropertyPartError

      public static Throwable insufficientTablePropertyPartError(String key, String totalAmountOfParts)
    • unexpectedSchemaTypeError

      public static Throwable unexpectedSchemaTypeError(org.apache.spark.sql.catalyst.expressions.Expression exp)
    • schemaIsNotStructTypeError

      public static Throwable schemaIsNotStructTypeError(org.apache.spark.sql.catalyst.expressions.Expression exp, DataType dataType)
    • schemaIsNotStructTypeError

      public static Throwable schemaIsNotStructTypeError(String inputSchema, DataType dataType)
    • keyValueInMapNotStringError

      public static Throwable keyValueInMapNotStringError(org.apache.spark.sql.catalyst.expressions.CreateMap m)
    • nonMapFunctionNotAllowedError

      public static Throwable nonMapFunctionNotAllowedError()
    • invalidFieldTypeForCorruptRecordError

      public static Throwable invalidFieldTypeForCorruptRecordError()
    • dataTypeUnsupportedByClassError

      public static Throwable dataTypeUnsupportedByClassError(DataType x, String className)
    • parseModeUnsupportedError

      public static Throwable parseModeUnsupportedError(String funcName, org.apache.spark.sql.catalyst.util.ParseMode mode)
    • nonFoldableArgumentError

      public static Throwable nonFoldableArgumentError(String funcName, String paramName, DataType paramType)
    • literalTypeUnsupportedForSourceTypeError

      public static Throwable literalTypeUnsupportedForSourceTypeError(String field, org.apache.spark.sql.catalyst.expressions.Expression source)
    • arrayComponentTypeUnsupportedError

      public static Throwable arrayComponentTypeUnsupportedError(Class<?> clz)
    • secondArgumentNotDoubleLiteralError

      public static Throwable secondArgumentNotDoubleLiteralError()
    • dataTypeUnsupportedByExtractValueError

      public static Throwable dataTypeUnsupportedByExtractValueError(DataType dataType, org.apache.spark.sql.catalyst.expressions.Expression extraction, org.apache.spark.sql.catalyst.expressions.Expression child)
    • noHandlerForUDAFError

      public static Throwable noHandlerForUDAFError(String name)
    • batchWriteCapabilityError

      public static Throwable batchWriteCapabilityError(Table table, String v2WriteClassName, String v1WriteClassName)
    • unsupportedDeleteByConditionWithSubqueryError

      public static Throwable unsupportedDeleteByConditionWithSubqueryError(org.apache.spark.sql.catalyst.expressions.Expression condition)
    • cannotTranslateExpressionToSourceFilterError

      public static Throwable cannotTranslateExpressionToSourceFilterError(org.apache.spark.sql.catalyst.expressions.Expression f)
    • cannotDeleteTableWhereFiltersError

      public static Throwable cannotDeleteTableWhereFiltersError(Table table, Predicate[] filters)
    • describeDoesNotSupportPartitionForV2TablesError

      public static Throwable describeDoesNotSupportPartitionForV2TablesError()
    • cannotReplaceMissingTableError

      public static Throwable cannotReplaceMissingTableError(Identifier tableIdentifier)
    • cannotReplaceMissingTableError

      public static Throwable cannotReplaceMissingTableError(Identifier tableIdentifier, scala.Option<Throwable> cause)
    • streamingSourcesDoNotSupportCommonExecutionModeError

      public static Throwable streamingSourcesDoNotSupportCommonExecutionModeError(scala.collection.immutable.Seq<String> microBatchSources, scala.collection.immutable.Seq<String> continuousSources)
    • noSuchTableError

      public static org.apache.spark.sql.catalyst.analysis.NoSuchTableException noSuchTableError(Identifier ident)
    • noSuchTableError

      public static Throwable noSuchTableError(scala.collection.immutable.Seq<String> nameParts)
    • noSuchNamespaceError

      public static Throwable noSuchNamespaceError(String[] namespace)
    • tableAlreadyExistsError

      public static Throwable tableAlreadyExistsError(Identifier ident)
    • requiresSinglePartNamespaceError

      public static Throwable requiresSinglePartNamespaceError(scala.collection.immutable.Seq<String> namespace)
    • namespaceAlreadyExistsError

      public static Throwable namespaceAlreadyExistsError(String[] namespace)
    • cannotCreateJDBCTableUsingProviderError

      public static Throwable cannotCreateJDBCTableUsingProviderError()
    • cannotCreateJDBCTableUsingLocationError

      public static Throwable cannotCreateJDBCTableUsingLocationError()
    • cannotCreateJDBCNamespaceUsingProviderError

      public static Throwable cannotCreateJDBCNamespaceUsingProviderError()
    • cannotCreateJDBCNamespaceWithPropertyError

      public static Throwable cannotCreateJDBCNamespaceWithPropertyError(String property)
    • cannotSetJDBCNamespaceWithPropertyError

      public static Throwable cannotSetJDBCNamespaceWithPropertyError(String property)
    • cannotUnsetJDBCNamespaceWithPropertyError

      public static Throwable cannotUnsetJDBCNamespaceWithPropertyError(String property)
    • unsupportedJDBCNamespaceChangeInCatalogError

      public static Throwable unsupportedJDBCNamespaceChangeInCatalogError(scala.collection.immutable.Seq<NamespaceChange> changes)
    • tableDoesNotSupportReadsError

      public static Throwable tableDoesNotSupportReadsError(Table table)
    • tableDoesNotSupportWritesError

      public static Throwable tableDoesNotSupportWritesError(Table table)
    • tableDoesNotSupportDeletesError

      public static Throwable tableDoesNotSupportDeletesError(Table table)
    • tableDoesNotSupportTruncatesError

      public static Throwable tableDoesNotSupportTruncatesError(Table table)
    • tableDoesNotSupportPartitionManagementError

      public static Throwable tableDoesNotSupportPartitionManagementError(Table table)
    • tableDoesNotSupportAtomicPartitionManagementError

      public static Throwable tableDoesNotSupportAtomicPartitionManagementError(Table table)
    • tableIsNotRowLevelOperationTableError

      public static Throwable tableIsNotRowLevelOperationTableError(Table table)
    • cannotRenameTableWithAlterViewError

      public static Throwable cannotRenameTableWithAlterViewError()
    • analyzeTableNotSupportedForV2TablesError

      public static Throwable analyzeTableNotSupportedForV2TablesError()
    • alterTableRecoverPartitionsNotSupportedForV2TablesError

      public static Throwable alterTableRecoverPartitionsNotSupportedForV2TablesError()
    • alterTableSerDePropertiesNotSupportedForV2TablesError

      public static Throwable alterTableSerDePropertiesNotSupportedForV2TablesError()
    • loadDataNotSupportedForV2TablesError

      public static Throwable loadDataNotSupportedForV2TablesError()
    • showCreateTableAsSerdeNotSupportedForV2TablesError

      public static Throwable showCreateTableAsSerdeNotSupportedForV2TablesError()
    • showColumnsNotSupportedForV2TablesError

      public static Throwable showColumnsNotSupportedForV2TablesError()
    • repairTableNotSupportedForV2TablesError

      public static Throwable repairTableNotSupportedForV2TablesError()
    • databaseFromV1SessionCatalogNotSpecifiedError

      public static Throwable databaseFromV1SessionCatalogNotSpecifiedError()
    • nestedDatabaseUnsupportedByV1SessionCatalogError

      public static Throwable nestedDatabaseUnsupportedByV1SessionCatalogError(String catalog)
    • invalidRepartitionExpressionsError

      public static Throwable invalidRepartitionExpressionsError(scala.collection.immutable.Seq<Object> sortOrders)
    • partitionColumnNotSpecifiedError

      public static Throwable partitionColumnNotSpecifiedError(String format, String partitionColumn)
    • dataSchemaNotSpecifiedError

      public static Throwable dataSchemaNotSpecifiedError(String format)
    • dataPathNotExistError

      public static Throwable dataPathNotExistError(String path)
    • dataSourceOutputModeUnsupportedError

      public static Throwable dataSourceOutputModeUnsupportedError(String className, OutputMode outputMode)
    • schemaNotSpecifiedForSchemaRelationProviderError

      public static Throwable schemaNotSpecifiedForSchemaRelationProviderError(String className)
    • userSpecifiedSchemaMismatchActualSchemaError

      public static Throwable userSpecifiedSchemaMismatchActualSchemaError(StructType schema, StructType actualSchema)
    • dataSchemaNotSpecifiedError

      public static Throwable dataSchemaNotSpecifiedError(String format, String fileCatalog)
    • invalidDataSourceError

      public static Throwable invalidDataSourceError(String className)
    • cannotSaveIntervalIntoExternalStorageError

      public static Throwable cannotSaveIntervalIntoExternalStorageError()
    • cannotSaveVariantIntoExternalStorageError

      public static Throwable cannotSaveVariantIntoExternalStorageError()
    • cannotResolveAttributeError

      public static Throwable cannotResolveAttributeError(String name, String outputStr)
    • orcNotUsedWithHiveEnabledError

      public static Throwable orcNotUsedWithHiveEnabledError()
    • failedToFindAvroDataSourceError

      public static Throwable failedToFindAvroDataSourceError(String provider)
    • failedToFindKafkaDataSourceError

      public static Throwable failedToFindKafkaDataSourceError(String provider)
    • findMultipleDataSourceError

      public static Throwable findMultipleDataSourceError(String provider, scala.collection.immutable.Seq<String> sourceNames)
    • writeEmptySchemasUnsupportedByDataSourceError

      public static Throwable writeEmptySchemasUnsupportedByDataSourceError()
    • insertMismatchedColumnNumberError

      public static Throwable insertMismatchedColumnNumberError(scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> targetAttributes, scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> sourceAttributes, int staticPartitionsSize)
    • insertMismatchedPartitionNumberError

      public static Throwable insertMismatchedPartitionNumberError(StructType targetPartitionSchema, int providedPartitionsSize)
    • invalidPartitionColumnError

      public static Throwable invalidPartitionColumnError(String partKey, StructType targetPartitionSchema)
    • multiplePartitionColumnValuesSpecifiedError

      public static Throwable multiplePartitionColumnValuesSpecifiedError(StructField field, scala.collection.immutable.Map<String,String> potentialSpecs)
    • invalidOrderingForConstantValuePartitionColumnError

      public static Throwable invalidOrderingForConstantValuePartitionColumnError(StructType targetPartitionSchema)
    • cannotWriteDataToRelationsWithMultiplePathsError

      public static Throwable cannotWriteDataToRelationsWithMultiplePathsError(scala.collection.immutable.Seq<org.apache.hadoop.fs.Path> paths)
    • failedToRebuildExpressionError

      public static Throwable failedToRebuildExpressionError(Filter filter)
    • dataTypeUnsupportedByDataSourceError

      public static Throwable dataTypeUnsupportedByDataSourceError(String format, StructField column)
    • failToResolveDataSourceForTableError

      public static Throwable failToResolveDataSourceForTableError(org.apache.spark.sql.catalyst.catalog.CatalogTable table, String key)
    • outputPathAlreadyExistsError

      public static Throwable outputPathAlreadyExistsError(org.apache.hadoop.fs.Path outputPath)
    • invalidPartitionColumnDataTypeError

      public static Throwable invalidPartitionColumnDataTypeError(StructField field)
    • cannotUseAllColumnsForPartitionColumnsError

      public static Throwable cannotUseAllColumnsForPartitionColumnsError()
    • partitionColumnNotFoundInSchemaError

      public static Throwable partitionColumnNotFoundInSchemaError(String col, String schemaCatalog)
    • columnNotFoundInSchemaError

      public static Throwable columnNotFoundInSchemaError(StructField col, scala.Option<StructType> tableSchema)
    • saveDataIntoViewNotAllowedError

      public static Throwable saveDataIntoViewNotAllowedError()
    • mismatchedTableFormatError

      public static Throwable mismatchedTableFormatError(String tableName, Class<?> existingProvider, Class<?> specifiedProvider)
    • mismatchedTableLocationError

      public static Throwable mismatchedTableLocationError(org.apache.spark.sql.catalyst.TableIdentifier identifier, org.apache.spark.sql.catalyst.catalog.CatalogTable existingTable, org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc)
    • mismatchedTableColumnNumberError

      public static Throwable mismatchedTableColumnNumberError(String tableName, org.apache.spark.sql.catalyst.catalog.CatalogTable existingTable, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query)
    • cannotResolveColumnGivenInputColumnsError

      public static Throwable cannotResolveColumnGivenInputColumnsError(String col, String inputColumns)
    • mismatchedTablePartitionColumnError

      public static Throwable mismatchedTablePartitionColumnError(String tableName, scala.collection.immutable.Seq<String> specifiedPartCols, String existingPartCols)
    • mismatchedTableBucketingError

      public static Throwable mismatchedTableBucketingError(String tableName, String specifiedBucketString, String existingBucketString)
    • specifyPartitionNotAllowedWhenTableSchemaNotDefinedError

      public static Throwable specifyPartitionNotAllowedWhenTableSchemaNotDefinedError()
    • bucketingColumnCannotBePartOfPartitionColumnsError

      public static Throwable bucketingColumnCannotBePartOfPartitionColumnsError(String bucketCol, scala.collection.immutable.Seq<String> normalizedPartCols)
    • bucketSortingColumnCannotBePartOfPartitionColumnsError

      public static Throwable bucketSortingColumnCannotBePartOfPartitionColumnsError(String sortCol, scala.collection.immutable.Seq<String> normalizedPartCols)
    • invalidBucketColumnDataTypeError

      public static Throwable invalidBucketColumnDataTypeError(DataType dataType)
    • requestedPartitionsMismatchTablePartitionsError

      public static Throwable requestedPartitionsMismatchTablePartitionsError(String tableName, scala.collection.immutable.Map<String,scala.Option<String>> normalizedPartSpec, StructType partColNames)
    • ddlWithoutHiveSupportEnabledError

      public static Throwable ddlWithoutHiveSupportEnabledError(String cmd)
    • createTableColumnTypesOptionColumnNotFoundInSchemaError

      public static Throwable createTableColumnTypesOptionColumnNotFoundInSchemaError(String col, StructType schema)
    • parquetTypeUnsupportedYetError

      public static Throwable parquetTypeUnsupportedYetError(String parquetType)
    • illegalParquetTypeError

      public static Throwable illegalParquetTypeError(String parquetType)
    • unrecognizedParquetTypeError

      public static Throwable unrecognizedParquetTypeError(String field)
    • cannotConvertDataTypeToParquetTypeError

      public static Throwable cannotConvertDataTypeToParquetTypeError(StructField field)
    • incompatibleViewSchemaChangeError

      public static Throwable incompatibleViewSchemaChangeError(String viewName, String colName, int expectedNum, scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> actualCols, scala.Option<String> viewDDL)
    • numberOfPartitionsNotAllowedWithUnspecifiedDistributionError

      public static Throwable numberOfPartitionsNotAllowedWithUnspecifiedDistributionError()
    • partitionSizeNotAllowedWithUnspecifiedDistributionError

      public static Throwable partitionSizeNotAllowedWithUnspecifiedDistributionError()
    • numberAndSizeOfPartitionsNotAllowedTogether

      public static Throwable numberAndSizeOfPartitionsNotAllowedTogether()
    • unexpectedInputDataTypeError

      public static Throwable unexpectedInputDataTypeError(String functionName, int paramIndex, DataType dataType, org.apache.spark.sql.catalyst.expressions.Expression expression)
    • unexpectedNullError

      public static Throwable unexpectedNullError(String exprName, org.apache.spark.sql.catalyst.expressions.Expression expression)
    • streamJoinStreamWithoutEqualityPredicateUnsupportedError

      public static Throwable streamJoinStreamWithoutEqualityPredicateUnsupportedError(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan)
    • invalidPandasUDFPlacementError

      public static Throwable invalidPandasUDFPlacementError(scala.collection.immutable.Seq<String> groupAggPandasUDFNames)
    • ambiguousAttributesInSelfJoinError

      public static Throwable ambiguousAttributesInSelfJoinError(scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.AttributeReference> ambiguousAttrs)
    • ambiguousColumnOrFieldError

      public static Throwable ambiguousColumnOrFieldError(scala.collection.immutable.Seq<String> name, int numMatches, org.apache.spark.sql.catalyst.trees.Origin context)
    • ambiguousReferenceError

      public static Throwable ambiguousReferenceError(String name, scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> ambiguousReferences)
    • cannotUseIntervalTypeInTableSchemaError

      public static Throwable cannotUseIntervalTypeInTableSchemaError()
    • missingCatalogAbilityError

      public static Throwable missingCatalogAbilityError(CatalogPlugin plugin, String ability)
    • tableValuedFunctionTooManyTableArgumentsError

      public static Throwable tableValuedFunctionTooManyTableArgumentsError(int num)
    • tableValuedFunctionFailedToAnalyseInPythonError

      public static Throwable tableValuedFunctionFailedToAnalyseInPythonError(String msg)
    • pythonDataSourceError

      public static Throwable pythonDataSourceError(String action, String tpe, String msg)
    • identifierTooManyNamePartsError

      public static Throwable identifierTooManyNamePartsError(String originalIdentifier)
    • emptyMultipartIdentifierError

      public static Throwable emptyMultipartIdentifierError()
    • cannotOperateOnHiveDataSourceFilesError

      public static Throwable cannotOperateOnHiveDataSourceFilesError(String operation)
    • setPathOptionAndCallWithPathParameterError

      public static Throwable setPathOptionAndCallWithPathParameterError(String method)
    • userSpecifiedSchemaUnsupportedError

      public static Throwable userSpecifiedSchemaUnsupportedError(String operation)
    • tempViewNotSupportStreamingWriteError

      public static Throwable tempViewNotSupportStreamingWriteError(String viewName)
    • streamingIntoViewNotSupportedError

      public static Throwable streamingIntoViewNotSupportedError(String viewName)
    • inputSourceDiffersFromDataSourceProviderError

      public static Throwable inputSourceDiffersFromDataSourceProviderError(String source, String tableName, org.apache.spark.sql.catalyst.catalog.CatalogTable table)
    • tableNotSupportStreamingWriteError

      public static Throwable tableNotSupportStreamingWriteError(String tableName, Table t)
    • queryNameNotSpecifiedForMemorySinkError

      public static Throwable queryNameNotSpecifiedForMemorySinkError()
    • sourceNotSupportedWithContinuousTriggerError

      public static Throwable sourceNotSupportedWithContinuousTriggerError(String source)
    • columnNotFoundInExistingColumnsError

      public static Throwable columnNotFoundInExistingColumnsError(String columnType, String columnName, scala.collection.immutable.Seq<String> validColumnNames)
    • operationNotSupportPartitioningError

      public static Throwable operationNotSupportPartitioningError(String operation)
    • mixedRefsInAggFunc

      public static Throwable mixedRefsInAggFunc(String funcStr, org.apache.spark.sql.catalyst.trees.Origin origin)
    • subqueryReturnMoreThanOneColumn

      public static Throwable subqueryReturnMoreThanOneColumn(int number, org.apache.spark.sql.catalyst.trees.Origin origin)
    • unsupportedCorrelatedReferenceDataTypeError

      public static Throwable unsupportedCorrelatedReferenceDataTypeError(org.apache.spark.sql.catalyst.expressions.Expression expr, DataType dataType, org.apache.spark.sql.catalyst.trees.Origin origin)
    • unsupportedCorrelatedSubqueryInJoinConditionError

      public static Throwable unsupportedCorrelatedSubqueryInJoinConditionError(scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.Expression> unsupportedSubqueryExpressions)
    • functionCannotProcessInputError

      public static Throwable functionCannotProcessInputError(UnboundFunction unbound, scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.Expression> arguments, UnsupportedOperationException unsupported)
    • v2FunctionInvalidInputTypeLengthError

      public static Throwable v2FunctionInvalidInputTypeLengthError(BoundFunction bound, scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.Expression> args)
    • cannotResolveColumnNameAmongAttributesError

      public static Throwable cannotResolveColumnNameAmongAttributesError(String colName, String fieldNames)
    • cannotWriteTooManyColumnsToTableError

      public static Throwable cannotWriteTooManyColumnsToTableError(String tableName, scala.collection.immutable.Seq<String> expected, scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> queryOutput)
    • cannotWriteNotEnoughColumnsToTableError

      public static Throwable cannotWriteNotEnoughColumnsToTableError(String tableName, scala.collection.immutable.Seq<String> expected, scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> queryOutput)
    • incompatibleDataToTableCannotFindDataError

      public static Throwable incompatibleDataToTableCannotFindDataError(String tableName, String colName)
    • incompatibleDataToTableAmbiguousColumnNameError

      public static Throwable incompatibleDataToTableAmbiguousColumnNameError(String tableName, String colName)
    • incompatibleDataToTableExtraColumnsError

      public static Throwable incompatibleDataToTableExtraColumnsError(String tableName, String extraColumns)
    • incompatibleDataToTableExtraStructFieldsError

      public static Throwable incompatibleDataToTableExtraStructFieldsError(String tableName, String colName, String extraFields)
    • incompatibleDataToTableNullableColumnError

      public static Throwable incompatibleDataToTableNullableColumnError(String tableName, String colName)
    • incompatibleDataToTableNullableArrayElementsError

      public static Throwable incompatibleDataToTableNullableArrayElementsError(String tableName, String colName)
    • incompatibleDataToTableNullableMapValuesError

      public static Throwable incompatibleDataToTableNullableMapValuesError(String tableName, String colName)
    • incompatibleDataToTableCannotSafelyCastError

      public static Throwable incompatibleDataToTableCannotSafelyCastError(String tableName, String colName, String srcType, String targetType)
    • incompatibleDataToTableStructMissingFieldsError

      public static Throwable incompatibleDataToTableStructMissingFieldsError(String tableName, String colName, String missingFields)
    • incompatibleDataToTableUnexpectedColumnNameError

      public static Throwable incompatibleDataToTableUnexpectedColumnNameError(String tableName, String colName, int order, String expected, String found)
    • invalidRowLevelOperationAssignments

      public static Throwable invalidRowLevelOperationAssignments(scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.plans.logical.Assignment> assignments, scala.collection.immutable.Seq<String> errors)
    • invalidEscapeChar

      public static Throwable invalidEscapeChar(org.apache.spark.sql.catalyst.expressions.Expression sqlExpr)
    • secondArgumentOfFunctionIsNotIntegerError

      public static Throwable secondArgumentOfFunctionIsNotIntegerError(String function, NumberFormatException e)
    • nonPartitionPruningPredicatesNotExpectedError

      public static Throwable nonPartitionPruningPredicatesNotExpectedError(scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.Expression> nonPartitionPruningPredicates)
    • columnNotDefinedInTableError

      public static Throwable columnNotDefinedInTableError(String colType, String colName, String tableName, scala.collection.immutable.Seq<String> tableCols)
    • invalidLiteralForWindowDurationError

      public static Throwable invalidLiteralForWindowDurationError()
    • noSuchStructFieldInGivenFieldsError

      public static Throwable noSuchStructFieldInGivenFieldsError(String fieldName, StructField[] fields)
    • ambiguousReferenceToFieldsError

      public static Throwable ambiguousReferenceToFieldsError(String field, int numberOfAppearance)
    • secondArgumentInFunctionIsNotBooleanLiteralError

      public static Throwable secondArgumentInFunctionIsNotBooleanLiteralError(String funcName)
    • joinConditionMissingOrTrivialError

      public static Throwable joinConditionMissingOrTrivialError(org.apache.spark.sql.catalyst.plans.logical.Join join, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan left, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan right)
    • usePythonUDFInJoinConditionUnsupportedError

      public static Throwable usePythonUDFInJoinConditionUnsupportedError(org.apache.spark.sql.catalyst.plans.JoinType joinType)
    • conflictingAttributesInJoinConditionError

      public static Throwable conflictingAttributesInJoinConditionError(org.apache.spark.sql.catalyst.expressions.AttributeSet conflictingAttrs, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan outerPlan, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan subplan)
    • emptyWindowExpressionError

      public static Throwable emptyWindowExpressionError(org.apache.spark.sql.catalyst.plans.logical.Window expr)
    • foundDifferentWindowFunctionTypeError

      public static Throwable foundDifferentWindowFunctionTypeError(scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression> windowExpressions)
    • escapeCharacterInTheMiddleError

      public static Throwable escapeCharacterInTheMiddleError(String pattern, String char_)
    • escapeCharacterAtTheEndError

      public static Throwable escapeCharacterAtTheEndError(String pattern)
    • tableIdentifierExistsError

      public static Throwable tableIdentifierExistsError(org.apache.spark.sql.catalyst.TableIdentifier tableIdentifier)
    • tableIdentifierNotConvertedToHadoopFsRelationError

      public static Throwable tableIdentifierNotConvertedToHadoopFsRelationError(org.apache.spark.sql.catalyst.TableIdentifier tableIdentifier)
    • alterDatabaseLocationUnsupportedError

      public static Throwable alterDatabaseLocationUnsupportedError()
    • hiveTableTypeUnsupportedError

      public static Throwable hiveTableTypeUnsupportedError(String tableName, String tableType)
    • unknownHiveResourceTypeError

      public static Throwable unknownHiveResourceTypeError(String resourceType)
    • configRemovedInVersionError

      public static Throwable configRemovedInVersionError(String configName, String version, String comment)
    • invalidPartitionColumnKeyInTableError

      public static Throwable invalidPartitionColumnKeyInTableError(String key, String tblName)
    • invalidPartitionSpecError

      public static Throwable invalidPartitionSpecError(String specKeys, scala.collection.immutable.Seq<String> partitionColumnNames, String tableName)
    • columnAlreadyExistsError

      public static Throwable columnAlreadyExistsError(String columnName)
    • noSuchTableError

      public static Throwable noSuchTableError(String db, String table)
    • tempViewNotCachedForAnalyzingColumnsError

      public static Throwable tempViewNotCachedForAnalyzingColumnsError(org.apache.spark.sql.catalyst.TableIdentifier tableIdent)
    • columnTypeNotSupportStatisticsCollectionError

      public static Throwable columnTypeNotSupportStatisticsCollectionError(String name, org.apache.spark.sql.catalyst.TableIdentifier tableIdent, DataType dataType)
    • analyzeTableNotSupportedOnViewsError

      public static Throwable analyzeTableNotSupportedOnViewsError()
    • unexpectedPartitionColumnPrefixError

      public static Throwable unexpectedPartitionColumnPrefixError(String table, String database, String schemaColumns, String specColumns)
    • noSuchPartitionError

      public static Throwable noSuchPartitionError(String db, String table, scala.collection.immutable.Map<String,String> partition)
    • notExistPartitionError

      public static Throwable notExistPartitionError(Identifier table, org.apache.spark.sql.catalyst.InternalRow partitionIdent, StructType partitionSchema)
    • analyzingColumnStatisticsNotSupportedForColumnTypeError

      public static Throwable analyzingColumnStatisticsNotSupportedForColumnTypeError(String name, DataType dataType)
    • tableAlreadyExistsError

      public static Throwable tableAlreadyExistsError(String table)
    • createTableAsSelectWithNonEmptyDirectoryError

      public static Throwable createTableAsSelectWithNonEmptyDirectoryError(String tablePath)
    • unsetNonExistentPropertiesError

      public static Throwable unsetNonExistentPropertiesError(scala.collection.immutable.Seq<String> properties, org.apache.spark.sql.catalyst.TableIdentifier table)
    • alterTableChangeColumnNotSupportedForColumnTypeError

      public static Throwable alterTableChangeColumnNotSupportedForColumnTypeError(String tableName, StructField originColumn, StructField newColumn, org.apache.spark.sql.catalyst.trees.Origin origin)
    • cannotAlterPartitionColumn

      public static Throwable cannotAlterPartitionColumn(String tableName, String columnName)
    • cannotFindColumnError

      public static Throwable cannotFindColumnError(String name, String[] fieldNames)
    • alterTableSetSerdeForSpecificPartitionNotSupportedError

      public static Throwable alterTableSetSerdeForSpecificPartitionNotSupportedError()
    • alterTableSetSerdeNotSupportedError

      public static Throwable alterTableSetSerdeNotSupportedError()
    • cmdOnlyWorksOnPartitionedTablesError

      public static Throwable cmdOnlyWorksOnPartitionedTablesError(String operation, String tableIdentWithDB)
    • cmdOnlyWorksOnTableWithLocationError

      public static Throwable cmdOnlyWorksOnTableWithLocationError(String cmd, String tableIdentWithDB)
    • actionNotAllowedOnTableWithFilesourcePartitionManagementDisabledError

      public static Throwable actionNotAllowedOnTableWithFilesourcePartitionManagementDisabledError(String action, String tableName)
    • actionNotAllowedOnTableSincePartitionMetadataNotStoredError

      public static Throwable actionNotAllowedOnTableSincePartitionMetadataNotStoredError(String action, String tableName)
    • cannotAlterViewWithAlterTableError

      public static Throwable cannotAlterViewWithAlterTableError()
    • cannotAlterTableWithAlterViewError

      public static Throwable cannotAlterTableWithAlterViewError()
    • cannotOverwritePathBeingReadFromError

      public static Throwable cannotOverwritePathBeingReadFromError(String path)
    • cannotOverwriteTableThatIsBeingReadFromError

      public static Throwable cannotOverwriteTableThatIsBeingReadFromError(org.apache.spark.sql.catalyst.TableIdentifier tableIdent)
    • cannotDropBuiltinFuncError

      public static Throwable cannotDropBuiltinFuncError(String functionName)
    • cannotRefreshBuiltInFuncError

      public static Throwable cannotRefreshBuiltInFuncError(String functionName)
    • cannotRefreshTempFuncError

      public static Throwable cannotRefreshTempFuncError(String functionName)
    • noSuchFunctionError

      public static Throwable noSuchFunctionError(org.apache.spark.sql.catalyst.FunctionIdentifier identifier)
    • alterAddColNotSupportViewError

      public static Throwable alterAddColNotSupportViewError(org.apache.spark.sql.catalyst.TableIdentifier table)
    • alterAddColNotSupportDatasourceTableError

      public static Throwable alterAddColNotSupportDatasourceTableError(Object tableType, org.apache.spark.sql.catalyst.TableIdentifier table)
    • loadDataNotSupportedForDatasourceTablesError

      public static Throwable loadDataNotSupportedForDatasourceTablesError(String tableIdentWithDB)
    • loadDataWithoutPartitionSpecProvidedError

      public static Throwable loadDataWithoutPartitionSpecProvidedError(String tableIdentWithDB)
    • loadDataPartitionSizeNotMatchNumPartitionColumnsError

      public static Throwable loadDataPartitionSizeNotMatchNumPartitionColumnsError(String tableIdentWithDB, int partitionSize, int targetTableSize)
    • loadDataTargetTableNotPartitionedButPartitionSpecWasProvidedError

      public static Throwable loadDataTargetTableNotPartitionedButPartitionSpecWasProvidedError(String tableIdentWithDB)
    • loadDataInputPathNotExistError

      public static Throwable loadDataInputPathNotExistError(String path)
    • truncateTableOnExternalTablesError

      public static Throwable truncateTableOnExternalTablesError(String tableIdentWithDB)
    • truncateTablePartitionNotSupportedForNotPartitionedTablesError

      public static Throwable truncateTablePartitionNotSupportedForNotPartitionedTablesError(String tableIdentWithDB)
    • failToTruncateTableWhenRemovingDataError

      public static Throwable failToTruncateTableWhenRemovingDataError(String tableIdentWithDB, org.apache.hadoop.fs.Path path, Throwable e)
    • descPartitionNotAllowedOnTempView

      public static Throwable descPartitionNotAllowedOnTempView(String table)
    • descPartitionNotAllowedOnView

      public static Throwable descPartitionNotAllowedOnView(String table)
    • showPartitionNotAllowedOnTableNotPartitionedError

      public static Throwable showPartitionNotAllowedOnTableNotPartitionedError(String tableIdentWithDB)
    • showCreateTableNotSupportedOnTempView

      public static Throwable showCreateTableNotSupportedOnTempView(String table)
    • showCreateTableFailToExecuteUnsupportedFeatureError

      public static Throwable showCreateTableFailToExecuteUnsupportedFeatureError(org.apache.spark.sql.catalyst.catalog.CatalogTable table)
    • showCreateTableNotSupportTransactionalHiveTableError

      public static Throwable showCreateTableNotSupportTransactionalHiveTableError(org.apache.spark.sql.catalyst.catalog.CatalogTable table)
    • showCreateTableFailToExecuteUnsupportedConfError

      public static Throwable showCreateTableFailToExecuteUnsupportedConfError(org.apache.spark.sql.catalyst.TableIdentifier table, scala.collection.mutable.StringBuilder builder)
    • showCreateTableAsSerdeNotAllowedOnSparkDataSourceTableError

      public static Throwable showCreateTableAsSerdeNotAllowedOnSparkDataSourceTableError(org.apache.spark.sql.catalyst.TableIdentifier table)
    • showCreateTableOrViewFailToExecuteUnsupportedFeatureError

      public static Throwable showCreateTableOrViewFailToExecuteUnsupportedFeatureError(org.apache.spark.sql.catalyst.catalog.CatalogTable table, scala.collection.immutable.Seq<String> features)
    • logicalPlanForViewNotAnalyzedError

      public static Throwable logicalPlanForViewNotAnalyzedError()
    • cannotCreateViewTooManyColumnsError

      public static Throwable cannotCreateViewTooManyColumnsError(org.apache.spark.sql.catalyst.TableIdentifier viewIdent, scala.collection.immutable.Seq<String> expected, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query)
    • cannotCreateViewNotEnoughColumnsError

      public static Throwable cannotCreateViewNotEnoughColumnsError(org.apache.spark.sql.catalyst.TableIdentifier viewIdent, scala.collection.immutable.Seq<String> expected, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query)
    • cannotAlterTempViewWithSchemaBindingError

      public static Throwable cannotAlterTempViewWithSchemaBindingError()
    • unsupportedCreateOrReplaceViewOnTableError

      public static Throwable unsupportedCreateOrReplaceViewOnTableError(org.apache.spark.sql.catalyst.TableIdentifier name, boolean replace)
    • viewAlreadyExistsError

      public static Throwable viewAlreadyExistsError(org.apache.spark.sql.catalyst.TableIdentifier name)
    • createPersistedViewFromDatasetAPINotAllowedError

      public static Throwable createPersistedViewFromDatasetAPINotAllowedError()
    • recursiveViewDetectedError

      public static Throwable recursiveViewDetectedError(org.apache.spark.sql.catalyst.TableIdentifier viewIdent, scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.TableIdentifier> newPath)
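      A minimal sketch, with made-up view names, showing that the second parameter is a Scala Seq of catalyst TableIdentifiers describing the detected cycle.

      import org.apache.spark.sql.catalyst.TableIdentifier
      import org.apache.spark.sql.errors.QueryCompilationErrors

      val view = TableIdentifier("v1", Some("default"))
      // The cycle path ends where it started; the names here are illustrative only.
      val cyclePath = Seq(view, TableIdentifier("v2", Some("default")), view)
      throw QueryCompilationErrors.recursiveViewDetectedError(view, cyclePath)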
    • notAllowedToCreatePermanentViewWithoutAssigningAliasForExpressionError

      public static Throwable notAllowedToCreatePermanentViewWithoutAssigningAliasForExpressionError(org.apache.spark.sql.catalyst.TableIdentifier name, org.apache.spark.sql.catalyst.expressions.Attribute attr)
    • notAllowedToCreatePermanentViewByReferencingTempViewError

      public static Throwable notAllowedToCreatePermanentViewByReferencingTempViewError(org.apache.spark.sql.catalyst.TableIdentifier name, String nameParts)
    • notAllowedToCreatePermanentViewByReferencingTempFuncError

      public static Throwable notAllowedToCreatePermanentViewByReferencingTempFuncError(org.apache.spark.sql.catalyst.TableIdentifier name, String funcName)
    • notAllowedToCreatePermanentViewByReferencingTempVarError

      public static Throwable notAllowedToCreatePermanentViewByReferencingTempVarError(org.apache.spark.sql.catalyst.TableIdentifier name, String varName)
    • queryFromRawFilesIncludeCorruptRecordColumnError

      public static Throwable queryFromRawFilesIncludeCorruptRecordColumnError()
    • userDefinedPartitionNotFoundInJDBCRelationError

      public static Throwable userDefinedPartitionNotFoundInJDBCRelationError(String columnName, String schema)
    • invalidPartitionColumnTypeError

      public static Throwable invalidPartitionColumnTypeError(StructField column)
    • tableOrViewAlreadyExistsError

      public static Throwable tableOrViewAlreadyExistsError(String name)
    • invalidColumnNameAsPathError

      public static Throwable invalidColumnNameAsPathError(String datasource, String columnName)
    • textDataSourceWithMultiColumnsError

      public static Throwable textDataSourceWithMultiColumnsError(StructType schema)
    • cannotFindPartitionColumnInPartitionSchemaError

      public static Throwable cannotFindPartitionColumnInPartitionSchemaError(StructField readField, StructType partitionSchema)
    • cannotSpecifyDatabaseForTempViewError

      public static Throwable cannotSpecifyDatabaseForTempViewError(org.apache.spark.sql.catalyst.TableIdentifier tableIdent)
    • cannotCreateTempViewUsingHiveDataSourceError

      public static Throwable cannotCreateTempViewUsingHiveDataSourceError()
    • invalidTimestampProvidedForStrategyError

      public static Throwable invalidTimestampProvidedForStrategyError(String strategy, String timeString)
    • hostOptionNotSetError

      public static Throwable hostOptionNotSetError()
    • portOptionNotSetError

      public static Throwable portOptionNotSetError()
    • invalidIncludeTimestampValueError

      public static Throwable invalidIncludeTimestampValueError()
    • checkpointLocationNotSpecifiedError

      public static Throwable checkpointLocationNotSpecifiedError()
    • recoverQueryFromCheckpointUnsupportedError

      public static Throwable recoverQueryFromCheckpointUnsupportedError(org.apache.hadoop.fs.Path checkpointPath)
    • cannotFindColumnInRelationOutputError

      public static Throwable cannotFindColumnInRelationOutputError(String colName, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan relation)
    • invalidBoundaryStartError

      public static Throwable invalidBoundaryStartError(long start)
    • invalidBoundaryEndError

      public static Throwable invalidBoundaryEndError(long end)
    • tableOrViewNotFound

      public static Throwable tableOrViewNotFound(scala.collection.immutable.Seq<String> ident)
    • unsupportedTableChangeInJDBCCatalogError

      public static Throwable unsupportedTableChangeInJDBCCatalogError(TableChange change)
    • pathOptionNotSetCorrectlyWhenReadingError

      public static Throwable pathOptionNotSetCorrectlyWhenReadingError()
    • pathOptionNotSetCorrectlyWhenWritingError

      public static Throwable pathOptionNotSetCorrectlyWhenWritingError()
    • invalidSaveModeError

      public static Throwable invalidSaveModeError(String saveMode)
    • invalidSingleVariantColumn

      public static Throwable invalidSingleVariantColumn()
    • writeWithSaveModeUnsupportedBySourceError

      public static Throwable writeWithSaveModeUnsupportedBySourceError(String source, String createMode)
    • partitionByDoesNotAllowedWhenUsingInsertIntoError

      public static Throwable partitionByDoesNotAllowedWhenUsingInsertIntoError()
    • cannotFindCatalogToHandleIdentifierError

      public static Throwable cannotFindCatalogToHandleIdentifierError(String quote)
    • sortByWithoutBucketingError

      public static Throwable sortByWithoutBucketingError()
    • bucketByUnsupportedByOperationError

      public static Throwable bucketByUnsupportedByOperationError(String operation)
    • bucketByAndSortByUnsupportedByOperationError

      public static Throwable bucketByAndSortByUnsupportedByOperationError(String operation)
    • tableAlreadyExistsError

      public static Throwable tableAlreadyExistsError(org.apache.spark.sql.catalyst.TableIdentifier tableIdent)
    • invalidPartitionTransformationError

      public static Throwable invalidPartitionTransformationError(org.apache.spark.sql.catalyst.expressions.Expression expr)
    • unresolvedColumnError

      public static AnalysisException unresolvedColumnError(String colName, String[] fields)
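      Unlike most factories in this class, this overload is declared to return AnalysisException, so callers can inspect the exception before throwing it. A minimal sketch with an illustrative column name and candidate fields:

      import org.apache.spark.sql.AnalysisException
      import org.apache.spark.sql.errors.QueryCompilationErrors

      val ex: AnalysisException = QueryCompilationErrors.unresolvedColumnError(
        "ag",                              // column referenced by the user
        Array("age", "name", "salary"))    // candidate fields of the plan output
      // Inspect ex.getMessage for logging, or rethrow it.
      throw ex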
    • cannotParseIntervalError

      public static Throwable cannotParseIntervalError(String delayThreshold, Throwable e)
    • invalidJoinTypeInJoinWithError

      public static Throwable invalidJoinTypeInJoinWithError(org.apache.spark.sql.catalyst.plans.JoinType joinType)
    • cannotPassTypedColumnInUntypedSelectError

      public static Throwable cannotPassTypedColumnInUntypedSelectError(String typedCol)
    • invalidViewNameError

      public static Throwable invalidViewNameError(String viewName)
    • invalidBucketsNumberError

      public static Throwable invalidBucketsNumberError(String numBuckets, String e)
    • usingUntypedScalaUDFError

      public static Throwable usingUntypedScalaUDFError()
    • aggregationFunctionAppliedOnNonNumericColumnError

      public static Throwable aggregationFunctionAppliedOnNonNumericColumnError(String colName)
    • aggregationFunctionAppliedOnNonNumericColumnError

      public static Throwable aggregationFunctionAppliedOnNonNumericColumnError(String pivotColumn, int maxValues)
    • cannotModifyValueOfStaticConfigError

      public static Throwable cannotModifyValueOfStaticConfigError(String key)
    • cannotModifyValueOfSparkConfigError

      public static Throwable cannotModifyValueOfSparkConfigError(String key, String docroot)
    • commandExecutionInRunnerUnsupportedError

      public static Throwable commandExecutionInRunnerUnsupportedError(String runner)
    • udfClassDoesNotImplementAnyUDFInterfaceError

      public static Throwable udfClassDoesNotImplementAnyUDFInterfaceError(String className)
    • udfClassImplementMultiUDFInterfacesError

      public static Throwable udfClassImplementMultiUDFInterfacesError(String className)
    • udfClassWithTooManyTypeArgumentsError

      public static Throwable udfClassWithTooManyTypeArgumentsError(int n)
    • classWithoutPublicNonArgumentConstructorError

      public static Throwable classWithoutPublicNonArgumentConstructorError(String className)
    • cannotLoadClassNotOnClassPathError

      public static Throwable cannotLoadClassNotOnClassPathError(String className)
    • classDoesNotImplementUserDefinedAggregateFunctionError

      public static Throwable classDoesNotImplementUserDefinedAggregateFunctionError(String className)
    • missingFieldError

      public static Throwable missingFieldError(scala.collection.immutable.Seq<String> fieldName, org.apache.spark.sql.catalyst.analysis.ResolvedTable table, org.apache.spark.sql.catalyst.trees.Origin context)
    • invalidFieldName

      public static Throwable invalidFieldName(scala.collection.immutable.Seq<String> fieldName, scala.collection.immutable.Seq<String> path, org.apache.spark.sql.catalyst.trees.Origin context)
    • invalidJsonSchema

      public static Throwable invalidJsonSchema(DataType schema)
    • invalidXmlSchema

      public static Throwable invalidXmlSchema(DataType schema)
    • tableIndexNotSupportedError

      public static Throwable tableIndexNotSupportedError(String errorMessage)
    • invalidViewText

      public static Throwable invalidViewText(String viewText, String viewName)
    • invalidTimeTravelSpecError

      public static Throwable invalidTimeTravelSpecError()
    • invalidTimestampExprForTimeTravel

      public static Throwable invalidTimestampExprForTimeTravel(String errorClass, org.apache.spark.sql.catalyst.expressions.Expression expr)
    • timeTravelUnsupportedError

      public static Throwable timeTravelUnsupportedError(String relationId)
    • writeDistributionAndOrderingNotSupportedInContinuousExecution

      public static Throwable writeDistributionAndOrderingNotSupportedInContinuousExecution()
    • defaultReferencesNotAllowedInComplexExpressionsInInsertValuesList

      public static Throwable defaultReferencesNotAllowedInComplexExpressionsInInsertValuesList()
    • defaultReferencesNotAllowedInComplexExpressionsInUpdateSetClause

      public static Throwable defaultReferencesNotAllowedInComplexExpressionsInUpdateSetClause()
    • defaultReferencesNotAllowedInComplexExpressionsInMergeInsertsOrUpdates

      public static Throwable defaultReferencesNotAllowedInComplexExpressionsInMergeInsertsOrUpdates()
    • nonDeterministicMergeCondition

      public static Throwable nonDeterministicMergeCondition(String condName, org.apache.spark.sql.catalyst.expressions.Expression cond)
    • subqueryNotAllowedInMergeCondition

      public static Throwable subqueryNotAllowedInMergeCondition(String condName, org.apache.spark.sql.catalyst.expressions.Expression cond)
    • aggregationNotAllowedInMergeCondition

      public static Throwable aggregationNotAllowedInMergeCondition(String condName, org.apache.spark.sql.catalyst.expressions.Expression cond)
    • failedToParseExistenceDefaultAsLiteral

      public static Throwable failedToParseExistenceDefaultAsLiteral(String fieldName, String defaultValue)
    • defaultReferencesNotAllowedInDataSource

      public static Throwable defaultReferencesNotAllowedInDataSource(String statementType, String dataSource)
    • addNewDefaultColumnToExistingTableNotAllowed

      public static Throwable addNewDefaultColumnToExistingTableNotAllowed(String statementType, String dataSource)
    • defaultValuesDataTypeError

      public static Throwable defaultValuesDataTypeError(String statement, String colName, String defaultValue, DataType expectedType, DataType actualType)
    • defaultValuesUnresolvedExprError

      public static Throwable defaultValuesUnresolvedExprError(String statement, String colName, String defaultValue, Throwable cause)
    • defaultValuesMayNotContainSubQueryExpressions

      public static Throwable defaultValuesMayNotContainSubQueryExpressions(String statement, String colName, String defaultValue)
    • defaultValueNotConstantError

      public static Throwable defaultValueNotConstantError(String statement, String colName, String defaultValue)
    • nullableColumnOrFieldError

      public static Throwable nullableColumnOrFieldError(scala.collection.immutable.Seq<String> name)
    • notNullConstraintViolationArrayElementError

      public static Throwable notNullConstraintViolationArrayElementError(scala.collection.immutable.Seq<String> path)
    • notNullConstraintViolationMapValueError

      public static Throwable notNullConstraintViolationMapValueError(scala.collection.immutable.Seq<String> path)
    • invalidColumnOrFieldDataTypeError

      public static Throwable invalidColumnOrFieldDataTypeError(scala.collection.immutable.Seq<String> name, DataType dt, DataType expected)
    • columnNotInGroupByClauseError

      public static Throwable columnNotInGroupByClauseError(org.apache.spark.sql.catalyst.expressions.Expression expression)
    • implicitCollationMismatchError

      public static Throwable implicitCollationMismatchError()
    • explicitCollationMismatchError

      public static Throwable explicitCollationMismatchError(scala.collection.immutable.Seq<String> explicitTypes)
    • indeterminateCollationError

      public static Throwable indeterminateCollationError()
    • cannotConvertProtobufTypeToSqlTypeError

      public static Throwable cannotConvertProtobufTypeToSqlTypeError(String protobufColumn, scala.collection.immutable.Seq<String> sqlColumn, String protobufType, DataType sqlType)
    • cannotConvertCatalystTypeToProtobufTypeError

      public static Throwable cannotConvertCatalystTypeToProtobufTypeError(scala.collection.immutable.Seq<String> sqlColumn, String protobufColumn, DataType sqlType, String protobufType)
    • cannotConvertProtobufTypeToCatalystTypeError

      public static Throwable cannotConvertProtobufTypeToCatalystTypeError(String protobufType, DataType sqlType, Throwable cause)
    • cannotConvertSqlTypeToProtobufError

      public static Throwable cannotConvertSqlTypeToProtobufError(String protobufType, DataType sqlType, Throwable cause)
    • protobufTypeUnsupportedYetError

      public static Throwable protobufTypeUnsupportedYetError(String protobufType)
    • unknownProtobufMessageTypeError

      public static Throwable unknownProtobufMessageTypeError(String descriptorName, String containingType)
    • cannotFindCatalystTypeInProtobufSchemaError

      public static Throwable cannotFindCatalystTypeInProtobufSchemaError(String catalystFieldPath)
    • cannotFindProtobufFieldInCatalystError

      public static Throwable cannotFindProtobufFieldInCatalystError(String field)
    • protobufFieldMatchError

      public static Throwable protobufFieldMatchError(String field, String protobufSchema, String matchSize, String matches)
    • unableToLocateProtobufMessageError

      public static Throwable unableToLocateProtobufMessageError(String messageName)
    • foundRecursionInProtobufSchema

      public static Throwable foundRecursionInProtobufSchema(String fieldDescriptor)
    • protobufFieldTypeMismatchError

      public static Throwable protobufFieldTypeMismatchError(String field)
    • protobufClassLoadError

      public static Throwable protobufClassLoadError(String protobufClassName, String explanation, Throwable cause)
    • protobufDescriptorDependencyError

      public static Throwable protobufDescriptorDependencyError(String dependencyName)
    • invalidByteStringFormatError

      public static Throwable invalidByteStringFormatError(Object unsupported)
    • funcBuildError

      public static Throwable funcBuildError(String funcName, Exception cause)
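      A minimal sketch of the usual pattern for this factory: catch the failure raised while building a function and preserve it as the cause. The buildFunction wrapper is hypothetical.

      import org.apache.spark.sql.errors.QueryCompilationErrors

      def buildFunction(funcName: String)(build: => Unit): Unit = {
        try {
          build
        } catch {
          case e: Exception =>
            // Wrap the original failure so it surfaces as a query compilation error.
            throw QueryCompilationErrors.funcBuildError(funcName, e)
        }
      }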
    • ambiguousRelationAliasNameInNestedCTEError

      public static Throwable ambiguousRelationAliasNameInNestedCTEError(String name)
    • ambiguousLateralColumnAliasError

      public static Throwable ambiguousLateralColumnAliasError(String name, int numOfMatches)
    • ambiguousLateralColumnAliasError

      public static Throwable ambiguousLateralColumnAliasError(scala.collection.immutable.Seq<String> nameParts, int numOfMatches)
    • lateralColumnAliasInAggFuncUnsupportedError

      public static Throwable lateralColumnAliasInAggFuncUnsupportedError(scala.collection.immutable.Seq<String> lcaNameParts, org.apache.spark.sql.catalyst.expressions.Expression aggExpr)
    • lateralColumnAliasInWindowUnsupportedError

      public static Throwable lateralColumnAliasInWindowUnsupportedError(scala.collection.immutable.Seq<String> lcaNameParts, org.apache.spark.sql.catalyst.expressions.Expression windowExpr)
    • lateralColumnAliasInAggWithWindowAndHavingUnsupportedError

      public static Throwable lateralColumnAliasInAggWithWindowAndHavingUnsupportedError(scala.collection.immutable.Seq<String> lcaNameParts)
    • dataTypeOperationUnsupportedError

      public static Throwable dataTypeOperationUnsupportedError()
    • nullableRowIdError

      public static Throwable nullableRowIdError(scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.AttributeReference> nullableRowIdAttrs)
    • cannotRenameTableAcrossSchemaError

      public static Throwable cannotRenameTableAcrossSchemaError()
    • avroIncompatibleReadError

      public static Throwable avroIncompatibleReadError(String avroPath, String sqlPath, String avroType, String sqlType)
    • optionMustBeLiteralString

      public static Throwable optionMustBeLiteralString(String key)
    • optionMustBeConstant

      public static Throwable optionMustBeConstant(String key, scala.Option<Throwable> cause)
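      The second parameter is a Scala Option, so callers pass Some(cause) when an underlying failure was caught and None otherwise. A minimal sketch with an illustrative option key:

      import org.apache.spark.sql.errors.QueryCompilationErrors

      // The option value was simply not a constant expression; no underlying cause.
      throw QueryCompilationErrors.optionMustBeConstant("path", None)

      // With a captured cause (evaluationError is hypothetical):
      // throw QueryCompilationErrors.optionMustBeConstant("path", Some(evaluationError))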
    • tableValuedFunctionRequiredMetadataIncompatibleWithCall

      public static Throwable tableValuedFunctionRequiredMetadataIncompatibleWithCall(String functionName, String requestedMetadata, String invalidFunctionCallProperty)
    • tableValuedFunctionRequiredMetadataInvalid

      public static Throwable tableValuedFunctionRequiredMetadataInvalid(String functionName, String reason)
    • dataSourceAlreadyExists

      public static Throwable dataSourceAlreadyExists(String name)
    • dataSourceDoesNotExist

      public static Throwable dataSourceDoesNotExist(String name)
    • foundMultipleDataSources

      public static Throwable foundMultipleDataSources(String provider)
    • foundMultipleXMLDataSourceError

      public static Throwable foundMultipleXMLDataSourceError(String provider, scala.collection.immutable.Seq<String> sourceNames, String externalSource)
    • xmlRowTagRequiredError

      public static Throwable xmlRowTagRequiredError(String optionName)
    • invalidUDFClassError

      public static Throwable invalidUDFClassError(String invalidClass)
    • unsupportedParameterExpression

      public static Throwable unsupportedParameterExpression(org.apache.spark.sql.catalyst.expressions.Expression expr)
    • invalidQueryAllParametersMustBeNamed

      public static Throwable invalidQueryAllParametersMustBeNamed(scala.collection.immutable.Seq<org.apache.spark.sql.catalyst.expressions.Expression> expr)
    • invalidQueryMixedQueryParameters

      public static Throwable invalidQueryMixedQueryParameters()
    • invalidExecuteImmediateVariableType

      public static Throwable invalidExecuteImmediateVariableType(DataType dataType)
    • nullSQLStringExecuteImmediate

      public static Throwable nullSQLStringExecuteImmediate(String varName)
    • invalidStatementForExecuteInto

      public static Throwable invalidStatementForExecuteInto(String queryString)
    • nestedExecuteImmediate

      public static Throwable nestedExecuteImmediate(String queryString)
    • dataSourceTableSchemaMismatchError

      public static Throwable dataSourceTableSchemaMismatchError(StructType dsSchema, StructType expectedSchema)
    • cannotResolveDataFrameColumn

      public static Throwable cannotResolveDataFrameColumn(org.apache.spark.sql.catalyst.expressions.Expression e)
    • ambiguousColumnReferences

      public static Throwable ambiguousColumnReferences(org.apache.spark.sql.catalyst.expressions.Expression e)
    • createTableDeprecatedError

      public static Throwable createTableDeprecatedError()
    • cannotAssignEventTimeColumn

      public static Throwable cannotAssignEventTimeColumn()
    • toSQLId

      public static String toSQLId(String parts)
    • toSQLId

      public static String toSQLId(scala.collection.immutable.Seq<String> parts)
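      A minimal sketch of the two identifier-quoting overloads; they typically render multipart names with backtick quoting for use in error messages, though the exact output format is an internal detail.

      import org.apache.spark.sql.errors.QueryCompilationErrors

      QueryCompilationErrors.toSQLId("db.tbl.col")             // name parts in a single dotted string
      QueryCompilationErrors.toSQLId(Seq("db", "tbl", "col"))  // name parts already split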
    • toSQLStmt

      public static String toSQLStmt(String text)
    • toSQLConf

      public static String toSQLConf(String conf)
    • toSQLType

      public static String toSQLType(String text)
    • toSQLType

      public static String toSQLType(org.apache.spark.sql.types.AbstractDataType t)
    • toSQLValue

      public static String toSQLValue(String value)
    • toSQLValue

      public static String toSQLValue(org.apache.spark.unsafe.types.UTF8String value)
    • toSQLValue

      public static String toSQLValue(short value)
    • toSQLValue

      public static String toSQLValue(int value)
    • toSQLValue

      public static String toSQLValue(long value)
    • toSQLValue

      public static String toSQLValue(float value)
    • toSQLValue

      public static String toSQLValue(double value)
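      A minimal sketch of the message-formatting helpers above, which render statements, config keys, types, and literal values consistently inside error parameters. The comments describe intent only; the exact quoting is an internal detail.

      import org.apache.spark.sql.errors.QueryCompilationErrors
      import org.apache.spark.sql.types.IntegerType

      QueryCompilationErrors.toSQLStmt("insert into")             // statement text for a message
      QueryCompilationErrors.toSQLConf("spark.sql.ansi.enabled")  // configuration key
      QueryCompilationErrors.toSQLType(IntegerType)               // data type in SQL style
      QueryCompilationErrors.toSQLValue(42)                       // int rendered as a SQL literal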
    • getSummary

      public static String getSummary(QueryContext sqlContext)
    • getQueryContext

      public static QueryContext[] getQueryContext(QueryContext context)
    • toSQLConfVal

      public static String toSQLConfVal(String conf)
    • toDSOption

      public static String toDSOption(String option)
    • toSQLExpr

      public static String toSQLExpr(org.apache.spark.sql.catalyst.expressions.Expression e)
    • toSQLValue

      public static String toSQLValue(Object v, DataType t)
    • ordinalNumber

      public static String ordinalNumber(int i)
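      A minimal sketch of the remaining formatting helpers: ordinalNumber renders an index as an English ordinal for messages, and the typed toSQLValue overload renders a raw value as a SQL literal of the given type. The commented outputs are indicative, assuming the usual zero-based indexing, not guaranteed verbatim.

      import org.apache.spark.sql.errors.QueryCompilationErrors
      import org.apache.spark.sql.types.StringType
      import org.apache.spark.unsafe.types.UTF8String

      QueryCompilationErrors.ordinalNumber(0)                                   // e.g. "first"
      QueryCompilationErrors.toSQLValue(UTF8String.fromString("a"), StringType) // e.g. 'a'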
    • columnNotFoundError

      public static AnalysisException columnNotFoundError(String colName)
    • descriptorParseError

      public static AnalysisException descriptorParseError(Throwable cause)
    • cannotFindDescriptorFileError

      public static AnalysisException cannotFindDescriptorFileError(String filePath, Throwable cause)
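      These descriptor-related factories, like columnNotFoundError above, return AnalysisException directly rather than a plain Throwable. A minimal sketch with an illustrative descriptor file path and cause:

      import org.apache.spark.sql.AnalysisException
      import org.apache.spark.sql.errors.QueryCompilationErrors

      val cause = new java.io.FileNotFoundException("/tmp/events.desc")
      val ex: AnalysisException =
        QueryCompilationErrors.cannotFindDescriptorFileError("/tmp/events.desc", cause)
      throw ex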