Package org.apache.spark.sql.errors
Class QueryExecutionErrors
Object
org.apache.spark.sql.errors.QueryExecutionErrors
Object for grouping error messages from (most) exceptions thrown during query execution.
This does not include exceptions thrown during the eager execution of commands, which are
grouped into QueryCompilationErrors.
Constructor Summary
Constructors -
Method Summary
Modifier and TypeMethodDescriptionstatic org.apache.spark.SparkUnsupportedOperationExceptionaddFilesWithAbsolutePathUnsupportedError(String commitProtocol) static Throwablestatic RuntimeExceptionaesCryptoError(String detailMessage) static RuntimeExceptionaesModeUnsupportedError(String mode, String padding) static RuntimeExceptionaesUnsupportedAad(String mode) static RuntimeExceptionaesUnsupportedIv(String mode) static org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkDateTimeExceptionstatic org.apache.spark.SparkDateTimeExceptionstatic IllegalArgumentExceptionstatic org.apache.spark.SparkIllegalArgumentExceptionansiIllegalArgumentError(String message) static org.apache.spark.SparkArithmeticExceptionstatic Stringstatic org.apache.spark.sql.catalyst.trees.SQLQueryContextstatic org.apache.spark.SparkUnsupportedOperationExceptionattributesForTypeUnsupportedError(org.apache.spark.sql.catalyst.ScalaReflection.Schema schema) static org.apache.spark.SparkFileNotFoundExceptionbatchMetadataFileNotFoundError(org.apache.hadoop.fs.Path batchMetadataFile) static org.apache.spark.SparkArithmeticExceptionbinaryArithmeticCauseOverflowError(short eval1, String symbol, short eval2) static org.apache.spark.SparkUnsupportedOperationExceptionstatic ThrowablecannotAcquireMemoryToBuildLongHashedRelationError(long size, long got) static Throwablestatic org.apache.spark.SparkUnsupportedOperationExceptionstatic Throwablestatic ThrowablecannotBroadcastTableOverMaxTableBytesError(long maxBroadcastTableBytes, long dataSize) static ThrowablecannotBroadcastTableOverMaxTableRowsError(long maxBroadcastTableRows, long numRows) static org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionstatic ThrowablecannotCastError(DataType from, DataType to) static Throwablestatic ArithmeticExceptioncannotChangeDecimalPrecisionError(Decimal value, int decimalPrecision, int decimalScale, 
org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static ThrowablecannotClearOutputDirectoryError(org.apache.hadoop.fs.Path staticPrefixPath) static ThrowablecannotClearPartitionDirectoryError(org.apache.hadoop.fs.Path path) static org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkIllegalArgumentExceptionstatic ThrowablecannotConvertCatalystValueToProtobufEnumTypeError(scala.collection.Seq<String> sqlColumn, String protobufColumn, String data, String enumString) static Throwablestatic Throwablestatic org.apache.spark.SparkRuntimeExceptioncannotCreateArrayWithElementsExceedLimitError(long numElements, String additionalErrorMessage) static Throwablestatic org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkRuntimeExceptioncannotCreateParquetConverterForDataTypeError(DataType t, String parquetType) static org.apache.spark.SparkRuntimeExceptioncannotCreateParquetConverterForDecimalTypeError(DecimalType t, String parquetType) static org.apache.spark.SparkRuntimeExceptioncannotCreateParquetConverterForTypeError(DecimalType t, String parquetType) static org.apache.spark.SparkRuntimeExceptioncannotCreateStagingDirError(String message, IOException e) static org.apache.spark.SparkUnsupportedOperationExceptionstatic ThrowablecannotEvaluateExpressionError(org.apache.spark.sql.catalyst.expressions.Expression expression) static org.apache.spark.SparkUnsupportedOperationExceptionstatic ThrowablecannotFetchTablesOfDatabaseError(String dbName, Exception e) static org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptioncannotFindEncoderForTypeError(String typeName) static ThrowablecannotGenerateCodeForExpressionError(org.apache.spark.sql.catalyst.expressions.Expression expression) static org.apache.spark.SparkIllegalArgumentExceptioncannotGenerateCodeForIncomparableTypeError(String codeType, DataType dataType) static 
org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkIllegalArgumentExceptionstatic org.apache.spark.SparkRuntimeExceptioncannotGetOuterPointerForInnerClassError(Class<?> innerCls) static org.apache.spark.SparkRuntimeExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionstatic ThrowablecannotInstantiateAbstractCatalogPluginClassError(String name, String pluginClassName, Exception e) static org.apache.spark.SparkIllegalArgumentExceptionstatic ThrowablecannotLoadUserDefinedTypeError(String name, String userClass) static org.apache.spark.SparkUnsupportedOperationExceptioncannotMergeClassWithOtherClassError(String className, String otherClass) static ThrowablecannotMergeDecimalTypesWithIncompatibleScaleError(int leftScale, int rightScale) static ThrowablecannotMergeIncompatibleDataTypesError(DataType left, DataType right) static org.apache.spark.SparkUnsupportedOperationExceptionstatic Throwablestatic org.apache.spark.SparkRuntimeExceptioncannotParseJsonArraysAsStructsError(String recordStr) static org.apache.spark.SparkRuntimeExceptioncannotParseJSONFieldError(com.fasterxml.jackson.core.JsonParser parser, com.fasterxml.jackson.core.JsonToken jsonType, DataType dataType) static org.apache.spark.SparkRuntimeExceptioncannotParseJSONFieldError(String fieldName, String fieldValue, com.fasterxml.jackson.core.JsonToken jsonType, DataType dataType) static org.apache.spark.SparkIllegalArgumentExceptionstatic org.apache.spark.SparkRuntimeExceptioncannotParseStringAsDataTypeError(com.fasterxml.jackson.core.JsonParser parser, com.fasterxml.jackson.core.JsonToken token, DataType dataType) static org.apache.spark.SparkUnsupportedOperationExceptionstatic ThrowablecannotReadFilesError(Throwable e, String path) static ThrowablecannotReadFooterForFileError(org.apache.hadoop.fs.Path file, Exception e) static 
ThrowablecannotRecognizeHiveTypeError(org.apache.spark.sql.catalyst.parser.ParseException e, String fieldType, String fieldName) static org.apache.spark.SparkRuntimeExceptioncannotRemovePartitionDirError(org.apache.hadoop.fs.Path partitionPath) static org.apache.spark.SparkUnsupportedOperationExceptioncannotRemoveReservedPropertyError(String property) static ThrowablecannotRestorePermissionsForPathError(org.apache.hadoop.fs.permission.FsPermission permission, org.apache.hadoop.fs.Path path) static org.apache.spark.SparkUnsupportedOperationExceptioncannotSafelyMergeSerdePropertiesError(scala.collection.immutable.Map<String, String> props1, scala.collection.immutable.Map<String, String> props2, scala.collection.immutable.Set<String> conflictKeys) static org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkIllegalArgumentExceptioncannotSpecifyBothJdbcTableNameAndQueryError(String jdbcTableName, String jdbcQueryString) static ThrowablecannotTerminateGeneratorError(org.apache.spark.sql.catalyst.analysis.UnresolvedGenerator generator) static org.apache.spark.SparkIllegalArgumentExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptioncannotUseInvalidJavaIdentifierAsFieldNameError(String fieldName, org.apache.spark.sql.catalyst.WalkedTypePath walkedTypePath) static ArithmeticExceptioncastingCauseOverflowError(Object t, DataType from, DataType to) static ArithmeticExceptioncastingCauseOverflowErrorInTableInsert(DataType from, DataType to, String columnName) static ThrowablecatalogFailToCallPublicNoArgConstructorError(String name, String pluginClassName, Exception e) static ThrowablecatalogFailToFindPublicNoArgConstructorError(String name, String pluginClassName, Exception e) static Throwablestatic ThrowablecatalogPluginClassNotFoundForCatalogError(String name, String pluginClassName, Exception e) static ThrowablecatalogPluginClassNotImplementedError(String name, String 
pluginClassName) static org.apache.spark.SparkRuntimeExceptionclassHasUnexpectedSerializerError(String clsName, org.apache.spark.sql.catalyst.expressions.Expression objSerializer) static org.apache.spark.SparkRuntimeExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkSQLFeatureNotSupportedExceptionstatic ThrowablecommitDeniedError(int partId, long taskId, int attemptId, int stageId, int stageAttempt) static ThrowablecomparatorReturnsNull(String firstValue, String secondValue) static ThrowablecompilerError(org.codehaus.commons.compiler.CompileException e) static org.apache.spark.SparkRuntimeExceptionconcatArraysWithElementsExceedLimitError(long numberOfElements) static org.apache.spark.SparkConcurrentModificationExceptionstatic Throwablestatic ThrowableconcurrentStreamLogUpdate(long batchId) static org.apache.spark.SparkRuntimeExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionstatic ThrowableconvertHiveTableToCatalogTableError(SparkException e, String dbName, String tableName) static org.apache.spark.SparkRuntimeExceptionstatic org.apache.spark.SparkIllegalArgumentExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionstatic ThrowabledatabaseNameConflictWithSystemPreservedDatabaseError(String globalTempDB) static org.apache.spark.SparkIllegalArgumentExceptionstatic org.apache.spark.SparkClassNotFoundExceptiondataSourceNotFoundError(String provider, Throwable error) static org.apache.spark.SparkUnsupportedOperationExceptiondataTypeUnexpectedError(DataType dataType) static ThrowabledataTypeUnsupportedError(String dataType, String failure) static org.apache.spark.SparkUnsupportedOperationExceptiondataTypeUnsupportedYetError(DataType dataType) static org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkArithmeticExceptiondecimalPrecisionExceedsMaxPrecisionError(int precision, int maxPrecision) static 
org.apache.spark.SparkUnsupportedOperationExceptiondecorrelateInnerQueryThroughPlanUnsupportedError(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan) static ThrowabledefaultDatabaseNotExistsError(String defaultDatabase) static ArithmeticExceptiondivideByZeroError(org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static org.apache.spark.SparkUnsupportedOperationExceptiondoExecuteBroadcastNotImplementedError(String nodeName) static org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionduplicatedFieldNameInArrowStructError(scala.collection.Seq<String> fieldNames) static org.apache.spark.SparkRuntimeExceptionstatic Throwablestatic Throwablestatic org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkRuntimeExceptionemptyJsonFieldValueError(DataType dataType) static org.apache.spark.SparkIllegalArgumentExceptionemptyOptionError(String optionName) static Throwablestatic Throwablestatic org.apache.spark.SparkRuntimeExceptionexceedMapSizeLimitError(int size) static org.apache.spark.SparkRuntimeExceptionexceedMaxLimit(int limit) static ThrowableexecuteBroadcastTimeoutError(long timeout, scala.Option<TimeoutException> ex) static org.apache.spark.SparkUnsupportedOperationExceptionexecuteCodePathUnsupportedError(String execName) static org.apache.spark.SparkRuntimeExceptionexpressionDecodingError(Exception e, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> expressions) static org.apache.spark.SparkRuntimeExceptionexpressionEncodingError(Exception e, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> expressions) static ThrowablefailedExecuteUserDefinedFunctionError(String functionName, String inputTypes, String outputType, Throwable e) static ThrowablefailedMergingSchemaError(StructType leftSchema, StructType rightSchema, SparkException e) static org.apache.spark.SparkRuntimeExceptionstatic 
ThrowablefailedRenameTempFileError(org.apache.hadoop.fs.Path srcPath, org.apache.hadoop.fs.Path dstPath) static org.apache.spark.SparkRuntimeExceptionfailedToCastValueToDataTypeForPartitionColumnError(String value, DataType dataType, String columnName) static Stringstatic SparkExceptionstatic ThrowablefailedToGenerateEpochMarkerError(Throwable failureReason) static ThrowablefailedToInstantiateConstructorForCatalogError(String name, String pluginClassName, Exception e) static ThrowablefailedToMergeIncompatibleSchemasError(StructType left, StructType right, Throwable e) static ThrowablefailedToPushRowIntoRowQueueError(String rowQueue) static ThrowablefailedToReadDataError(Throwable failureReason) static ThrowablefailedToReadDeltaFileError(org.apache.hadoop.fs.Path fileToRead, String clazz, int keySize) static ThrowablefailedToReadSnapshotFileError(org.apache.hadoop.fs.Path fileToRead, String clazz, String message) static org.apache.spark.SparkRuntimeExceptionfailToConvertValueToJsonError(Object value, Class<?> cls, DataType dataType) static org.apache.spark.SparkUpgradeExceptionstatic org.apache.spark.SparkUpgradeExceptionfailToRecognizePatternAfterUpgradeError(String pattern, Throwable e) static org.apache.spark.SparkRuntimeExceptionfailToRecognizePatternError(String pattern, Throwable e) static org.apache.spark.SparkSecurityExceptionfailToSetOriginalACLBackError(String aclEntries, org.apache.hadoop.fs.Path path, Throwable e) static org.apache.spark.SparkIllegalArgumentExceptionfallbackV1RelationReportsInconsistentSchemaError(StructType v2Schema, StructType v1Schema) static org.apache.spark.SparkRuntimeExceptionfieldCannotBeNullError(int index, String fieldName) static StringfieldCannotBeNullMsg(int index, String fieldName) static org.apache.spark.SparkDateTimeExceptionfieldDiffersFromDerivedLocalDateError(ChronoField field, int actual, int expected, LocalDate candidate) static org.apache.spark.SparkUnsupportedOperationExceptionstatic 
ThrowablefileLengthExceedsMaxLengthError(org.apache.hadoop.fs.FileStatus status, int maxLength) static org.apache.spark.SparkFileNotFoundExceptionstatic org.apache.spark.SparkRuntimeExceptionflattenArraysWithElementsExceedLimitError(long numberOfElements) static Throwablestatic org.apache.spark.SparkRuntimeExceptionfoundDuplicateFieldInCaseInsensitiveModeError(String requiredFieldName, String matchedOrcFields) static org.apache.spark.SparkRuntimeExceptionfoundDuplicateFieldInFieldIdLookupModeError(int requiredId, String matchedFields) static org.apache.spark.SparkRuntimeExceptionstatic org.apache.spark.SparkSQLFeatureNotSupportedExceptiongetParentLoggerNotImplementedError(String className) static org.apache.spark.SparkRuntimeExceptionstatic QueryContext[]getQueryContext(org.apache.spark.sql.catalyst.trees.SQLQueryContext sqlContext) static StringgetSummary(org.apache.spark.sql.catalyst.trees.SQLQueryContext sqlContext) static org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionhiveTableWithAnsiIntervalsError(org.apache.spark.sql.catalyst.TableIdentifier table) static ThrowablehllInvalidInputSketchBuffer(String function) static ThrowablehllInvalidLgK(String function, int min, int max, String value) static ThrowablehllUnionDifferentLgK(int left, int right, String function) static Throwablestatic ThrowableillegalUrlError(org.apache.spark.unsafe.types.UTF8String url) static Throwablestatic ThrowableincorrectEndOffset(long rowsPerSecond, long maxSeconds, long endSeconds) static ThrowableincorrectRampUpRate(long rowsPerSecond, long maxSeconds, long rampUpTimeSeconds) static org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptioninitialTypeNotTargetDataTypeError(DataType dataType, String target) static org.apache.spark.SparkUnsupportedOperationExceptioninitialTypeNotTargetDataTypesError(DataType dataType) static org.apache.spark.SparkRuntimeExceptionstatic 
Throwablestatic ThrowableinternalCompilerError(org.codehaus.commons.compiler.InternalCompilerException e) static ArithmeticExceptionintervalArithmeticOverflowError(String message, String hint, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static ArithmeticExceptionintervalDividedByZeroError(org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static RuntimeExceptioninvalidAesIvLengthError(String mode, int actualLength) static RuntimeExceptioninvalidAesKeyLengthError(int actualLength) invalidArrayIndexError(int index, int numElements, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) invalidBitmapPositionError(long bitPosition, long bitmapNumBytes) static ThrowableinvalidBucketFile(String path) static ThrowableinvalidElementAtIndexError(int index, int numElements, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static org.apache.spark.SparkIllegalArgumentExceptioninvalidEmptyLocationError(String location) static DateTimeExceptionstatic RuntimeExceptioninvalidIndexOfZeroError(org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static org.apache.spark.SparkDateTimeExceptioninvalidInputInCastToDatetimeError(double value, DataType to, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static org.apache.spark.SparkDateTimeExceptioninvalidInputInCastToDatetimeError(org.apache.spark.unsafe.types.UTF8String value, DataType to, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static org.apache.spark.SparkNumberFormatExceptioninvalidInputInCastToNumberError(DataType to, org.apache.spark.unsafe.types.UTF8String s, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static org.apache.spark.SparkIllegalArgumentExceptioninvalidInputInConversionError(DataType to, org.apache.spark.unsafe.types.UTF8String s, org.apache.spark.unsafe.types.UTF8String fmt, String hint) static org.apache.spark.SparkRuntimeExceptioninvalidInputSyntaxForBooleanError(org.apache.spark.unsafe.types.UTF8String s, 
org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static org.apache.spark.SparkIllegalArgumentExceptioninvalidJdbcNumPartitionsError(int n, String jdbcNumPartitions) static org.apache.spark.SparkIllegalArgumentExceptioninvalidJdbcTxnIsolationLevelError(String jdbcTxnIsolationLevel, String value) static Throwablestatic org.apache.spark.SparkIllegalArgumentExceptioninvalidNamespaceNameError(String[] namespace) static org.apache.spark.SparkIllegalArgumentExceptioninvalidNumberFormatError(DataType dataType, String input, String format) static org.apache.spark.SparkUnsupportedOperationExceptionstatic RuntimeExceptioninvalidPatternError(String funcName, String pattern, Throwable cause) static RuntimeExceptioninvalidRegexGroupIndexError(String funcName, int groupCount, int groupIndex) static org.apache.spark.SparkArrayIndexOutOfBoundsExceptioninvalidStartIndexError(int numRows, int startIndex) static org.apache.spark.SparkUnsupportedOperationExceptioninvalidStreamingOutputModeError(scala.Option<OutputMode> outputMode) static org.apache.spark.SparkIllegalArgumentExceptioninvalidUrlError(org.apache.spark.unsafe.types.UTF8String url, URISyntaxException e) static org.apache.spark.SparkUnsupportedOperationExceptionstatic ThrowablelegacyCheckpointDirectoryExistsError(org.apache.hadoop.fs.Path checkpointPath, String legacyCheckpointDir) static ThrowablelegacyMetadataPathExistsError(org.apache.hadoop.fs.Path metadataPath, org.apache.hadoop.fs.Path legacyMetadataPath) static RuntimeExceptionstatic org.apache.spark.SparkClassNotFoundExceptionloadHiveClientCausesNoClassDefFoundError(NoClassDefFoundError cnf, scala.collection.Seq<URL> execJars, String key, InvocationTargetException e) static ThrowablelocationAlreadyExists(org.apache.spark.sql.catalyst.TableIdentifier tableId, org.apache.hadoop.fs.Path location) static org.apache.spark.SparkRuntimeExceptionmalformedCSVRecordError(String badRecord) static Throwablestatic Throwablestatic 
ThrowablemalformedRecordsDetectedInRecordParsingError(String badRecord, org.apache.spark.sql.catalyst.util.BadRecordException e) static Throwablestatic Throwablestatic org.apache.spark.SparkRuntimeExceptionstatic org.apache.spark.SparkRuntimeExceptionmapSizeExceedArraySizeWhenZipMapError(int size) static org.apache.spark.SparkRuntimeExceptionstatic ThrowablemergeUnsupportedByWindowFunctionError(String funcName) static org.apache.spark.SparkRuntimeExceptionstatic ThrowablemethodNotDeclaredError(String name) static ThrowablemethodNotFoundError(Class<?> cls, String functionName, scala.collection.Seq<Class<?>> argClasses) static org.apache.spark.SparkUnsupportedOperationExceptionmethodNotImplementedError(String methodName) static org.apache.spark.SparkUnsupportedOperationExceptionmicroBatchUnsupportedByDataSourceError(String srcName, String disabledSources, Table table) static org.apache.spark.SparkIllegalArgumentExceptionstatic org.apache.spark.SparkIllegalArgumentExceptionmissingJdbcTableNameAndQueryError(String jdbcTableName, String jdbcQueryString) static ThrowablemultiActionAlterError(String tableName) static Throwablestatic org.apache.spark.SparkIllegalArgumentExceptionmultiplePathsSpecifiedError(scala.collection.Seq<String> allPaths) static ThrowablemultipleRowSubqueryError(org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static org.apache.spark.SparkConcurrentModificationExceptionmultiStreamingQueriesUsingPathConcurrentlyError(String path, org.apache.hadoop.fs.FileAlreadyExistsException e) static org.apache.spark.SparkIllegalArgumentExceptionnegativeValueUnexpectedError(org.apache.spark.sql.catalyst.expressions.Expression frequencyExpression) static org.apache.spark.SparkIllegalArgumentExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionnestedFieldUnsupportedError(String colName) static SparkExceptionnoDefaultForDataTypeError(DataType dataType) static 
AnalysisExceptionnonTimeWindowNotSupportedInStreamingError(scala.collection.Seq<String> windowFuncList, scala.collection.Seq<String> columnNameList, scala.collection.Seq<String> windowSpecList, org.apache.spark.sql.catalyst.trees.Origin origin) static Throwablestatic Throwablestatic Throwablestatic ThrowablenotEnoughMemoryToBuildAndBroadcastTableError(OutOfMemoryError oe, scala.collection.Seq<org.apache.spark.sql.catalyst.TableIdentifier> tables) static org.apache.spark.SparkRuntimeExceptionnotExpectedUnresolvedEncoderError(org.apache.spark.sql.catalyst.expressions.AttributeReference attr) static org.apache.spark.SparkRuntimeExceptionnotOverrideExpectedMethodsError(String className, String m1, String m2) static org.apache.spark.SparkUnsupportedOperationExceptionnotPublicClassError(String name) static org.apache.spark.SparkRuntimeExceptionstatic ThrowablenotSupportTypeError(DataType dataType) static ThrowablenotUserDefinedTypeError(String name, String userClass) static org.apache.spark.SparkRuntimeExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionstatic <TreeType extends org.apache.spark.sql.catalyst.trees.TreeNode<?>>
org.apache.spark.SparkRuntimeExceptiononceStrategyIdempotenceIsBrokenForBatchError(String batchName, TreeType plan, TreeType reOptimized) static ThrowableonlySupportDataSourcesProvidingFileFormatError(String providingClass) static org.apache.spark.SparkIllegalArgumentExceptionstatic org.apache.spark.SparkIllegalArgumentExceptionstatic org.apache.spark.SparkArithmeticExceptionoutOfDecimalTypeRangeError(org.apache.spark.unsafe.types.UTF8String str) static ThrowableoutputDataTypeUnsupportedByNodeWithoutSerdeError(String nodeName, DataType dt) static ArithmeticExceptionoverflowInConvError(org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static ArithmeticExceptionoverflowInIntegralDivideError(org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static ArithmeticExceptionoverflowInSumOfDecimalError(org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static Throwablestatic org.apache.spark.SparkUnsupportedOperationExceptionpairUnsupportedAtFunctionError(org.apache.spark.sql.catalyst.plans.logical.statsEstimation.ValueInterval r1, org.apache.spark.sql.catalyst.plans.logical.statsEstimation.ValueInterval r2, String function) static org.apache.spark.SparkRuntimeExceptionparamExceedOneCharError(String paramName) static ThrowableparamIsNotBooleanValueError(String paramName) static org.apache.spark.SparkRuntimeExceptionparamIsNotIntegerError(String paramName, String value) static Throwablestatic org.apache.spark.SparkRuntimeExceptionpartitionColumnNotFoundInSchemaError(String col, StructType schema) static RuntimeExceptionpivotColumnUnsupportedError(Object v, DataType dataType) static Throwablestatic org.apache.spark.SparkRuntimeExceptionprimaryConstructorNotFoundError(Class<?> cls) static org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkFileNotFoundExceptionstatic Throwablestatic org.apache.spark.SparkRuntimeExceptionstatic org.apache.spark.SparkClassNotFoundExceptionremovedClassInSpark2Error(String className, 
Throwable e) static org.apache.spark.SparkFileAlreadyExistsExceptionrenameAsExistsPathError(org.apache.hadoop.fs.Path dstPath) static org.apache.spark.SparkSQLFeatureNotSupportedExceptionstatic ThrowablerenamePathAsExistsPathError(org.apache.hadoop.fs.Path srcPath, org.apache.hadoop.fs.Path dstPath) static ThrowablerenameSrcPathNotFoundError(org.apache.hadoop.fs.Path srcPath) static ThrowablerepeatedPivotsUnsupportedError(String clause, String operation) static ThrowablerequestedPartitionsMismatchTablePartitionsError(org.apache.spark.sql.catalyst.catalog.CatalogTable table, scala.collection.immutable.Map<String, scala.Option<String>> partition) static org.apache.spark.SparkRuntimeExceptionresolveCannotHandleNestedSchema(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan) static org.apache.spark.SparkRuntimeExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionstatic ThrowableruleIdNotFoundForRuleError(String ruleName) static ThrowablesaveModeUnsupportedError(Object saveMode, boolean pathExists) static org.apache.spark.SparkClassNotFoundExceptionstatic org.apache.spark.SparkUpgradeExceptionsparkUpgradeInReadingDatesError(String format, String config, String option) static org.apache.spark.SparkUpgradeExceptionsparkUpgradeInWritingDatesError(String format, String config) static org.apache.spark.SparkNoSuchElementExceptionstatic Throwablestatic org.apache.spark.SparkIllegalArgumentExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionstreamedOperatorUnsupportedByDataSourceError(String className, String operator) static ThrowablesubprocessExitedError(int exitCode, org.apache.spark.util.CircularBuffer stderrBuffer, Throwable cause) static ThrowabletaskFailedWhileWritingRowsError(String path, Throwable cause) static ArithmeticExceptiontimestampAddOverflowError(long micros, int amount, String unit) static StringtoDSOption(String option) static org.apache.spark.SparkIllegalArgumentExceptiontooManyArrayElementsError(int numElements, int 
elementSize) static Stringstatic StringtoSQLConfVal(String conf) static StringtoSQLExpr(org.apache.spark.sql.catalyst.expressions.Expression e) static Stringstatic Stringstatic StringtoSQLSchema(String schema) static Stringstatic Stringstatic StringtoSQLType(org.apache.spark.sql.types.AbstractDataType t) static StringtoSQLValue(double value) static StringtoSQLValue(float value) static StringtoSQLValue(int value) static StringtoSQLValue(long value) static StringtoSQLValue(short value) static StringtoSQLValue(Object v, DataType t) static StringtoSQLValue(String value) static StringtoSQLValue(org.apache.spark.unsafe.types.UTF8String value) static Throwablestatic org.apache.spark.SparkUnsupportedOperationExceptiontruncateMultiPartitionUnsupportedError(String tableName) static org.apache.spark.SparkIllegalArgumentExceptiontypeUnsupportedError(DataType dataType) static ThrowableunableToCreateDatabaseAsFailedToCreateDirectoryError(org.apache.spark.sql.catalyst.catalog.CatalogDatabase dbDefinition, IOException e) static ThrowableunableToCreatePartitionPathError(org.apache.hadoop.fs.Path partitionPath, IOException e) static ThrowableunableToCreateTableAsFailedToCreateDirectoryError(String table, org.apache.hadoop.fs.Path defaultTableLocation, IOException e) static ThrowableunableToDeletePartitionPathError(org.apache.hadoop.fs.Path partitionPath, IOException e) static ThrowableunableToDropDatabaseAsFailedToDeleteDirectoryError(org.apache.spark.sql.catalyst.catalog.CatalogDatabase dbDefinition, IOException e) static ThrowableunableToDropTableAsFailedToDeleteDirectoryError(String table, org.apache.hadoop.fs.Path dir, IOException e) static ThrowableunableToRenamePartitionPathError(org.apache.hadoop.fs.Path oldPartPath, IOException e) static ThrowableunableToRenameTableAsFailedToRenameDirectoryError(String oldName, String newName, org.apache.hadoop.fs.Path oldDir, IOException e) static org.apache.spark.SparkArithmeticExceptionunaryMinusCauseOverflowError(int originValue) static 
org.apache.spark.SparkIllegalArgumentExceptionstatic org.apache.spark.SparkRuntimeExceptionunexpectedOperatorInCorrelatedSubquery(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan op, String pos) static org.apache.spark.SparkRuntimeExceptionunexpectedValueForLengthInFunctionError(String prettyName) static org.apache.spark.SparkRuntimeExceptionunexpectedValueForStartInFunctionError(String prettyName) static org.apache.spark.SparkRuntimeExceptionstatic org.apache.spark.SparkRuntimeExceptionunionArrayWithElementsExceedLimitError(int length) static org.apache.spark.SparkIllegalArgumentExceptionunknownColumnError(String unknownColumn) static org.apache.spark.SparkRuntimeExceptionunreachableError(String err) static Stringstatic org.apache.spark.SparkUnsupportedOperationExceptionunrecognizedCompressionSchemaTypeIDError(int typeId) static ThrowableunrecognizedSqlTypeError(String jdbcTypeId, String typeName) static ArithmeticExceptionunscaledValueTooLargeForPrecisionError(Decimal value, int decimalPrecision, int decimalScale, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static org.apache.spark.SparkIllegalArgumentExceptionstatic org.apache.spark.SparkRuntimeExceptionunsupportedArrayTypeError(Class<?> clazz) static org.apache.spark.SparkUnsupportedOperationExceptionunsupportedArrowTypeError(org.apache.arrow.vector.types.pojo.ArrowType typeName) static org.apache.spark.SparkSQLFeatureNotSupportedExceptionunsupportedCommentNamespaceError(String namespace) static org.apache.spark.SparkUnsupportedOperationExceptionunsupportedDataTypeError(DataType typeName) static org.apache.spark.SparkSQLFeatureNotSupportedExceptionunsupportedDropNamespaceError(String namespace) static org.apache.spark.SparkRuntimeExceptionstatic org.apache.spark.SparkRuntimeExceptionunsupportedFieldNameError(String fieldName) static org.apache.spark.SparkUnsupportedOperationExceptionunsupportedHiveMetastoreVersionError(String version, String key) static 
org.apache.spark.SparkRuntimeExceptionunsupportedJavaTypeError(Class<?> clazz) static org.apache.spark.SparkSQLExceptionunsupportedJdbcTypeError(String content) static org.apache.spark.SparkUnsupportedOperationExceptionstatic SparkExceptionunsupportedNaturalJoinTypeError(org.apache.spark.sql.catalyst.plans.JoinType joinType) static org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionunsupportedOperationForDataTypeError(DataType dataType) static org.apache.spark.SparkUnsupportedOperationExceptionunsupportedPartitionTransformError(Transform transform) static org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionstatic org.apache.spark.SparkSQLFeatureNotSupportedExceptionunsupportedRemoveNamespaceCommentError(String namespace) static SparkExceptionunsupportedRoundingMode(scala.Enumeration.Value roundMode) static ThrowableunsupportedSchemaColumnConvertError(String filePath, String column, String logicalType, String physicalType, Exception e) static Throwablestatic ThrowableunsupportedTypeError(DataType dataType) static org.apache.spark.SparkSQLFeatureNotSupportedExceptionstatic org.apache.spark.SparkUnsupportedOperationExceptionstatic Throwablestatic Throwablestatic org.apache.spark.SparkUnsupportedOperationExceptionstatic ThrowablevalueIsNullError(int index) static ThrowablewritePartitionExceedConfigSizeWhenDynamicPartitionError(int numWrittenParts, int maxDynamicPartitions, String maxDynamicPartitionsKey) static org.apache.spark.SparkUnsupportedOperationExceptionstatic ThrowablewritingJobFailedError(Throwable cause)
-
Constructor Details
-
QueryExecutionErrors
public QueryExecutionErrors()
-
-
Method Details
-
cannotEvaluateExpressionError
public static Throwable cannotEvaluateExpressionError(org.apache.spark.sql.catalyst.expressions.Expression expression) -
cannotGenerateCodeForExpressionError
public static Throwable cannotGenerateCodeForExpressionError(org.apache.spark.sql.catalyst.expressions.Expression expression) -
cannotTerminateGeneratorError
public static Throwable cannotTerminateGeneratorError(org.apache.spark.sql.catalyst.analysis.UnresolvedGenerator generator) -
castingCauseOverflowError
-
castingCauseOverflowErrorInTableInsert
public static ArithmeticException castingCauseOverflowErrorInTableInsert(DataType from, DataType to, String columnName) -
cannotChangeDecimalPrecisionError
public static ArithmeticException cannotChangeDecimalPrecisionError(Decimal value, int decimalPrecision, int decimalScale, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
invalidInputSyntaxForBooleanError
public static org.apache.spark.SparkRuntimeException invalidInputSyntaxForBooleanError(org.apache.spark.unsafe.types.UTF8String s, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
invalidInputInCastToNumberError
public static org.apache.spark.SparkNumberFormatException invalidInputInCastToNumberError(DataType to, org.apache.spark.unsafe.types.UTF8String s, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
invalidInputInConversionError
-
cannotCastFromNullTypeError
-
cannotCastError
-
cannotParseDecimalError
-
dataTypeUnsupportedError
-
failedExecuteUserDefinedFunctionError
-
divideByZeroError
public static ArithmeticException divideByZeroError(org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
intervalDividedByZeroError
public static ArithmeticException intervalDividedByZeroError(org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
invalidArrayIndexError
public static ArrayIndexOutOfBoundsException invalidArrayIndexError(int index, int numElements, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
invalidElementAtIndexError
public static ArrayIndexOutOfBoundsException invalidElementAtIndexError(int index, int numElements, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
invalidBitmapPositionError
public static ArrayIndexOutOfBoundsException invalidBitmapPositionError(long bitPosition, long bitmapNumBytes) -
invalidFractionOfSecondError
-
ansiDateTimeParseError
-
ansiDateTimeError
-
ansiIllegalArgumentError
public static org.apache.spark.SparkIllegalArgumentException ansiIllegalArgumentError(String message) -
ansiIllegalArgumentError
-
overflowInSumOfDecimalError
public static ArithmeticException overflowInSumOfDecimalError(org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
overflowInIntegralDivideError
public static ArithmeticException overflowInIntegralDivideError(org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
overflowInConvError
public static ArithmeticException overflowInConvError(org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
mapSizeExceedArraySizeWhenZipMapError
public static org.apache.spark.SparkRuntimeException mapSizeExceedArraySizeWhenZipMapError(int size) -
literalTypeUnsupportedError
-
pivotColumnUnsupportedError
-
noDefaultForDataTypeError
-
orderedOperationUnsupportedByDataTypeError
public static org.apache.spark.SparkIllegalArgumentException orderedOperationUnsupportedByDataTypeError(DataType dataType) -
orderedOperationUnsupportedByDataTypeError
public static org.apache.spark.SparkIllegalArgumentException orderedOperationUnsupportedByDataTypeError(String dataType) -
invalidRegexGroupIndexError
public static RuntimeException invalidRegexGroupIndexError(String funcName, int groupCount, int groupIndex) -
invalidUrlError
public static org.apache.spark.SparkIllegalArgumentException invalidUrlError(org.apache.spark.unsafe.types.UTF8String url, URISyntaxException e) -
illegalUrlError
-
mergeUnsupportedByWindowFunctionError
-
dataTypeUnexpectedError
public static org.apache.spark.SparkUnsupportedOperationException dataTypeUnexpectedError(DataType dataType) -
typeUnsupportedError
public static org.apache.spark.SparkIllegalArgumentException typeUnsupportedError(DataType dataType) -
negativeValueUnexpectedError
public static org.apache.spark.SparkIllegalArgumentException negativeValueUnexpectedError(org.apache.spark.sql.catalyst.expressions.Expression frequencyExpression) -
addNewFunctionMismatchedWithFunctionError
-
cannotGenerateCodeForIncomparableTypeError
-
cannotInterpolateClassIntoCodeBlockError
public static org.apache.spark.SparkIllegalArgumentException cannotInterpolateClassIntoCodeBlockError(Object arg) -
customCollectionClsNotResolvedError
public static org.apache.spark.SparkUnsupportedOperationException customCollectionClsNotResolvedError() -
classUnsupportedByMapObjectsError
public static org.apache.spark.SparkRuntimeException classUnsupportedByMapObjectsError(Class<?> cls) -
nullAsMapKeyNotAllowedError
public static org.apache.spark.SparkRuntimeException nullAsMapKeyNotAllowedError() -
methodNotDeclaredError
-
methodNotFoundError
-
constructorNotFoundError
-
unsupportedNaturalJoinTypeError
public static SparkException unsupportedNaturalJoinTypeError(org.apache.spark.sql.catalyst.plans.JoinType joinType) -
notExpectedUnresolvedEncoderError
public static org.apache.spark.SparkRuntimeException notExpectedUnresolvedEncoderError(org.apache.spark.sql.catalyst.expressions.AttributeReference attr) -
unsupportedEncoderError
public static org.apache.spark.SparkRuntimeException unsupportedEncoderError() -
notOverrideExpectedMethodsError
-
failToConvertValueToJsonError
-
unsupportedRoundingMode
-
resolveCannotHandleNestedSchema
public static org.apache.spark.SparkRuntimeException resolveCannotHandleNestedSchema(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan) -
inputExternalRowCannotBeNullError
public static org.apache.spark.SparkRuntimeException inputExternalRowCannotBeNullError() -
fieldCannotBeNullMsg
-
fieldCannotBeNullError
public static org.apache.spark.SparkRuntimeException fieldCannotBeNullError(int index, String fieldName) -
unableToCreateDatabaseAsFailedToCreateDirectoryError
public static Throwable unableToCreateDatabaseAsFailedToCreateDirectoryError(org.apache.spark.sql.catalyst.catalog.CatalogDatabase dbDefinition, IOException e) -
unableToDropDatabaseAsFailedToDeleteDirectoryError
public static Throwable unableToDropDatabaseAsFailedToDeleteDirectoryError(org.apache.spark.sql.catalyst.catalog.CatalogDatabase dbDefinition, IOException e) -
unableToCreateTableAsFailedToCreateDirectoryError
public static Throwable unableToCreateTableAsFailedToCreateDirectoryError(String table, org.apache.hadoop.fs.Path defaultTableLocation, IOException e) -
unableToDeletePartitionPathError
public static Throwable unableToDeletePartitionPathError(org.apache.hadoop.fs.Path partitionPath, IOException e) -
unableToDropTableAsFailedToDeleteDirectoryError
public static Throwable unableToDropTableAsFailedToDeleteDirectoryError(String table, org.apache.hadoop.fs.Path dir, IOException e) -
unableToRenameTableAsFailedToRenameDirectoryError
public static Throwable unableToRenameTableAsFailedToRenameDirectoryError(String oldName, String newName, org.apache.hadoop.fs.Path oldDir, IOException e) -
unableToCreatePartitionPathError
public static Throwable unableToCreatePartitionPathError(org.apache.hadoop.fs.Path partitionPath, IOException e) -
unableToRenamePartitionPathError
public static Throwable unableToRenamePartitionPathError(org.apache.hadoop.fs.Path oldPartPath, IOException e) -
methodNotImplementedError
public static org.apache.spark.SparkUnsupportedOperationException methodNotImplementedError(String methodName) -
arithmeticOverflowError
public static org.apache.spark.SparkArithmeticException arithmeticOverflowError(ArithmeticException e) -
unaryMinusCauseOverflowError
public static org.apache.spark.SparkArithmeticException unaryMinusCauseOverflowError(int originValue) -
binaryArithmeticCauseOverflowError
public static org.apache.spark.SparkArithmeticException binaryArithmeticCauseOverflowError(short eval1, String symbol, short eval2) -
intervalArithmeticOverflowError
public static ArithmeticException intervalArithmeticOverflowError(String message, String hint, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
failedToCompileMsg
-
internalCompilerError
public static Throwable internalCompilerError(org.codehaus.commons.compiler.InternalCompilerException e) -
compilerError
-
unsupportedTableChangeError
-
notADatasourceRDDPartitionError
-
dataPathNotSpecifiedError
public static org.apache.spark.SparkIllegalArgumentException dataPathNotSpecifiedError() -
createStreamingSourceNotSpecifySchemaError
public static org.apache.spark.SparkIllegalArgumentException createStreamingSourceNotSpecifySchemaError() -
streamedOperatorUnsupportedByDataSourceError
-
nonTimeWindowNotSupportedInStreamingError
public static AnalysisException nonTimeWindowNotSupportedInStreamingError(scala.collection.Seq<String> windowFuncList, scala.collection.Seq<String> columnNameList, scala.collection.Seq<String> windowSpecList, org.apache.spark.sql.catalyst.trees.Origin origin) -
multiplePathsSpecifiedError
public static org.apache.spark.SparkIllegalArgumentException multiplePathsSpecifiedError(scala.collection.Seq<String> allPaths) -
dataSourceNotFoundError
-
removedClassInSpark2Error
-
incompatibleDataSourceRegisterError
-
sparkUpgradeInReadingDatesError
-
sparkUpgradeInWritingDatesError
-
buildReaderUnsupportedForFileFormatError
public static org.apache.spark.SparkUnsupportedOperationException buildReaderUnsupportedForFileFormatError(String format) -
taskFailedWhileWritingRowsError
-
readCurrentFileNotFoundError
public static org.apache.spark.SparkFileNotFoundException readCurrentFileNotFoundError(FileNotFoundException e) -
saveModeUnsupportedError
-
cannotClearOutputDirectoryError
-
cannotClearPartitionDirectoryError
-
failedToCastValueToDataTypeForPartitionColumnError
-
endOfStreamError
-
fallbackV1RelationReportsInconsistentSchemaError
public static org.apache.spark.SparkIllegalArgumentException fallbackV1RelationReportsInconsistentSchemaError(StructType v2Schema, StructType v1Schema) -
noRecordsFromEmptyDataReaderError
-
fileNotFoundError
public static org.apache.spark.SparkFileNotFoundException fileNotFoundError(FileNotFoundException e) -
unsupportedSchemaColumnConvertError
-
cannotReadFilesError
-
cannotCreateColumnarReaderError
-
invalidNamespaceNameError
public static org.apache.spark.SparkIllegalArgumentException invalidNamespaceNameError(String[] namespace) -
unsupportedPartitionTransformError
public static org.apache.spark.SparkUnsupportedOperationException unsupportedPartitionTransformError(Transform transform) -
missingDatabaseLocationError
public static org.apache.spark.SparkIllegalArgumentException missingDatabaseLocationError() -
cannotRemoveReservedPropertyError
public static org.apache.spark.SparkUnsupportedOperationException cannotRemoveReservedPropertyError(String property) -
writingJobFailedError
-
commitDeniedError
public static Throwable commitDeniedError(int partId, long taskId, int attemptId, int stageId, int stageAttempt) -
cannotCreateJDBCTableWithPartitionsError
public static org.apache.spark.SparkUnsupportedOperationException cannotCreateJDBCTableWithPartitionsError() -
unsupportedUserSpecifiedSchemaError
public static org.apache.spark.SparkUnsupportedOperationException unsupportedUserSpecifiedSchemaError() -
writeUnsupportedForBinaryFileDataSourceError
public static org.apache.spark.SparkUnsupportedOperationException writeUnsupportedForBinaryFileDataSourceError() -
fileLengthExceedsMaxLengthError
public static Throwable fileLengthExceedsMaxLengthError(org.apache.hadoop.fs.FileStatus status, int maxLength) -
unsupportedFieldNameError
-
cannotSpecifyBothJdbcTableNameAndQueryError
-
missingJdbcTableNameAndQueryError
-
emptyOptionError
-
invalidJdbcTxnIsolationLevelError
-
cannotGetJdbcTypeError
-
unrecognizedSqlTypeError
-
unsupportedJdbcTypeError
-
unsupportedArrayElementTypeBasedOnBinaryError
public static org.apache.spark.SparkIllegalArgumentException unsupportedArrayElementTypeBasedOnBinaryError(DataType dt) -
nestedArraysUnsupportedError
public static org.apache.spark.SparkIllegalArgumentException nestedArraysUnsupportedError() -
cannotTranslateNonNullValueForFieldError
public static org.apache.spark.SparkIllegalArgumentException cannotTranslateNonNullValueForFieldError(int pos) -
invalidJdbcNumPartitionsError
public static org.apache.spark.SparkIllegalArgumentException invalidJdbcNumPartitionsError(int n, String jdbcNumPartitions) -
multiActionAlterError
-
dataTypeUnsupportedYetError
public static org.apache.spark.SparkUnsupportedOperationException dataTypeUnsupportedYetError(DataType dataType) -
unsupportedOperationForDataTypeError
public static org.apache.spark.SparkUnsupportedOperationException unsupportedOperationForDataTypeError(DataType dataType) -
inputFilterNotFullyConvertibleError
-
foundDuplicateFieldInCaseInsensitiveModeError
-
foundDuplicateFieldInFieldIdLookupModeError
public static org.apache.spark.SparkRuntimeException foundDuplicateFieldInFieldIdLookupModeError(int requiredId, String matchedFields) -
failedToMergeIncompatibleSchemasError
public static Throwable failedToMergeIncompatibleSchemasError(StructType left, StructType right, Throwable e) -
ddlUnsupportedTemporarilyError
public static org.apache.spark.SparkUnsupportedOperationException ddlUnsupportedTemporarilyError(String ddl) -
executeBroadcastTimeoutError
public static Throwable executeBroadcastTimeoutError(long timeout, scala.Option<TimeoutException> ex) -
cannotCompareCostWithTargetCostError
public static org.apache.spark.SparkIllegalArgumentException cannotCompareCostWithTargetCostError(String cost) -
notSupportTypeError
-
notSupportNonPrimitiveTypeError
public static org.apache.spark.SparkRuntimeException notSupportNonPrimitiveTypeError() -
unsupportedTypeError
-
useDictionaryEncodingWhenDictionaryOverflowError
-
endOfIteratorError
-
cannotAllocateMemoryToGrowBytesToBytesMapError
-
cannotAcquireMemoryToBuildLongHashedRelationError
-
cannotAcquireMemoryToBuildUnsafeHashedRelationError
-
rowLargerThan256MUnsupportedError
public static org.apache.spark.SparkUnsupportedOperationException rowLargerThan256MUnsupportedError() -
cannotBuildHashedRelationWithUniqueKeysExceededError
public static org.apache.spark.SparkUnsupportedOperationException cannotBuildHashedRelationWithUniqueKeysExceededError() -
cannotBuildHashedRelationLargerThan8GError
public static org.apache.spark.SparkUnsupportedOperationException cannotBuildHashedRelationLargerThan8GError() -
failedToPushRowIntoRowQueueError
-
unexpectedWindowFunctionFrameError
public static org.apache.spark.SparkRuntimeException unexpectedWindowFunctionFrameError(String frame) -
cannotParseStatisticAsPercentileError
public static org.apache.spark.SparkIllegalArgumentException cannotParseStatisticAsPercentileError(String stats, NumberFormatException e) -
statisticNotRecognizedError
public static org.apache.spark.SparkIllegalArgumentException statisticNotRecognizedError(String stats) -
unknownColumnError
public static org.apache.spark.SparkIllegalArgumentException unknownColumnError(String unknownColumn) -
unexpectedAccumulableUpdateValueError
public static org.apache.spark.SparkIllegalArgumentException unexpectedAccumulableUpdateValueError(Object o) -
unscaledValueTooLargeForPrecisionError
public static ArithmeticException unscaledValueTooLargeForPrecisionError(Decimal value, int decimalPrecision, int decimalScale, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
decimalPrecisionExceedsMaxPrecisionError
public static org.apache.spark.SparkArithmeticException decimalPrecisionExceedsMaxPrecisionError(int precision, int maxPrecision) -
outOfDecimalTypeRangeError
public static org.apache.spark.SparkArithmeticException outOfDecimalTypeRangeError(org.apache.spark.unsafe.types.UTF8String str) -
unsupportedArrayTypeError
-
unsupportedJavaTypeError
-
failedParsingStructTypeError
-
cannotMergeDecimalTypesWithIncompatibleScaleError
public static Throwable cannotMergeDecimalTypesWithIncompatibleScaleError(int leftScale, int rightScale) -
cannotMergeIncompatibleDataTypesError
-
exceedMapSizeLimitError
public static org.apache.spark.SparkRuntimeException exceedMapSizeLimitError(int size) -
duplicateMapKeyFoundError
-
mapDataKeyArrayLengthDiffersFromValueArrayLengthError
public static org.apache.spark.SparkRuntimeException mapDataKeyArrayLengthDiffersFromValueArrayLengthError() -
registeringStreamingQueryListenerError
-
concurrentQueryInstanceError
-
concurrentStreamLogUpdate
-
cannotParseJsonArraysAsStructsError
public static org.apache.spark.SparkRuntimeException cannotParseJsonArraysAsStructsError(String recordStr) -
cannotParseStringAsDataTypeError
public static org.apache.spark.SparkRuntimeException cannotParseStringAsDataTypeError(com.fasterxml.jackson.core.JsonParser parser, com.fasterxml.jackson.core.JsonToken token, DataType dataType) -
emptyJsonFieldValueError
-
cannotParseJSONFieldError
public static org.apache.spark.SparkRuntimeException cannotParseJSONFieldError(com.fasterxml.jackson.core.JsonParser parser, com.fasterxml.jackson.core.JsonToken jsonType, DataType dataType) -
cannotParseJSONFieldError
-
rootConverterReturnNullError
public static org.apache.spark.SparkRuntimeException rootConverterReturnNullError() -
attributesForTypeUnsupportedError
public static org.apache.spark.SparkUnsupportedOperationException attributesForTypeUnsupportedError(org.apache.spark.sql.catalyst.ScalaReflection.Schema schema) -
paramExceedOneCharError
-
paramIsNotIntegerError
-
paramIsNotBooleanValueError
-
foundNullValueForNotNullableFieldError
public static org.apache.spark.SparkRuntimeException foundNullValueForNotNullableFieldError(String name) -
malformedCSVRecordError
-
elementsOfTupleExceedLimitError
public static org.apache.spark.SparkUnsupportedOperationException elementsOfTupleExceedLimitError() -
expressionDecodingError
public static org.apache.spark.SparkRuntimeException expressionDecodingError(Exception e, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> expressions) -
expressionEncodingError
public static org.apache.spark.SparkRuntimeException expressionEncodingError(Exception e, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> expressions) -
classHasUnexpectedSerializerError
public static org.apache.spark.SparkRuntimeException classHasUnexpectedSerializerError(String clsName, org.apache.spark.sql.catalyst.expressions.Expression objSerializer) -
unsupportedOperandTypeForSizeFunctionError
public static org.apache.spark.SparkUnsupportedOperationException unsupportedOperandTypeForSizeFunctionError(DataType dataType) -
unexpectedValueForStartInFunctionError
public static org.apache.spark.SparkRuntimeException unexpectedValueForStartInFunctionError(String prettyName) -
unexpectedValueForLengthInFunctionError
public static org.apache.spark.SparkRuntimeException unexpectedValueForLengthInFunctionError(String prettyName) -
invalidIndexOfZeroError
public static RuntimeException invalidIndexOfZeroError(org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
concatArraysWithElementsExceedLimitError
public static org.apache.spark.SparkRuntimeException concatArraysWithElementsExceedLimitError(long numberOfElements) -
flattenArraysWithElementsExceedLimitError
public static org.apache.spark.SparkRuntimeException flattenArraysWithElementsExceedLimitError(long numberOfElements) -
createArrayWithElementsExceedLimitError
public static org.apache.spark.SparkRuntimeException createArrayWithElementsExceedLimitError(Object count) -
unionArrayWithElementsExceedLimitError
public static org.apache.spark.SparkRuntimeException unionArrayWithElementsExceedLimitError(int length) -
initialTypeNotTargetDataTypeError
-
initialTypeNotTargetDataTypesError
public static org.apache.spark.SparkUnsupportedOperationException initialTypeNotTargetDataTypesError(DataType dataType) -
malformedRecordsDetectedInSchemaInferenceError
-
malformedJSONError
-
malformedRecordsDetectedInSchemaInferenceError
-
decorrelateInnerQueryThroughPlanUnsupportedError
public static org.apache.spark.SparkUnsupportedOperationException decorrelateInnerQueryThroughPlanUnsupportedError(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan) -
methodCalledInAnalyzerNotAllowedError
public static org.apache.spark.SparkRuntimeException methodCalledInAnalyzerNotAllowedError() -
cannotSafelyMergeSerdePropertiesError
-
pairUnsupportedAtFunctionError
public static org.apache.spark.SparkUnsupportedOperationException pairUnsupportedAtFunctionError(org.apache.spark.sql.catalyst.plans.logical.statsEstimation.ValueInterval r1, org.apache.spark.sql.catalyst.plans.logical.statsEstimation.ValueInterval r2, String function) -
onceStrategyIdempotenceIsBrokenForBatchError
public static <TreeType extends org.apache.spark.sql.catalyst.trees.TreeNode<?>> org.apache.spark.SparkRuntimeException onceStrategyIdempotenceIsBrokenForBatchError(String batchName, TreeType plan, TreeType reOptimized) -
ruleIdNotFoundForRuleError
-
cannotCreateArrayWithElementsExceedLimitError
public static org.apache.spark.SparkRuntimeException cannotCreateArrayWithElementsExceedLimitError(long numElements, String additionalErrorMessage) -
malformedRecordsDetectedInRecordParsingError
-
remoteOperationsUnsupportedError
public static org.apache.spark.SparkRuntimeException remoteOperationsUnsupportedError() -
invalidKerberosConfigForHiveServer2Error
-
parentSparkUIToAttachTabNotFoundError
-
inferSchemaUnsupportedForHiveError
public static org.apache.spark.SparkUnsupportedOperationException inferSchemaUnsupportedForHiveError() -
requestedPartitionsMismatchTablePartitionsError
-
dynamicPartitionKeyNotAmongWrittenPartitionPathsError
-
cannotRemovePartitionDirError
public static org.apache.spark.SparkRuntimeException cannotRemovePartitionDirError(org.apache.hadoop.fs.Path partitionPath) -
cannotCreateStagingDirError
public static org.apache.spark.SparkRuntimeException cannotCreateStagingDirError(String message, IOException e) -
serDeInterfaceNotFoundError
public static org.apache.spark.SparkClassNotFoundException serDeInterfaceNotFoundError(NoClassDefFoundError e) -
convertHiveTableToCatalogTableError
public static Throwable convertHiveTableToCatalogTableError(SparkException e, String dbName, String tableName) -
cannotRecognizeHiveTypeError
-
getTablesByTypeUnsupportedByHiveVersionError
public static org.apache.spark.SparkUnsupportedOperationException getTablesByTypeUnsupportedByHiveVersionError() -
dropTableWithPurgeUnsupportedError
public static org.apache.spark.SparkUnsupportedOperationException dropTableWithPurgeUnsupportedError() -
alterTableWithDropPartitionAndPurgeUnsupportedError
public static org.apache.spark.SparkUnsupportedOperationException alterTableWithDropPartitionAndPurgeUnsupportedError() -
invalidPartitionFilterError
public static org.apache.spark.SparkUnsupportedOperationException invalidPartitionFilterError() -
getPartitionMetadataByFilterError
public static org.apache.spark.SparkRuntimeException getPartitionMetadataByFilterError(InvocationTargetException e) -
unsupportedHiveMetastoreVersionError
-
loadHiveClientCausesNoClassDefFoundError
public static org.apache.spark.SparkClassNotFoundException loadHiveClientCausesNoClassDefFoundError(NoClassDefFoundError cnf, scala.collection.Seq<URL> execJars, String key, InvocationTargetException e) -
cannotFetchTablesOfDatabaseError
-
illegalLocationClauseForViewPartitionError
-
renamePathAsExistsPathError
public static Throwable renamePathAsExistsPathError(org.apache.hadoop.fs.Path srcPath, org.apache.hadoop.fs.Path dstPath) -
renameAsExistsPathError
public static org.apache.spark.SparkFileAlreadyExistsException renameAsExistsPathError(org.apache.hadoop.fs.Path dstPath) -
renameSrcPathNotFoundError
-
failedRenameTempFileError
public static Throwable failedRenameTempFileError(org.apache.hadoop.fs.Path srcPath, org.apache.hadoop.fs.Path dstPath) -
legacyMetadataPathExistsError
public static Throwable legacyMetadataPathExistsError(org.apache.hadoop.fs.Path metadataPath, org.apache.hadoop.fs.Path legacyMetadataPath) -
partitionColumnNotFoundInSchemaError
public static org.apache.spark.SparkRuntimeException partitionColumnNotFoundInSchemaError(String col, StructType schema) -
stateNotDefinedOrAlreadyRemovedError
-
cannotSetTimeoutDurationError
public static org.apache.spark.SparkUnsupportedOperationException cannotSetTimeoutDurationError() -
cannotGetEventTimeWatermarkError
public static org.apache.spark.SparkUnsupportedOperationException cannotGetEventTimeWatermarkError() -
cannotSetTimeoutTimestampError
public static org.apache.spark.SparkUnsupportedOperationException cannotSetTimeoutTimestampError() -
batchMetadataFileNotFoundError
public static org.apache.spark.SparkFileNotFoundException batchMetadataFileNotFoundError(org.apache.hadoop.fs.Path batchMetadataFile) -
multiStreamingQueriesUsingPathConcurrentlyError
public static org.apache.spark.SparkConcurrentModificationException multiStreamingQueriesUsingPathConcurrentlyError(String path, org.apache.hadoop.fs.FileAlreadyExistsException e) -
addFilesWithAbsolutePathUnsupportedError
public static org.apache.spark.SparkUnsupportedOperationException addFilesWithAbsolutePathUnsupportedError(String commitProtocol) -
microBatchUnsupportedByDataSourceError
-
cannotExecuteStreamingRelationExecError
public static org.apache.spark.SparkUnsupportedOperationException cannotExecuteStreamingRelationExecError() -
invalidStreamingOutputModeError
public static org.apache.spark.SparkUnsupportedOperationException invalidStreamingOutputModeError(scala.Option<OutputMode> outputMode) -
invalidCatalogNameError
-
catalogPluginClassNotFoundError
-
catalogPluginClassNotImplementedError
-
catalogPluginClassNotFoundForCatalogError
-
catalogFailToFindPublicNoArgConstructorError
-
catalogFailToCallPublicNoArgConstructorError
-
cannotInstantiateAbstractCatalogPluginClassError
-
failedToInstantiateConstructorForCatalogError
-
noSuchElementExceptionError
-
sqlConfigNotFoundError
-
cannotMutateReadOnlySQLConfError
public static org.apache.spark.SparkUnsupportedOperationException cannotMutateReadOnlySQLConfError() -
cannotCloneOrCopyReadOnlySQLConfError
public static org.apache.spark.SparkUnsupportedOperationException cannotCloneOrCopyReadOnlySQLConfError() -
cannotGetSQLConfInSchedulerEventLoopThreadError
public static org.apache.spark.SparkRuntimeException cannotGetSQLConfInSchedulerEventLoopThreadError() -
unsupportedOperationExceptionError
public static org.apache.spark.SparkUnsupportedOperationException unsupportedOperationExceptionError() -
nullLiteralsCannotBeCastedError
public static org.apache.spark.SparkUnsupportedOperationException nullLiteralsCannotBeCastedError(String name) -
notUserDefinedTypeError
-
cannotLoadUserDefinedTypeError
-
notPublicClassError
-
primitiveTypesNotSupportedError
public static org.apache.spark.SparkUnsupportedOperationException primitiveTypesNotSupportedError() -
fieldIndexOnRowWithoutSchemaError
public static org.apache.spark.SparkUnsupportedOperationException fieldIndexOnRowWithoutSchemaError() -
valueIsNullError
-
onlySupportDataSourcesProvidingFileFormatError
-
cannotRestorePermissionsForPathError
public static Throwable cannotRestorePermissionsForPathError(org.apache.hadoop.fs.permission.FsPermission permission, org.apache.hadoop.fs.Path path) -
failToSetOriginalACLBackError
-
multiFailuresInStageMaterializationError
-
unrecognizedCompressionSchemaTypeIDError
public static org.apache.spark.SparkUnsupportedOperationException unrecognizedCompressionSchemaTypeIDError(int typeId) -
getParentLoggerNotImplementedError
public static org.apache.spark.SparkSQLFeatureNotSupportedException getParentLoggerNotImplementedError(String className) -
cannotCreateParquetConverterForTypeError
public static org.apache.spark.SparkRuntimeException cannotCreateParquetConverterForTypeError(DecimalType t, String parquetType) -
cannotCreateParquetConverterForDecimalTypeError
public static org.apache.spark.SparkRuntimeException cannotCreateParquetConverterForDecimalTypeError(DecimalType t, String parquetType) -
cannotCreateParquetConverterForDataTypeError
-
cannotAddMultiPartitionsOnNonatomicPartitionTableError
public static org.apache.spark.SparkUnsupportedOperationException cannotAddMultiPartitionsOnNonatomicPartitionTableError(String tableName) -
userSpecifiedSchemaUnsupportedByDataSourceError
public static org.apache.spark.SparkUnsupportedOperationException userSpecifiedSchemaUnsupportedByDataSourceError(TableProvider provider) -
cannotDropMultiPartitionsOnNonatomicPartitionTableError
public static org.apache.spark.SparkUnsupportedOperationException cannotDropMultiPartitionsOnNonatomicPartitionTableError(String tableName) -
truncateMultiPartitionUnsupportedError
public static org.apache.spark.SparkUnsupportedOperationException truncateMultiPartitionUnsupportedError(String tableName) -
overwriteTableByUnsupportedExpressionError
-
dynamicPartitionOverwriteUnsupportedByTableError
-
failedMergingSchemaError
public static Throwable failedMergingSchemaError(StructType leftSchema, StructType rightSchema, SparkException e) -
cannotBroadcastTableOverMaxTableRowsError
public static Throwable cannotBroadcastTableOverMaxTableRowsError(long maxBroadcastTableRows, long numRows) -
cannotBroadcastTableOverMaxTableBytesError
public static Throwable cannotBroadcastTableOverMaxTableBytesError(long maxBroadcastTableBytes, long dataSize) -
notEnoughMemoryToBuildAndBroadcastTableError
public static Throwable notEnoughMemoryToBuildAndBroadcastTableError(OutOfMemoryError oe, scala.collection.Seq<org.apache.spark.sql.catalyst.TableIdentifier> tables) -
executeCodePathUnsupportedError
public static org.apache.spark.SparkUnsupportedOperationException executeCodePathUnsupportedError(String execName) -
cannotMergeClassWithOtherClassError
-
continuousProcessingUnsupportedByDataSourceError
public static org.apache.spark.SparkUnsupportedOperationException continuousProcessingUnsupportedByDataSourceError(String sourceName) -
failedToReadDataError
-
failedToGenerateEpochMarkerError
-
foreachWriterAbortedDueToTaskFailureError
-
incorrectRampUpRate
public static Throwable incorrectRampUpRate(long rowsPerSecond, long maxSeconds, long rampUpTimeSeconds) -
incorrectEndOffset
-
failedToReadDeltaFileError
-
failedToReadSnapshotFileError
-
cannotPurgeAsBreakInternalStateError
public static org.apache.spark.SparkUnsupportedOperationException cannotPurgeAsBreakInternalStateError() -
cleanUpSourceFilesUnsupportedError
public static org.apache.spark.SparkUnsupportedOperationException cleanUpSourceFilesUnsupportedError() -
latestOffsetNotCalledError
public static org.apache.spark.SparkUnsupportedOperationException latestOffsetNotCalledError() -
legacyCheckpointDirectoryExistsError
-
subprocessExitedError
-
outputDataTypeUnsupportedByNodeWithoutSerdeError
-
invalidStartIndexError
public static org.apache.spark.SparkArrayIndexOutOfBoundsException invalidStartIndexError(int numRows, int startIndex) -
concurrentModificationOnExternalAppendOnlyUnsafeRowArrayError
public static org.apache.spark.SparkConcurrentModificationException concurrentModificationOnExternalAppendOnlyUnsafeRowArrayError(String className) -
doExecuteBroadcastNotImplementedError
public static org.apache.spark.SparkUnsupportedOperationException doExecuteBroadcastNotImplementedError(String nodeName) -
defaultDatabaseNotExistsError
-
databaseNameConflictWithSystemPreservedDatabaseError
-
commentOnTableUnsupportedError
public static org.apache.spark.SparkSQLFeatureNotSupportedException commentOnTableUnsupportedError() -
unsupportedUpdateColumnNullabilityError
public static org.apache.spark.SparkSQLFeatureNotSupportedException unsupportedUpdateColumnNullabilityError() -
renameColumnUnsupportedForOlderMySQLError
public static org.apache.spark.SparkSQLFeatureNotSupportedException renameColumnUnsupportedForOlderMySQLError() -
failedToExecuteQueryError
-
nestedFieldUnsupportedError
public static org.apache.spark.SparkUnsupportedOperationException nestedFieldUnsupportedError(String colName) -
transformationsAndActionsNotInvokedByDriverError
-
repeatedPivotsUnsupportedError
-
pivotNotAfterGroupByUnsupportedError
-
invalidAesKeyLengthError
-
aesModeUnsupportedError
-
aesCryptoError
-
invalidAesIvLengthError
-
aesUnsupportedIv
-
aesUnsupportedAad
-
hiveTableWithAnsiIntervalsError
public static org.apache.spark.SparkUnsupportedOperationException hiveTableWithAnsiIntervalsError(org.apache.spark.sql.catalyst.TableIdentifier table) -
cannotConvertOrcTimestampToTimestampNTZError
-
cannotConvertOrcTimestampNTZToTimestampLTZError
-
writePartitionExceedConfigSizeWhenDynamicPartitionError
-
invalidNumberFormatError
-
unsupportedMultipleBucketTransformsError
public static org.apache.spark.SparkUnsupportedOperationException unsupportedMultipleBucketTransformsError() -
unsupportedCommentNamespaceError
public static org.apache.spark.SparkSQLFeatureNotSupportedException unsupportedCommentNamespaceError(String namespace) -
unsupportedRemoveNamespaceCommentError
public static org.apache.spark.SparkSQLFeatureNotSupportedException unsupportedRemoveNamespaceCommentError(String namespace) -
unsupportedDropNamespaceError
public static org.apache.spark.SparkSQLFeatureNotSupportedException unsupportedDropNamespaceError(String namespace) -
exceedMaxLimit
public static org.apache.spark.SparkRuntimeException exceedMaxLimit(int limit) -
timestampAddOverflowError
-
invalidBucketFile
-
multipleRowSubqueryError
public static Throwable multipleRowSubqueryError(org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
comparatorReturnsNull
-
invalidPatternError
public static RuntimeException invalidPatternError(String funcName, String pattern, Throwable cause) -
tooManyArrayElementsError
public static org.apache.spark.SparkIllegalArgumentException tooManyArrayElementsError(int numElements, int elementSize) -
invalidEmptyLocationError
public static org.apache.spark.SparkIllegalArgumentException invalidEmptyLocationError(String location) -
malformedProtobufMessageDetectedInMessageParsingError
-
locationAlreadyExists
public static Throwable locationAlreadyExists(org.apache.spark.sql.catalyst.TableIdentifier tableId, org.apache.hadoop.fs.Path location) -
cannotConvertCatalystValueToProtobufEnumTypeError
-
hllInvalidLgK
-
hllInvalidInputSketchBuffer
-
hllUnionDifferentLgK
-
mergeCardinalityViolationError
public static org.apache.spark.SparkRuntimeException mergeCardinalityViolationError() -
unsupportedPurgePartitionError
public static org.apache.spark.SparkUnsupportedOperationException unsupportedPurgePartitionError() -
unsupportedPurgeTableError
public static org.apache.spark.SparkUnsupportedOperationException unsupportedPurgeTableError() -
toSQLId
-
toSQLId
-
toSQLStmt
-
toSQLConf
-
toSQLType
-
toSQLType
-
toSQLValue
-
toSQLValue
-
toSQLValue
-
toSQLValue
-
toSQLValue
-
toSQLValue
-
toSQLValue
-
getSummary
-
getQueryContext
public static QueryContext[] getQueryContext(org.apache.spark.sql.catalyst.trees.SQLQueryContext sqlContext) -
toSQLConfVal
-
toDSOption
-
toSQLExpr
-
toSQLSchema
-
toSQLValue
-
fieldDiffersFromDerivedLocalDateError
public static org.apache.spark.SparkDateTimeException fieldDiffersFromDerivedLocalDateError(ChronoField field, int actual, int expected, LocalDate candidate) -
failToParseDateTimeInNewParserError
-
failToRecognizePatternAfterUpgradeError
-
failToRecognizePatternError
-
unreachableError
-
unreachableError$default$1
-
invalidInputInCastToDatetimeError
public static org.apache.spark.SparkDateTimeException invalidInputInCastToDatetimeError(org.apache.spark.unsafe.types.UTF8String value, DataType to, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
invalidInputInCastToDatetimeError
public static org.apache.spark.SparkDateTimeException invalidInputInCastToDatetimeError(double value, DataType to, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
arithmeticOverflowError$default$2
-
arithmeticOverflowError$default$3
public static org.apache.spark.sql.catalyst.trees.SQLQueryContext arithmeticOverflowError$default$3() -
unsupportedArrowTypeError
public static org.apache.spark.SparkUnsupportedOperationException unsupportedArrowTypeError(org.apache.arrow.vector.types.pojo.ArrowType typeName) -
duplicatedFieldNameInArrowStructError
public static org.apache.spark.SparkUnsupportedOperationException duplicatedFieldNameInArrowStructError(scala.collection.Seq<String> fieldNames) -
unsupportedDataTypeError
public static org.apache.spark.SparkUnsupportedOperationException unsupportedDataTypeError(DataType typeName) -
userDefinedTypeNotAnnotatedAndRegisteredError
-
cannotFindEncoderForTypeError
public static org.apache.spark.SparkUnsupportedOperationException cannotFindEncoderForTypeError(String typeName) -
cannotHaveCircularReferencesInBeanClassError
public static org.apache.spark.SparkUnsupportedOperationException cannotHaveCircularReferencesInBeanClassError(Class<?> clazz) -
cannotFindConstructorForTypeError
public static org.apache.spark.SparkUnsupportedOperationException cannotFindConstructorForTypeError(String tpe) -
cannotHaveCircularReferencesInClassError
public static org.apache.spark.SparkUnsupportedOperationException cannotHaveCircularReferencesInClassError(String t) -
cannotUseInvalidJavaIdentifierAsFieldNameError
public static org.apache.spark.SparkUnsupportedOperationException cannotUseInvalidJavaIdentifierAsFieldNameError(String fieldName, org.apache.spark.sql.catalyst.WalkedTypePath walkedTypePath)
cannotGetOuterPointerForInnerClassError
public static org.apache.spark.SparkRuntimeException cannotGetOuterPointerForInnerClassError(Class<?> innerCls)
-