public class KVUtils
extends Object
Constructor and Description |
---|
KVUtils() |
Modifier and Type | Method and Description |
---|---|
static <T> int |
count(org.apache.spark.util.kvstore.KVStoreView<T> view,
scala.Function1<T,Object> countFunc)
Counts the number of elements in the KVStoreView which satisfy a predicate.
|
static org.apache.spark.util.kvstore.KVStore |
createKVStore(scala.Option<java.io.File> storePath,
boolean live,
SparkConf conf) |
static <T> void |
foreach(org.apache.spark.util.kvstore.KVStoreView<T> view,
scala.Function1<T,scala.runtime.BoxedUnit> foreachFunc)
Applies a function f to all values produced by KVStoreView.
|
static <T,B> scala.collection.Seq<B> |
mapToSeq(org.apache.spark.util.kvstore.KVStoreView<T> view,
scala.Function1<T,B> mapFunc)
Maps all values of KVStoreView to new values using a transformation function.
|
static <M> org.apache.spark.util.kvstore.KVStore |
open(java.io.File path,
M metadata,
SparkConf conf,
boolean live,
scala.reflect.ClassTag<M> evidence$1)
Open or create a disk-based KVStore.
|
static void |
org$apache$spark$internal$Logging$$log__$eq(org.slf4j.Logger x$1) |
static org.slf4j.Logger |
org$apache$spark$internal$Logging$$log_() |
static org.apache.spark.status.KVUtils.KVStoreScalaSerializer |
serializerForHistoryServer(SparkConf conf) |
static <T> int |
size(org.apache.spark.util.kvstore.KVStoreView<T> view) |
static <T> scala.collection.Seq<T> |
viewToSeq(org.apache.spark.util.kvstore.KVStoreView<T> view)
Turns a KVStoreView into a Scala sequence.
|
static <T> scala.collection.Seq<T> |
viewToSeq(org.apache.spark.util.kvstore.KVStoreView<T> view,
int max,
scala.Function1<T,Object> filter)
Turns a KVStoreView into a Scala sequence, applying a filter.
|
static <T> scala.collection.Seq<T> |
viewToSeq(org.apache.spark.util.kvstore.KVStoreView<T> view,
int from,
int until,
scala.Function1<T,Object> filter)
Turns an interval of KVStoreView into a Scala sequence, applying a filter.
|
public static <M> org.apache.spark.util.kvstore.KVStore open(java.io.File path, M metadata, SparkConf conf, boolean live, scala.reflect.ClassTag<M> evidence$1)
path
- Location of the store.
metadata
- Metadata value to compare to the data in the store. If the store does not
contain any metadata (e.g. it's a new store), this value is written as
the store's metadata.
conf
- SparkConf used to get HYBRID_STORE_DISK_BACKEND
live
- (undocumented)
evidence$1
- (undocumented)
public static org.apache.spark.status.KVUtils.KVStoreScalaSerializer serializerForHistoryServer(SparkConf conf)
public static org.apache.spark.util.kvstore.KVStore createKVStore(scala.Option<java.io.File> storePath, boolean live, SparkConf conf)
public static <T> scala.collection.Seq<T> viewToSeq(org.apache.spark.util.kvstore.KVStoreView<T> view, int max, scala.Function1<T,Object> filter)
public static <T> scala.collection.Seq<T> viewToSeq(org.apache.spark.util.kvstore.KVStoreView<T> view, int from, int until, scala.Function1<T,Object> filter)
public static <T> scala.collection.Seq<T> viewToSeq(org.apache.spark.util.kvstore.KVStoreView<T> view)
public static <T> int count(org.apache.spark.util.kvstore.KVStoreView<T> view, scala.Function1<T,Object> countFunc)
public static <T> void foreach(org.apache.spark.util.kvstore.KVStoreView<T> view, scala.Function1<T,scala.runtime.BoxedUnit> foreachFunc)
public static <T,B> scala.collection.Seq<B> mapToSeq(org.apache.spark.util.kvstore.KVStoreView<T> view, scala.Function1<T,B> mapFunc)
public static <T> int size(org.apache.spark.util.kvstore.KVStoreView<T> view)
public static org.slf4j.Logger org$apache$spark$internal$Logging$$log_()
public static void org$apache$spark$internal$Logging$$log__$eq(org.slf4j.Logger x$1)