/** Verifies that registered Kryo classes surface as a comma-joined config value. */
@Test
public void testRegisterKryoClasses() {
  SparkConf conf = new SparkConf();
  Class<?>[] toRegister = {Class1.class, Class2.class};
  conf.registerKryoClasses(toRegister);
  String expected = String.join(",", Class1.class.getName(), Class2.class.getName());
  assertEquals(expected, conf.get("spark.kryo.classesToRegister"));
}
/** Registering classes must populate spark.kryo.classesToRegister with their joined names. */
@Test
public void testRegisterKryoClasses() {
  SparkConf conf = new SparkConf();
  conf.registerKryoClasses(new Class<?>[] {Class1.class, Class2.class});
  String joinedNames = Class1.class.getName() + "," + Class2.class.getName();
  assertEquals(joinedNames, conf.get("spark.kryo.classesToRegister"));
}
/** Checks the comma-separated registration list written by registerKryoClasses. */
@Test
public void testRegisterKryoClasses() {
  SparkConf conf = new SparkConf();
  Class<?>[] classes = {Class1.class, Class2.class};
  conf.registerKryoClasses(classes);
  assertEquals(
      Class1.class.getName() + "," + Class2.class.getName(),
      conf.get("spark.kryo.classesToRegister"));
}
conf.set("spark.kryo.registrationRequired", "true").registerKryoClasses(kryoClassArray);
conf.set("spark.kryo.registrationRequired", "true").registerKryoClasses(kryoClassArray);
conf.set("spark.kryo.registrationRequired", "true").registerKryoClasses(kryoClassArray);
conf.set("spark.kryo.registrationRequired", "true").registerKryoClasses(kryoClassArray);
conf.set("spark.kryo.registrationRequired", "true").registerKryoClasses(kryoClassArray);
/**
 * Registers the Hudi model classes with Kryo on the given {@link SparkConf}.
 *
 * @param conf the Spark configuration to mutate
 * @return the same {@code conf} instance, for call chaining
 */
public static SparkConf registerClasses(SparkConf conf) {
  // Typed Class<?>[] literal instead of a raw Class[] — avoids raw-type warnings
  // and matches registerKryoClasses' Class<?>[] parameter exactly.
  conf.registerKryoClasses(
      new Class<?>[] {HoodieWriteConfig.class, HoodieRecord.class, HoodieKey.class});
  return conf;
}
/**
 * Registers the Hudi model classes with Kryo on the given {@link SparkConf}.
 *
 * @param conf the Spark configuration to mutate
 * @return the same {@code conf} instance, for call chaining
 */
public static SparkConf registerClasses(SparkConf conf) {
  // Typed Class<?>[] literal instead of a raw Class[] — avoids raw-type warnings
  // and matches registerKryoClasses' Class<?>[] parameter exactly.
  conf.registerKryoClasses(
      new Class<?>[] {HoodieWriteConfig.class, HoodieRecord.class, HoodieKey.class});
  return conf;
}
/**
 * Registers the given classes with the Kryo serializer.
 *
 * @param classes classes to register
 * @return this builder, for chaining
 */
public Builder registerKryoClasses(Class<?>... classes) {
  this.conf.registerKryoClasses(classes);
  return this;
}
/**
 * Registers the DataVec writable classes with Kryo on the given {@link SparkConf}.
 *
 * @param conf the Spark configuration to mutate
 */
public static void registerKryoClasses(SparkConf conf) {
  List<Class<?>> classes = Arrays.<Class<?>>asList(BooleanWritable.class, ByteWritable.class,
      DoubleWritable.class, FloatWritable.class, IntWritable.class, LongWritable.class,
      NullWritable.class, Text.class);
  // BUG FIX: classes.toArray() returns Object[] (guaranteed since JDK 9, JDK-6260652),
  // so the old unchecked cast `(Class<?>[]) classes.toArray()` throws ClassCastException
  // at runtime. Passing a typed target array produces a real Class<?>[].
  conf.registerKryoClasses(classes.toArray(new Class<?>[0]));
}
/**
 * Registers the DataVec writable classes with Kryo on the given {@link SparkConf}.
 *
 * @param conf the Spark configuration to mutate
 */
public static void registerKryoClasses(SparkConf conf) {
  List<Class<?>> classes = Arrays.<Class<?>>asList(BooleanWritable.class, ByteWritable.class,
      DoubleWritable.class, FloatWritable.class, IntWritable.class, LongWritable.class,
      NullWritable.class, Text.class);
  // BUG FIX: classes.toArray() returns Object[] (guaranteed since JDK 9, JDK-6260652),
  // so the old unchecked cast `(Class<?>[]) classes.toArray()` throws ClassCastException
  // at runtime. Passing a typed target array produces a real Class<?>[].
  conf.registerKryoClasses(classes.toArray(new Class<?>[0]));
}
/**
 * Builds a Spark-backed {@link Executor}, registering the supplied data classes with Kryo.
 *
 * @param config runtime configuration (unused here beyond the interface contract —
 *     NOTE(review): confirm whether it should feed into the SparkConf)
 * @param dataClasses classes to register for Kryo serialization
 * @return a configured {@code SparkExecutor}
 * @throws IOException if executor construction fails
 */
@Override
public Executor newExecutor(Config config, Collection<? extends Class<?>> dataClasses)
    throws IOException {
  SparkConf sparkConf = new SparkConf();
  sparkConf.set("spark.serializer", KryoSerializer.class.getName());
  Class<?>[] kryoClasses = dataClasses.toArray(new Class[dataClasses.size()]);
  sparkConf.registerKryoClasses(kryoClasses);
  return SparkExecutor
      .newBuilder(EuphoriaSparkTrends.class.getSimpleName(), sparkConf)
      .build();
}
}
/**
 * Builds the Spark configuration for the Beam runner, enforcing Kryo registration and
 * registering the classes Beam's Spark runner serializes internally.
 *
 * @param local when true, targets an in-process 3-thread master named "LocalBeam"
 * @return the configured {@link SparkConf}
 */
private static SparkConf getConf(boolean local) {
  SparkConf conf = new SparkConf();
  if (local) {
    conf.setMaster("local[3]");
    conf.setAppName("LocalBeam");
  }
  // registrationRequired makes Kryo fail fast on any unregistered class.
  conf.set("spark.kryo.registrationRequired", "true");
  conf.set("spark.serializer", KryoSerializer.class.getName());
  conf.set("spark.kryo.registrator", BeamSparkRunnerRegistrator.class.getName());
  Class<?>[] kryoClasses = {
      scala.collection.mutable.WrappedArray.ofRef.class,
      Object[].class,
      org.apache.beam.runners.spark.util.ByteArray.class
  };
  conf.registerKryoClasses(kryoClasses);
  return conf;
}
}
/** * Creates {@link SparkConf} with {@link org.apache.spark.serializer.KryoSerializer} along with * registering default/user-input serializable classes and user-input Avro Schemas. * Once {@link SparkContext} is created, we can no longer register serialization classes and Avro schemas. */ public SparkConf createSparkConf(@NonNull final SparkArgs sparkArgs) { /** * By custom registering classes the full class name of each object * is not stored during serialization which reduces storage space. */ final SparkConf sparkConf = new SparkConf(); sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer"); final List<Class> serializableClasses = getDefaultSerializableClasses(); serializableClasses.addAll(sparkArgs.getUserSerializationClasses()); sparkConf.registerKryoClasses(serializableClasses.toArray(new Class[0])); if (sparkArgs.getAvroSchemas().isPresent()) { sparkConf.registerAvroSchemas( JavaConverters .iterableAsScalaIterableConverter(sparkArgs.getAvroSchemas().get()) .asScala() .toSeq()); } // override spark properties final Map<String, String> sparkProps = sparkArgs.getOverrideSparkProperties(); for (Entry<String, String> entry : sparkProps.entrySet()) { log.info("Setting spark key:val {} : {}", entry.getKey(), entry.getValue()); sparkConf.set(entry.getKey(), entry.getValue()); } return sparkConf; }
/**
 * Creates a {@link JavaStreamingContext} wired for Kryo serialization of the streaming
 * message types, applying any Spark tuning properties from the configuration context.
 *
 * @param streamingContextName application name for the Spark context
 * @param port Spark UI port
 * @param streamingBatchTime micro-batch duration in milliseconds
 * @param sparkHost Spark master URL
 * @return the configured streaming context
 */
private JavaStreamingContext create(String streamingContextName, int port,
    long streamingBatchTime, String sparkHost) {
  SparkConf conf = new SparkConf();
  conf.set("spark.ui.port", String.valueOf(port));
  conf.setAppName(streamingContextName);
  conf.setJars(JavaStreamingContext.jarOfClass(StreamingEngine.class));
  conf.setMaster(sparkHost);
  conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
  conf.registerKryoClasses(new Class[] {
      StratioStreamingMessage.class, InsertMessage.class, ColumnType.class, Action.class});
  HashMap<String, String> tuningProperties = configurationContext.getSparkTunningProperties();
  if (tuningProperties != null && tuningProperties.size() > 0) {
    tuningProperties.forEach(conf::set);
  }
  return new JavaStreamingContext(conf, new Duration(streamingBatchTime));
}
conf.set("spark.kryo.registrationRequired", "true").registerKryoClasses(kryoClassArray);
.registerKryoClasses(KRYO_CLASSES) .setAppName("sparkTask");
sparkConf.registerKryoClasses(c);