congrats Icon
New! Announcing Tabnine Chat Beta
Learn More
Tabnine Logo
SparkSpecificationCodec
Code IndexAdd Tabnine to your IDE (free)

How to use
SparkSpecificationCodec
in
co.cask.cdap.proto.codec

Best Java code snippets using co.cask.cdap.proto.codec.SparkSpecificationCodec (Showing top 6 results out of 315)

origin: caskdata/cdap

@Override
public SparkSpecification deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
  throws JsonParseException {
  JsonObject object = json.getAsJsonObject();

  // Required scalar fields; a missing key would surface as an NPE/JsonParseException here.
  String className = object.get("className").getAsString();
  String name = object.get("name").getAsString();
  String description = object.get("description").getAsString();

  // "mainClassName" is optional: absent in the JSON means null on the spec.
  String mainClassName = null;
  if (object.has("mainClassName")) {
    mainClassName = object.get("mainClassName").getAsString();
  }

  // Collection-valued fields, delegated to the shared codec helpers.
  Map<String, Plugin> plugins = deserializeMap(object.get("plugins"), context, Plugin.class);
  Set<String> datasets = deserializeSet(object.get("datasets"), context, String.class);
  Map<String, String> properties = deserializeMap(object.get("properties"), context, String.class);

  // Per-role resource requirements ("client"/"driver"/"executor" properties).
  Resources clientResources = deserializeResources(object, "client", context);
  Resources driverResources = deserializeResources(object, "driver", context);
  Resources executorResources = deserializeResources(object, "executor", context);

  List<SparkHttpServiceHandlerSpecification> handlers =
    deserializeList(object.get("handlers"), context, SparkHttpServiceHandlerSpecification.class);

  return new SparkSpecification(className, name, description, mainClassName, datasets, properties,
                                clientResources, driverResources, executorResources, handlers, plugins);
}
origin: caskdata/cdap

@Override
public JsonElement serialize(SparkSpecification src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject result = new JsonObject();

  // Property insertion order is kept identical to what deserialize() expects.
  result.add("className", new JsonPrimitive(src.getClassName()));
  result.add("name", new JsonPrimitive(src.getName()));
  result.add("description", new JsonPrimitive(src.getDescription()));
  result.add("plugins", serializeMap(src.getPlugins(), context, Plugin.class));

  // Optional field: only emitted when present, mirroring the has() check on read.
  String mainClassName = src.getMainClassName();
  if (mainClassName != null) {
    result.add("mainClassName", new JsonPrimitive(mainClassName));
  }

  result.add("datasets", serializeSet(src.getDatasets(), context, String.class));
  result.add("properties", serializeMap(src.getProperties(), context, String.class));

  // Helpers add the "client"/"driver"/"executor" properties onto the object directly.
  serializeResources(result, "client", context, src.getClientResources());
  serializeResources(result, "driver", context, src.getDriverResources());
  serializeResources(result, "executor", context, src.getExecutorResources());

  result.add("handlers", serializeList(src.getHandlers(), context, SparkHttpServiceHandlerSpecification.class));
  return result;
}
origin: cdapio/cdap

/**
 * Registers all CDAP specification codecs on the given {@link GsonBuilder}.
 * Each {@code registerTypeAdapter} call returns the same builder instance,
 * so the unchained form below is behaviorally identical to a fluent chain.
 *
 * @param builder the builder to configure
 * @return the same builder, for call chaining
 */
public static GsonBuilder addTypeAdapters(GsonBuilder builder) {
  builder.registerTypeAdapter(Schema.class, new SchemaTypeAdapter());
  builder.registerTypeAdapter(ApplicationSpecification.class, new ApplicationSpecificationCodec());
  builder.registerTypeAdapter(MapReduceSpecification.class, new MapReduceSpecificationCodec());
  builder.registerTypeAdapter(SparkSpecification.class, new SparkSpecificationCodec());
  builder.registerTypeAdapter(WorkflowSpecification.class, new WorkflowSpecificationCodec());
  builder.registerTypeAdapter(WorkflowNode.class, new WorkflowNodeCodec());
  builder.registerTypeAdapter(CustomActionSpecification.class, new CustomActionSpecificationCodec());
  builder.registerTypeAdapter(ConditionSpecification.class, new ConditionSpecificationCodec());
  builder.registerTypeAdapter(ServiceSpecification.class, new ServiceSpecificationCodec());
  builder.registerTypeAdapter(WorkerSpecification.class, new WorkerSpecificationCodec());
  builder.registerTypeAdapter(BasicThrowable.class, new BasicThrowableCodec());
  // Both Trigger and SatisfiableTrigger share a single codec implementation.
  builder.registerTypeAdapter(Trigger.class, new TriggerCodec());
  builder.registerTypeAdapter(SatisfiableTrigger.class, new TriggerCodec());
  builder.registerTypeAdapter(Constraint.class, new ConstraintCodec());
  builder.registerTypeAdapterFactory(new AppSpecTypeAdapterFactory());
  return builder;
}
origin: co.cask.cdap/cdap-proto

@Override
public JsonElement serialize(SparkSpecification src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject json = new JsonObject();

  // Scalar fields first; insertion order matches the reader's expectations.
  json.add("className", new JsonPrimitive(src.getClassName()));
  json.add("name", new JsonPrimitive(src.getName()));
  json.add("description", new JsonPrimitive(src.getDescription()));
  json.add("plugins", serializeMap(src.getPlugins(), context, Plugin.class));

  // mainClassName is the only optional field; skip it entirely when null.
  if (src.getMainClassName() != null) {
    json.add("mainClassName", new JsonPrimitive(src.getMainClassName()));
  }

  json.add("datasets", serializeSet(src.getDatasets(), context, String.class));
  json.add("properties", serializeMap(src.getProperties(), context, String.class));

  // Resource specs are written onto the object by the helper (no-op when null,
  // per the codec's serializeResources contract).
  serializeResources(json, "client", context, src.getClientResources());
  serializeResources(json, "driver", context, src.getDriverResources());
  serializeResources(json, "executor", context, src.getExecutorResources());

  json.add("handlers", serializeList(src.getHandlers(), context, SparkHttpServiceHandlerSpecification.class));
  return json;
}
origin: co.cask.cdap/cdap-app-fabric

/**
 * Configures the given {@link GsonBuilder} with every CDAP specification codec,
 * including the legacy Flow/Flowlet codecs present in this version.
 * {@code registerTypeAdapter} returns the receiver, so issuing the calls as
 * separate statements is behaviorally identical to the fluent chain.
 *
 * @param builder the builder to configure
 * @return the same builder, for call chaining
 */
public static GsonBuilder addTypeAdapters(GsonBuilder builder) {
  builder.registerTypeAdapter(Schema.class, new SchemaTypeAdapter());
  builder.registerTypeAdapter(ApplicationSpecification.class, new ApplicationSpecificationCodec());
  builder.registerTypeAdapter(FlowSpecification.class, new FlowSpecificationCodec());
  builder.registerTypeAdapter(FlowletSpecification.class, new FlowletSpecificationCodec());
  builder.registerTypeAdapter(MapReduceSpecification.class, new MapReduceSpecificationCodec());
  builder.registerTypeAdapter(SparkSpecification.class, new SparkSpecificationCodec());
  builder.registerTypeAdapter(WorkflowSpecification.class, new WorkflowSpecificationCodec());
  builder.registerTypeAdapter(WorkflowNode.class, new WorkflowNodeCodec());
  builder.registerTypeAdapter(CustomActionSpecification.class, new CustomActionSpecificationCodec());
  builder.registerTypeAdapter(ConditionSpecification.class, new ConditionSpecificationCodec());
  builder.registerTypeAdapter(ServiceSpecification.class, new ServiceSpecificationCodec());
  builder.registerTypeAdapter(WorkerSpecification.class, new WorkerSpecificationCodec());
  builder.registerTypeAdapter(BasicThrowable.class, new BasicThrowableCodec());
  // Trigger and SatisfiableTrigger intentionally share one codec implementation.
  builder.registerTypeAdapter(Trigger.class, new TriggerCodec());
  builder.registerTypeAdapter(SatisfiableTrigger.class, new TriggerCodec());
  builder.registerTypeAdapter(Constraint.class, new ConstraintCodec());
  builder.registerTypeAdapterFactory(new AppSpecTypeAdapterFactory());
  return builder;
}
origin: co.cask.cdap/cdap-proto

@Override
public SparkSpecification deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
  throws JsonParseException {
  JsonObject spec = json.getAsJsonObject();

  // Mandatory string fields.
  String className = spec.get("className").getAsString();
  String name = spec.get("name").getAsString();
  String description = spec.get("description").getAsString();

  // Optional main class: null when the property was never serialized.
  String mainClassName =
    spec.has("mainClassName") ? spec.get("mainClassName").getAsString() : null;

  // Collections are decoded through the shared codec helper methods.
  Map<String, Plugin> plugins = deserializeMap(spec.get("plugins"), context, Plugin.class);
  Set<String> datasets = deserializeSet(spec.get("datasets"), context, String.class);
  Map<String, String> properties = deserializeMap(spec.get("properties"), context, String.class);

  // Resource requirements for each Spark role; helper returns null when absent.
  Resources clientResources = deserializeResources(spec, "client", context);
  Resources driverResources = deserializeResources(spec, "driver", context);
  Resources executorResources = deserializeResources(spec, "executor", context);

  List<SparkHttpServiceHandlerSpecification> handlers =
    deserializeList(spec.get("handlers"), context, SparkHttpServiceHandlerSpecification.class);

  return new SparkSpecification(className, name, description, mainClassName, datasets, properties,
                                clientResources, driverResources, executorResources, handlers, plugins);
}
co.cask.cdap.proto.codecSparkSpecificationCodec

Most used methods

  • <init>
  • deserializeList
  • deserializeMap
  • deserializeResources
    Deserialize Resources object from a json property named with Resources. A null value will be returned
  • deserializeSet
  • serializeList
  • serializeMap
  • serializeResources
    Serialize the Resources object if it is not null.
  • serializeSet

Popular in Java

  • Creating JSON documents from java classes using gson
  • onRequestPermissionsResult (Fragment)
  • orElseThrow (Optional)
    Return the contained value, if present, otherwise throw an exception to be created by the provided s
  • findViewById (Activity)
  • FileOutputStream (java.io)
    An output stream that writes bytes to a file. If the output file exists, it can be replaced or appen
  • URLEncoder (java.net)
    This class is used to encode a string using the format required by application/x-www-form-urlencoded
  • Annotation (javassist.bytecode.annotation)
    The annotation structure. An instance of this class is returned by getAnnotations() in AnnotationsAttr
  • DateTimeFormat (org.joda.time.format)
    Factory that creates instances of DateTimeFormatter from patterns and styles. Datetime formatting i
  • LoggerFactory (org.slf4j)
    The LoggerFactory is a utility class producing Loggers for various logging APIs, most notably for lo
  • Option (scala)
  • Github Copilot alternatives
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now