/**
 * Reconstructs a {@link SparkSpecification} from its JSON form.
 * Only {@code mainClassName} is treated as optional; every other field is
 * expected to be present in the object (mirroring what serialize() emits).
 */
@Override
public SparkSpecification deserialize(JsonElement json, Type typeOfT,
                                      JsonDeserializationContext context) throws JsonParseException {
  JsonObject object = json.getAsJsonObject();

  // Required identity fields.
  String className = object.get("className").getAsString();
  String name = object.get("name").getAsString();
  String description = object.get("description").getAsString();

  // mainClassName is the only field allowed to be absent.
  String mainClassName = object.has("mainClassName")
    ? object.get("mainClassName").getAsString()
    : null;

  // Collection-valued fields, decoded via the shared codec helpers.
  Map<String, Plugin> plugins = deserializeMap(object.get("plugins"), context, Plugin.class);
  Set<String> datasets = deserializeSet(object.get("datasets"), context, String.class);
  Map<String, String> properties = deserializeMap(object.get("properties"), context, String.class);
  List<SparkHttpServiceHandlerSpecification> handlers =
    deserializeList(object.get("handlers"), context, SparkHttpServiceHandlerSpecification.class);

  // Per-role resource requirements (client / driver / executor).
  Resources clientResources = deserializeResources(object, "client", context);
  Resources driverResources = deserializeResources(object, "driver", context);
  Resources executorResources = deserializeResources(object, "executor", context);

  return new SparkSpecification(className, name, description, mainClassName, datasets, properties,
                                clientResources, driverResources, executorResources, handlers, plugins);
}
/**
 * Encodes a {@link SparkSpecification} as a JSON object.
 * {@code mainClassName} is written only when non-null; all other fields are
 * always emitted. Member insertion order matches the original encoding.
 */
@Override
public JsonElement serialize(SparkSpecification src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject result = new JsonObject();

  result.add("className", new JsonPrimitive(src.getClassName()));
  result.add("name", new JsonPrimitive(src.getName()));
  result.add("description", new JsonPrimitive(src.getDescription()));
  result.add("plugins", serializeMap(src.getPlugins(), context, Plugin.class));

  // Optional field: omit entirely rather than writing JSON null.
  String mainClassName = src.getMainClassName();
  if (mainClassName != null) {
    result.add("mainClassName", new JsonPrimitive(mainClassName));
  }

  result.add("datasets", serializeSet(src.getDatasets(), context, String.class));
  result.add("properties", serializeMap(src.getProperties(), context, String.class));

  // Per-role resource requirements (client / driver / executor).
  serializeResources(result, "client", context, src.getClientResources());
  serializeResources(result, "driver", context, src.getDriverResources());
  serializeResources(result, "executor", context, src.getExecutorResources());

  result.add("handlers",
             serializeList(src.getHandlers(), context, SparkHttpServiceHandlerSpecification.class));
  return result;
}
/**
 * Registers the specification codecs used to (de)serialize application artifacts.
 * Registration order is preserved from the original implementation.
 *
 * @param builder the builder to configure
 * @return the same builder, for chaining
 */
public static GsonBuilder addTypeAdapters(GsonBuilder builder) {
  builder.registerTypeAdapter(Schema.class, new SchemaTypeAdapter());
  builder.registerTypeAdapter(ApplicationSpecification.class, new ApplicationSpecificationCodec());
  builder.registerTypeAdapter(MapReduceSpecification.class, new MapReduceSpecificationCodec());
  builder.registerTypeAdapter(SparkSpecification.class, new SparkSpecificationCodec());
  builder.registerTypeAdapter(WorkflowSpecification.class, new WorkflowSpecificationCodec());
  builder.registerTypeAdapter(WorkflowNode.class, new WorkflowNodeCodec());
  builder.registerTypeAdapter(CustomActionSpecification.class, new CustomActionSpecificationCodec());
  builder.registerTypeAdapter(ConditionSpecification.class, new ConditionSpecificationCodec());
  builder.registerTypeAdapter(ServiceSpecification.class, new ServiceSpecificationCodec());
  builder.registerTypeAdapter(WorkerSpecification.class, new WorkerSpecificationCodec());
  builder.registerTypeAdapter(BasicThrowable.class, new BasicThrowableCodec());
  // Both the Trigger interface and its satisfiable refinement share one codec.
  builder.registerTypeAdapter(Trigger.class, new TriggerCodec());
  builder.registerTypeAdapter(SatisfiableTrigger.class, new TriggerCodec());
  builder.registerTypeAdapter(Constraint.class, new ConstraintCodec());
  return builder.registerTypeAdapterFactory(new AppSpecTypeAdapterFactory());
}
/**
 * Writes a {@link SparkSpecification} out as JSON.
 * Every field except {@code mainClassName} is emitted unconditionally;
 * {@code mainClassName} is skipped when null instead of serialized as null.
 */
@Override
public JsonElement serialize(SparkSpecification src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject output = new JsonObject();

  // Identity and plugin metadata, in the same member order as before.
  output.add("className", new JsonPrimitive(src.getClassName()));
  output.add("name", new JsonPrimitive(src.getName()));
  output.add("description", new JsonPrimitive(src.getDescription()));
  output.add("plugins", serializeMap(src.getPlugins(), context, Plugin.class));

  if (src.getMainClassName() != null) {
    output.add("mainClassName", new JsonPrimitive(src.getMainClassName()));
  }

  output.add("datasets", serializeSet(src.getDatasets(), context, String.class));
  output.add("properties", serializeMap(src.getProperties(), context, String.class));

  // Resource requirements for each Spark role.
  serializeResources(output, "client", context, src.getClientResources());
  serializeResources(output, "driver", context, src.getDriverResources());
  serializeResources(output, "executor", context, src.getExecutorResources());

  output.add("handlers",
             serializeList(src.getHandlers(), context, SparkHttpServiceHandlerSpecification.class));
  return output;
}
/**
 * Installs every specification codec on the given builder, including the
 * Flow/Flowlet codecs this variant supports. Registration order is unchanged.
 *
 * @param builder the builder to configure
 * @return the same builder, for chaining
 */
public static GsonBuilder addTypeAdapters(GsonBuilder builder) {
  builder.registerTypeAdapter(Schema.class, new SchemaTypeAdapter());
  builder.registerTypeAdapter(ApplicationSpecification.class, new ApplicationSpecificationCodec());
  builder.registerTypeAdapter(FlowSpecification.class, new FlowSpecificationCodec());
  builder.registerTypeAdapter(FlowletSpecification.class, new FlowletSpecificationCodec());
  builder.registerTypeAdapter(MapReduceSpecification.class, new MapReduceSpecificationCodec());
  builder.registerTypeAdapter(SparkSpecification.class, new SparkSpecificationCodec());
  builder.registerTypeAdapter(WorkflowSpecification.class, new WorkflowSpecificationCodec());
  builder.registerTypeAdapter(WorkflowNode.class, new WorkflowNodeCodec());
  builder.registerTypeAdapter(CustomActionSpecification.class, new CustomActionSpecificationCodec());
  builder.registerTypeAdapter(ConditionSpecification.class, new ConditionSpecificationCodec());
  builder.registerTypeAdapter(ServiceSpecification.class, new ServiceSpecificationCodec());
  builder.registerTypeAdapter(WorkerSpecification.class, new WorkerSpecificationCodec());
  builder.registerTypeAdapter(BasicThrowable.class, new BasicThrowableCodec());
  // The Trigger interface and its satisfiable refinement share a single codec.
  builder.registerTypeAdapter(Trigger.class, new TriggerCodec());
  builder.registerTypeAdapter(SatisfiableTrigger.class, new TriggerCodec());
  builder.registerTypeAdapter(Constraint.class, new ConstraintCodec());
  return builder.registerTypeAdapterFactory(new AppSpecTypeAdapterFactory());
}
/**
 * Builds a {@link SparkSpecification} back out of its JSON encoding.
 * All members except {@code mainClassName} must be present, matching what
 * the corresponding serializer writes.
 */
@Override
public SparkSpecification deserialize(JsonElement json, Type typeOfT,
                                      JsonDeserializationContext context) throws JsonParseException {
  JsonObject spec = json.getAsJsonObject();

  String className = spec.get("className").getAsString();
  String name = spec.get("name").getAsString();
  String description = spec.get("description").getAsString();

  // Optional: absent when the spec had no explicit main class.
  String mainClassName = spec.has("mainClassName") ? spec.get("mainClassName").getAsString() : null;

  // Structured members decoded through the shared helper methods.
  Map<String, Plugin> plugins = deserializeMap(spec.get("plugins"), context, Plugin.class);
  Set<String> datasets = deserializeSet(spec.get("datasets"), context, String.class);
  Map<String, String> properties = deserializeMap(spec.get("properties"), context, String.class);
  List<SparkHttpServiceHandlerSpecification> handlers =
    deserializeList(spec.get("handlers"), context, SparkHttpServiceHandlerSpecification.class);

  // Resource settings for the three Spark roles.
  Resources clientResources = deserializeResources(spec, "client", context);
  Resources driverResources = deserializeResources(spec, "driver", context);
  Resources executorResources = deserializeResources(spec, "executor", context);

  return new SparkSpecification(className, name, description, mainClassName, datasets, properties,
                                clientResources, driverResources, executorResources, handlers, plugins);
}