/**
 * Convenience overload of {@code instantiate(String, ClassLoader, Settings)} that
 * uses the default class loader.
 *
 * @param className fully-qualified name of the class to instantiate
 * @param settings  settings injected into the instance when it is settings-aware
 * @return the newly created, configured instance
 */
public static <T> T instantiate(String className, Settings settings) {
    return instantiate(className, null, settings);
}
/**
 * Creates the storage from Pig-style configuration strings. Each string may hold
 * several {@code key=value} options separated by {@code ;}.
 *
 * @param configuration zero or more option strings (may be empty or null)
 * @throws EsHadoopIllegalArgumentException if an option string cannot be parsed
 */
public EsStorage(String... configuration) {
    if (!ObjectUtils.isEmpty(configuration)) {
        try {
            for (String string : configuration) {
                // replace ; with line separators so Properties#load sees one option per
                // line; loading the raw string would collapse "a=b;c=d" into a single
                // property with value "b;c=d"
                properties.load(new StringReader(string.replace(';', '\n')));
                log.trace(properties.toString());
            }
        } catch (IOException ex) {
            throw new EsHadoopIllegalArgumentException("Cannot parse options " + Arrays.toString(configuration), ex);
        }
    }
}
for (Object o : ObjectUtils.toObjectArray(value)) { Result result = doWrite(o, generator, parentField); if (!result.isSuccesful()) {
static void checkSparkLibraryCompatibility(boolean throwOnIncompatible) { // check whether the correct es-hadoop is used with the correct Spark version boolean isSpark13Level = ObjectUtils.isClassPresent("org.apache.spark.sql.DataFrame", SparkConf.class.getClassLoader()); boolean isSpark20Level = ObjectUtils.isClassPresent("org.apache.spark.sql.streaming.StreamingQuery", SparkConf.class.getClassLoader()); CompatibilityLevel compatibilityLevel = ObjectUtils.instantiate("org.elasticsearch.spark.sql.SparkSQLCompatibilityLevel", CompatUtils.class.getClassLoader()); boolean isEshForSpark20 = "20".equals(compatibilityLevel.versionId()); String esSupportedSparkVersion = compatibilityLevel.versionDescription(); String errorMessage = null; if (!(isSpark13Level || isSpark20Level)) { String sparkVersion = getSparkVersionOr("1.0-1.2"); errorMessage = String.format("Incorrect classpath detected; Elasticsearch Spark compiled for Spark %s but used with unsupported Spark version %s", esSupportedSparkVersion, sparkVersion); } else if (isSpark20Level != isEshForSpark20) { // XOR can be applied as well but != increases readability String sparkVersion = getSparkVersionOr(isSpark13Level ? "1.3-1.6" : "2.0+"); errorMessage = String.format("Incorrect classpath detected; Elasticsearch Spark compiled for Spark %s but used with Spark %s", esSupportedSparkVersion, sparkVersion); } if (errorMessage != null) { if (throwOnIncompatible) { throw new EsHadoopIllegalStateException(errorMessage); } else { LogFactory.getLog("org.elasticsearch.spark.rdd.EsSpark").warn(errorMessage); } } }
/**
 * Verifies that this es-hadoop artifact matches the Spark version present on the
 * classpath, either throwing or logging a warning on mismatch.
 *
 * @param throwOnIncompatible if true, an incompatibility raises
 *                            {@code EsHadoopIllegalStateException}; otherwise it is logged
 */
static void checkSparkLibraryCompatibility(boolean throwOnIncompatible) {
    // check whether the correct es-hadoop is used with the correct Spark version
    // presence of these marker classes distinguishes the Spark API levels
    boolean isSpark13Level = ObjectUtils.isClassPresent("org.apache.spark.sql.DataFrame", SparkConf.class.getClassLoader());
    boolean isSpark20Level = ObjectUtils.isClassPresent("org.apache.spark.sql.streaming.StreamingQuery", SparkConf.class.getClassLoader());

    // the compatibility level this es-hadoop build was compiled against
    CompatibilityLevel compatibilityLevel = ObjectUtils.instantiate("org.elasticsearch.spark.sql.SparkSQLCompatibilityLevel", CompatUtils.class.getClassLoader());
    boolean isEshForSpark20 = "20".equals(compatibilityLevel.versionId());
    String esSupportedSparkVersion = compatibilityLevel.versionDescription();

    String errorMessage = null;

    if (!(isSpark13Level || isSpark20Level)) {
        // neither marker class found - Spark is older than any supported level
        String sparkVersion = getSparkVersionOr("1.0-1.2");
        errorMessage = String.format("Incorrect classpath detected; Elasticsearch Spark compiled for Spark %s but used with unsupported Spark version %s", esSupportedSparkVersion, sparkVersion);
    }
    else if (isSpark20Level != isEshForSpark20) { // XOR can be applied as well but != increases readability
        // detected Spark level disagrees with the level this build targets
        String sparkVersion = getSparkVersionOr(isSpark13Level ? "1.3-1.6" : "2.0+");
        errorMessage = String.format("Incorrect classpath detected; Elasticsearch Spark compiled for Spark %s but used with Spark %s", esSupportedSparkVersion, sparkVersion);
    }

    if (errorMessage != null) {
        if (throwOnIncompatible) {
            throw new EsHadoopIllegalStateException(errorMessage);
        }
        else {
            LogFactory.getLog("org.elasticsearch.spark.rdd.EsSpark").warn(errorMessage);
        }
    }
}
/**
 * Instantiates the given class through the supplied class loader and, when the
 * resulting object implements {@link SettingsAware}, injects the given settings.
 *
 * @param className fully-qualified class name
 * @param loader    class loader to use (null selects the default)
 * @param settings  settings to inject when the instance supports them
 * @return the configured instance
 */
public static <T> T instantiate(String className, ClassLoader loader, Settings settings) {
    T instance = instantiate(className, loader);
    if (instance instanceof SettingsAware) {
        ((SettingsAware) instance).setSettings(settings);
    }
    return instance;
}
public void process(BytesArray storage) { // no extractors, no lookups if (ObjectUtils.isEmpty(paths)) { return; } results.clear(); if (log.isTraceEnabled()) { log.trace(String.format("About to look for paths [%s] in doc [%s]", Arrays.toString(paths), storage)); } results.addAll(ParsingUtils.values(new JacksonJsonParser(storage.bytes(), 0, storage.length()), paths)); }
/**
 * Verifies that this es-hadoop artifact matches the Spark version present on the
 * classpath, either throwing or logging a warning on mismatch.
 *
 * @param throwOnIncompatible if true, an incompatibility raises
 *                            {@code EsHadoopIllegalStateException}; otherwise it is logged
 */
static void checkSparkLibraryCompatibility(boolean throwOnIncompatible) {
    // check whether the correct es-hadoop is used with the correct Spark version
    // presence of these marker classes distinguishes the Spark API levels
    boolean isSpark13Level = ObjectUtils.isClassPresent("org.apache.spark.sql.DataFrame", SparkConf.class.getClassLoader());
    boolean isSpark20Level = ObjectUtils.isClassPresent("org.apache.spark.sql.streaming.StreamingQuery", SparkConf.class.getClassLoader());

    // the compatibility level this es-hadoop build was compiled against
    CompatibilityLevel compatibilityLevel = ObjectUtils.instantiate("org.elasticsearch.spark.sql.SparkSQLCompatibilityLevel", CompatUtils.class.getClassLoader());
    boolean isEshForSpark20 = "20".equals(compatibilityLevel.versionId());
    String esSupportedSparkVersion = compatibilityLevel.versionDescription();

    String errorMessage = null;

    if (!(isSpark13Level || isSpark20Level)) {
        // neither marker class found - Spark is older than any supported level
        String sparkVersion = getSparkVersionOr("1.0-1.2");
        errorMessage = String.format("Incorrect classpath detected; Elasticsearch Spark compiled for Spark %s but used with unsupported Spark version %s", esSupportedSparkVersion, sparkVersion);
    }
    else if (isSpark20Level != isEshForSpark20) { // XOR can be applied as well but != increases readability
        // detected Spark level disagrees with the level this build targets
        String sparkVersion = getSparkVersionOr(isSpark13Level ? "1.3-1.6" : "2.0+");
        errorMessage = String.format("Incorrect classpath detected; Elasticsearch Spark compiled for Spark %s but used with Spark %s", esSupportedSparkVersion, sparkVersion);
    }

    if (errorMessage != null) {
        if (throwOnIncompatible) {
            throw new EsHadoopIllegalStateException(errorMessage);
        }
        else {
            LogFactory.getLog("org.elasticsearch.spark.rdd.EsSpark").warn(errorMessage);
        }
    }
}
for (Object o : ObjectUtils.toObjectArray(value)) { Result result = doWrite(o, generator, parentField); if (!result.isSuccesful()) {
/**
 * Creates the default field extractor for the given field, instantiated from the
 * class configured in the settings.
 *
 * @param fieldName field the extractor should target
 * @return a newly instantiated extractor bound to {@code fieldName}
 */
protected FieldExtractor createFieldExtractor(String fieldName) {
    // NOTE(review): mutates the shared settings object to pass the target field
    // to the extractor - assumes settings are not reused concurrently; confirm
    settings.setProperty(ConstantFieldExtractor.PROPERTY, fieldName);
    // explicit type witness keeps the call consistent with the sibling subclass
    // implementation and independent of return-type inference
    return ObjectUtils.<FieldExtractor> instantiate(settings.getMappingDefaultClassExtractor(), settings);
}
}
@Override public void compile(String pattern) { this.pattern = pattern; // break it down into index/type String[] split = pattern.split("/"); Assert.isTrue(!ObjectUtils.isEmpty(split), "invalid pattern given " + pattern); // check pattern hasPattern = pattern.contains("{") && pattern.contains("}"); index = parse(split[0].trim()); if (split.length > 1) { // Assert the pattern is only at most 2, and at the least 1 Assert.isTrue(split.length == 2, "invalid pattern given " + pattern); type = parse(split[1].trim()); } else { type = null; } }
/**
 * Verifies that this es-hadoop artifact matches the Spark version present on the
 * classpath, either throwing or logging a warning on mismatch.
 *
 * @param throwOnIncompatible if true, an incompatibility raises
 *                            {@code EsHadoopIllegalStateException}; otherwise it is logged
 */
static void checkSparkLibraryCompatibility(boolean throwOnIncompatible) {
    // check whether the correct es-hadoop is used with the correct Spark version
    // presence of these marker classes distinguishes the Spark API levels
    boolean isSpark13Level = ObjectUtils.isClassPresent("org.apache.spark.sql.DataFrame", SparkConf.class.getClassLoader());
    boolean isSpark20Level = ObjectUtils.isClassPresent("org.apache.spark.sql.streaming.StreamingQuery", SparkConf.class.getClassLoader());

    // the compatibility level this es-hadoop build was compiled against
    CompatibilityLevel compatibilityLevel = ObjectUtils.instantiate("org.elasticsearch.spark.sql.SparkSQLCompatibilityLevel", CompatUtils.class.getClassLoader());
    boolean isEshForSpark20 = "20".equals(compatibilityLevel.versionId());
    String esSupportedSparkVersion = compatibilityLevel.versionDescription();

    String errorMessage = null;

    if (!(isSpark13Level || isSpark20Level)) {
        // neither marker class found - Spark is older than any supported level
        String sparkVersion = getSparkVersionOr("1.0-1.2");
        errorMessage = String.format("Incorrect classpath detected; Elasticsearch Spark compiled for Spark %s but used with unsupported Spark version %s", esSupportedSparkVersion, sparkVersion);
    }
    else if (isSpark20Level != isEshForSpark20) { // XOR can be applied as well but != increases readability
        // detected Spark level disagrees with the level this build targets
        String sparkVersion = getSparkVersionOr(isSpark13Level ? "1.3-1.6" : "2.0+");
        errorMessage = String.format("Incorrect classpath detected; Elasticsearch Spark compiled for Spark %s but used with Spark %s", esSupportedSparkVersion, sparkVersion);
    }

    if (errorMessage != null) {
        if (throwOnIncompatible) {
            throw new EsHadoopIllegalStateException(errorMessage);
        }
        else {
            LogFactory.getLog("org.elasticsearch.spark.rdd.EsSpark").warn(errorMessage);
        }
    }
}
for (Object o : ObjectUtils.toObjectArray(value)) { Result result = doWrite(o, generator, parentField); if (!result.isSuccesful()) {
/**
 * Creates the default field extractor for the given field name.
 *
 * @param fieldName field the extractor should target
 * @return extractor instantiated from the configured default extractor class
 */
@Override
protected FieldExtractor createFieldExtractor(String fieldName) {
    // point the extractor at the requested field before instantiating it
    settings.setProperty(ConstantFieldExtractor.PROPERTY, fieldName);
    return ObjectUtils.<FieldExtractor> instantiate(settings.getMappingDefaultClassExtractor(), settings);
}
}
public static void setFilters(Settings settings, String... filters) { // clear any filters inside the settings settings.setProperty(InternalConfigurationOptions.INTERNAL_ES_QUERY_FILTERS, ""); if (ObjectUtils.isEmpty(filters)) { return; } settings.setProperty(InternalConfigurationOptions.INTERNAL_ES_QUERY_FILTERS, IOUtils.serializeToBase64(filters)); }
for (Object o : ObjectUtils.toObjectArray(value)) { Result result = doWrite(o, generator, parentField); if (!result.isSuccesful()) {
/**
 * Creates a templated bulk writer for JSON documents, using the given extractors
 * to pull template field values out of each document.
 *
 * @param beforeObject   entries emitted before each document
 * @param afterObject    entries emitted after each document
 * @param jsonExtractors field extractors applied to the raw JSON
 * @param settings       settings used to select the bytes-converter implementation
 */
public JsonTemplatedBulk(Collection<Object> beforeObject, Collection<Object> afterObject, JsonFieldExtractors jsonExtractors, Settings settings) {
    // NoOpValueWriter: documents are presumably already serialized JSON, so no
    // value conversion is performed - confirm against callers
    super(beforeObject, afterObject, new NoOpValueWriter());
    this.jsonExtractors = jsonExtractors;
    // converter that turns incoming objects into raw bytes
    this.jsonWriter = ObjectUtils.instantiate(settings.getSerializerBytesConverterClassName(), settings);
    this.settings = settings;
}
/**
 * Creates the storage from Pig-style configuration strings. Each string may hold
 * several {@code key=value} options separated by {@code ;}.
 *
 * @param configuration zero or more option strings (may be empty or null)
 * @throws EsHadoopIllegalArgumentException if an option string cannot be parsed
 */
public EsStorage(String... configuration) {
    if (!ObjectUtils.isEmpty(configuration)) {
        try {
            for (String string : configuration) {
                // replace ; with line separators so Properties#load sees one option per
                // line; loading the raw string would collapse "a=b;c=d" into a single
                // property with value "b;c=d"
                properties.load(new StringReader(string.replace(';', '\n')));
                log.trace(properties.toString());
            }
        } catch (IOException ex) {
            throw new EsHadoopIllegalArgumentException("Cannot parse options " + Arrays.toString(configuration), ex);
        }
    }
}
for (Object o : ObjectUtils.toObjectArray(value)) { Result result = doWrite(o, generator, parentField); if (!result.isSuccesful()) {
/**
 * Builds an HTTP retry handler from the configured batch-write retry policy.
 *
 * @param settings source of the policy name, the retry count and the retry wait
 */
public HttpRetryHandler(Settings settings) {
    // map the well-known policy aliases onto their implementing classes;
    // anything else is treated as a fully-qualified class name
    String policyClass = settings.getBatchWriteRetryPolicy();
    if (ConfigurationOptions.ES_BATCH_WRITE_RETRY_POLICY_SIMPLE.equals(policyClass)) {
        policyClass = SimpleHttpRetryPolicy.class.getName();
    }
    else if (ConfigurationOptions.ES_BATCH_WRITE_RETRY_POLICY_NONE.equals(policyClass)) {
        policyClass = NoHttpRetryPolicy.class.getName();
    }

    HttpRetryPolicy policy = ObjectUtils.instantiate(policyClass, settings);
    this.retry = policy.init();
    this.retryLimit = settings.getBatchWriteRetryCount();
    this.retryTime = settings.getBatchWriteRetryWait();
}