ObjectUtils.instantiate

How to use the instantiate method in org.elasticsearch.hadoop.util.ObjectUtils

Best Java code snippets using org.elasticsearch.hadoop.util.ObjectUtils.instantiate (Showing top 20 results out of 315)
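Before the repository snippets, a minimal usage sketch (not taken from the results below; the class name and wrapper are hypothetical): instantiate reflectively creates an instance of the named class via its no-argument constructor and, when a Settings object is supplied, injects it into the instance if the class implements SettingsAware.

import org.elasticsearch.hadoop.cfg.Settings;
import org.elasticsearch.hadoop.util.ObjectUtils;

public class InstantiateExample {
  // Hypothetical class name; any public class with a no-arg constructor works.
  private static final String COMPONENT_CLASS = "com.example.MyComponent";

  public static Object create(Settings settings) {
    // Reflectively instantiate the class and, because Settings is passed,
    // have setSettings(...) called on the new object if it is SettingsAware.
    return ObjectUtils.instantiate(COMPONENT_CLASS, settings);
  }
}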

origin: elastic/elasticsearch-hadoop

// Convenience overload: delegates with no explicit ClassLoader.
public static <T> T instantiate(String className, Settings settings) {
  return instantiate(className, null, settings);
}
origin: elastic/elasticsearch-hadoop

public static <T> T instantiate(String className, ClassLoader loader, Settings settings) {
  T obj = instantiate(className, loader);
  // inject the job configuration into instances that want it
  if (obj instanceof SettingsAware) {
    ((SettingsAware) obj).setSettings(settings);
  }
  return obj;
}
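For context, a hedged sketch of what the receiving end of that injection looks like (the class below is illustrative, not from the repository):

import org.elasticsearch.hadoop.cfg.Settings;
import org.elasticsearch.hadoop.cfg.SettingsAware;

// Illustrative only: when created via ObjectUtils.instantiate(className, settings),
// this class receives the job configuration immediately after construction.
public class ConfiguredComponent implements SettingsAware {
  private Settings settings;

  @Override
  public void setSettings(Settings settings) {
    this.settings = settings;
  }
}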
origin: elastic/elasticsearch-hadoop

@Override
protected FieldExtractor createFieldExtractor(String fieldName) {
  settings.setProperty(ConstantFieldExtractor.PROPERTY, fieldName);
  return ObjectUtils.<FieldExtractor> instantiate(settings.getMappingDefaultClassExtractor(), settings);
}
origin: elastic/elasticsearch-hadoop

public JsonTemplatedBulk(Collection<Object> beforeObject, Collection<Object> afterObject,
    JsonFieldExtractors jsonExtractors, Settings settings) {
  super(beforeObject, afterObject, new NoOpValueWriter());
  this.jsonExtractors = jsonExtractors;
  this.jsonWriter = ObjectUtils.instantiate(settings.getSerializerBytesConverterClassName(), settings);
  this.settings = settings;
}
origin: elastic/elasticsearch-hadoop

protected FieldExtractor createFieldExtractor(String fieldName) {
  settings.setProperty(ConstantFieldExtractor.PROPERTY, fieldName);
  return ObjectUtils.instantiate(settings.getMappingDefaultClassExtractor(), settings);
}
origin: elastic/elasticsearch-hadoop

public HttpRetryHandler(Settings settings) {
  String retryPolicyName = settings.getBatchWriteRetryPolicy();
  if (ConfigurationOptions.ES_BATCH_WRITE_RETRY_POLICY_SIMPLE.equals(retryPolicyName)) {
    retryPolicyName = SimpleHttpRetryPolicy.class.getName();
  }
  else if (ConfigurationOptions.ES_BATCH_WRITE_RETRY_POLICY_NONE.equals(retryPolicyName)) {
    retryPolicyName = NoHttpRetryPolicy.class.getName();
  }
  HttpRetryPolicy retryPolicy = ObjectUtils.instantiate(retryPolicyName, settings);
  this.retry = retryPolicy.init();
  this.retryLimit = settings.getBatchWriteRetryCount();
  this.retryTime = settings.getBatchWriteRetryWait();
}
origin: elastic/elasticsearch-hadoop

private Object wrapWithFormatter(String format, final FieldExtractor createFieldExtractor) {
  // instantiate the index formatter
  final IndexFormatter iformatter = ObjectUtils.instantiate(settings.getMappingIndexFormatterClassName(), settings);
  iformatter.configure(format);
  return new FieldExtractor() {
    @Override
    public Object field(Object target) {
      String string = createFieldExtractor.field(target).toString();
      // typically a JSON string, so strip the surrounding quotes
      if (string.startsWith("\"")) {
        string = string.substring(1);
      }
      if (string.endsWith("\"")) {
        string = string.substring(0, string.length() - 1);
      }
      // hack: an index is always a primitive, so just call toString instead of parsing JSON.
      // The returned value is not formatted as JSON because 1) there is no need (it is picked
      // up down the chain) and 2) the date formatter depends on the raw string.
      return iformatter.format(string);
    }
  };
}
origin: elastic/elasticsearch-hadoop

RestClient(Settings settings, NetworkClient networkClient) {
  this.network = networkClient;
  this.scrollKeepAlive = TimeValue.timeValueMillis(settings.getScrollKeepAlive());
  this.indexReadMissingAsEmpty = settings.getIndexReadMissingAsEmpty();
  String retryPolicyName = settings.getBatchWriteRetryPolicy();
  if (ConfigurationOptions.ES_BATCH_WRITE_RETRY_POLICY_SIMPLE.equals(retryPolicyName)) {
    retryPolicyName = SimpleHttpRetryPolicy.class.getName();
  }
  else if (ConfigurationOptions.ES_BATCH_WRITE_RETRY_POLICY_NONE.equals(retryPolicyName)) {
    retryPolicyName = NoHttpRetryPolicy.class.getName();
  }
  this.retryPolicy = ObjectUtils.instantiate(retryPolicyName, settings);
  // Assume that the elasticsearch major version is the latest if the version is not already present in the settings
  this.internalVersion = settings.getInternalVersionOrLatest();
  this.errorExtractor = new ErrorExtractor(internalVersion);
}
origin: elastic/elasticsearch-hadoop

AbstractBulkFactory(Settings settings, MetadataExtractor metaExtractor, EsMajorVersion esMajorVersion) {
  this.settings = settings;
  this.esMajorVersion = esMajorVersion;
  this.requestParameterNames = new RequestParameterNames(esMajorVersion);
  this.valueWriter = ObjectUtils.instantiate(settings.getSerializerValueWriterClassName(), settings);
  this.metaExtractor = metaExtractor;
  jsonInput = settings.getInputAsJson();
  isStatic = metaExtractor == null;
  initExtractorsFromSettings(settings);
}
origin: elastic/elasticsearch-hadoop

@Test
public void createFieldExtractor() {
  Settings settings = new TestSettings();
  settings.setResourceWrite("test/{field}");
  settings.setInternalVersion(EsMajorVersion.V_7_X);
  InitializationUtils.setFieldExtractorIfNotSet(settings, MapFieldExtractor.class, LOG);
  IndexExtractor iformat = ObjectUtils.instantiate(settings.getMappingIndexExtractorClassName(), settings);
  iformat.compile(new Resource(settings, false).toString());
  assertThat(iformat.hasPattern(), is(true));
  Map<String, String> data = new HashMap<String, String>();
  data.put("field", "data");
  Object field = iformat.field(data);
  assertThat(field.toString(), equalTo("\"_index\":\"test\",\"_type\":\"data\""));
}
origin: elastic/elasticsearch-hadoop

@Test
public void testScrollWithHandlersThatPassWithMessages() throws IOException {
  MappingSet mappings = getMappingSet("numbers-as-strings");
  InputStream stream = getClass().getResourceAsStream(scrollData("numbers-as-strings"));
  Settings testSettings = new TestSettings();
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA, "" + readMetadata);
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA_FIELD, "" + metadataField);
  testSettings.setProperty(ConfigurationOptions.ES_OUTPUT_JSON, "" + readAsJson);
  testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLERS , "marco,polo,skip");
  testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLER + ".marco" , MarcoHandler.class.getName());
  testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLER + ".polo" , PoloHandler.class.getName());
  testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLER + ".skip" , NothingToSeeHereHandler.class.getName());
  JdkValueReader valueReader = ObjectUtils.instantiate(JdkValueReader.class.getName(), testSettings);
  ScrollReader reader = new ScrollReader(ScrollReaderConfigBuilder.builder(valueReader, mappings.getResolvedView(), testSettings));
  ScrollReader.Scroll scroll = reader.read(stream);
  assertThat(scroll.getTotalHits(), equalTo(196L));
  assertThat(scroll.getHits(), is(empty()));
}
origin: elastic/elasticsearch-hadoop

@Test
public void testScrollWithIgnoringHandler() throws IOException {
  MappingSet mappings = getMappingSet("numbers-as-strings");
  InputStream stream = getClass().getResourceAsStream(scrollData("numbers-as-strings"));
  Settings testSettings = new TestSettings();
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA, "" + readMetadata);
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA_FIELD, "" + metadataField);
  testSettings.setProperty(ConfigurationOptions.ES_OUTPUT_JSON, "" + readAsJson);
  testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLERS , "skipskipskip");
  testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLER + ".skipskipskip" , NothingToSeeHereHandler.class.getName());
  JdkValueReader valueReader = ObjectUtils.instantiate(JdkValueReader.class.getName(), testSettings);
  ScrollReader reader = new ScrollReader(ScrollReaderConfigBuilder.builder(valueReader, mappings.getResolvedView(), testSettings));
  ScrollReader.Scroll scroll = reader.read(stream);
  assertThat(scroll.getTotalHits(), equalTo(196L));
  assertThat(scroll.getHits(), is(empty()));
}
origin: elastic/elasticsearch-hadoop

@Test(expected = EsHadoopParsingException.class)
public void testScrollWithBreakOnInvalidMapping() throws IOException {
  MappingSet mappings = getMappingSet("numbers-as-strings");
  InputStream stream = getClass().getResourceAsStream(scrollData("numbers-as-strings"));
  Settings testSettings = new TestSettings();
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA, "" + readMetadata);
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA_FIELD, "" + metadataField);
  testSettings.setProperty(ConfigurationOptions.ES_OUTPUT_JSON, "" + readAsJson);
  JdkValueReader valueReader = ObjectUtils.instantiate(JdkValueReader.class.getName(), testSettings);
  ScrollReader reader = new ScrollReader(ScrollReaderConfigBuilder.builder(valueReader, mappings.getResolvedView(), testSettings));
  reader.read(stream);
  fail("Should not be able to parse string as long");
}
origin: elastic/elasticsearch-hadoop

@Test(expected = EsHadoopException.class)
public void testScrollWithThrowingErrorHandler() throws IOException {
  MappingSet mappings = getMappingSet("numbers-as-strings");
  InputStream stream = getClass().getResourceAsStream(scrollData("numbers-as-strings"));
  Settings testSettings = new TestSettings();
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA, "" + readMetadata);
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA_FIELD, "" + metadataField);
  testSettings.setProperty(ConfigurationOptions.ES_OUTPUT_JSON, "" + readAsJson);
  testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLERS , "throw");
  testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLER + ".throw" , ExceptionThrowingHandler.class.getName());
  JdkValueReader valueReader = ObjectUtils.instantiate(JdkValueReader.class.getName(), testSettings);
  ScrollReader reader = new ScrollReader(ScrollReaderConfigBuilder.builder(valueReader, mappings.getResolvedView(), testSettings));
  reader.read(stream);
  fail("Should not be able to parse string as long");
}
origin: elastic/elasticsearch-hadoop

@Test
public void testScrollWithHandlersThatCorrectsError() throws IOException {
  MappingSet mappings = getMappingSet("numbers-as-strings");
  InputStream stream = getClass().getResourceAsStream(scrollData("numbers-as-strings"));
  Settings testSettings = new TestSettings();
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA, "" + readMetadata);
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA_FIELD, "" + metadataField);
  testSettings.setProperty(ConfigurationOptions.ES_OUTPUT_JSON, "" + readAsJson);
  testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLERS , "fix");
  testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLER + ".fix" , CorrectingHandler.class.getName());
  JdkValueReader valueReader = ObjectUtils.instantiate(JdkValueReader.class.getName(), testSettings);
  ScrollReader reader = new ScrollReader(ScrollReaderConfigBuilder.builder(valueReader, mappings.getResolvedView(), testSettings));
  ScrollReader.Scroll scroll = reader.read(stream);
  assertThat(scroll.getTotalHits(), equalTo(196L));
  assertThat(scroll.getHits().size(), equalTo(1));
  assertEquals(4L, JsonUtils.query("number").apply(scroll.getHits().get(0)[1]));
}
origin: elastic/elasticsearch-hadoop

@Test(expected = EsHadoopException.class)
public void testScrollWithNeverendingHandler() throws IOException {
  MappingSet mappings = getMappingSet("numbers-as-strings");
  InputStream stream = getClass().getResourceAsStream(scrollData("numbers-as-strings"));
  Settings testSettings = new TestSettings();
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA, "" + readMetadata);
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA_FIELD, "" + metadataField);
  testSettings.setProperty(ConfigurationOptions.ES_OUTPUT_JSON, "" + readAsJson);
  testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLERS , "evil");
  testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLER + ".evil" , NeverSurrenderHandler.class.getName());
  JdkValueReader valueReader = ObjectUtils.instantiate(JdkValueReader.class.getName(), testSettings);
  ScrollReader reader = new ScrollReader(ScrollReaderConfigBuilder.builder(valueReader, mappings.getResolvedView(), testSettings));
  reader.read(stream);
  fail("Should not be able to parse string as long");
}
origin: elastic/elasticsearch-hadoop

@Test(expected = EsHadoopParsingException.class)
public void testScrollWithThrowingAbortErrorHandler() throws IOException {
  MappingSet mappings = getMappingSet("numbers-as-strings");
  InputStream stream = getClass().getResourceAsStream(scrollData("numbers-as-strings"));
  Settings testSettings = new TestSettings();
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA, "" + readMetadata);
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA_FIELD, "" + metadataField);
  testSettings.setProperty(ConfigurationOptions.ES_OUTPUT_JSON, "" + readAsJson);
  testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLERS , "throw");
  testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLER + ".throw" , AbortingExceptionThrowingHandler.class.getName());
  JdkValueReader valueReader = ObjectUtils.instantiate(JdkValueReader.class.getName(), testSettings);
  ScrollReader reader = new ScrollReader(ScrollReaderConfigBuilder.builder(valueReader, mappings.getResolvedView(), testSettings));
  reader.read(stream);
  fail("Should not be able to parse string as long");
}
origin: elastic/elasticsearch-hadoop

@Test(expected = EsHadoopIllegalArgumentException.class)
public void createFieldExtractorNull() {
  Settings settings = new TestSettings();
  settings.setResourceWrite("test/{field}");
  settings.setInternalVersion(EsMajorVersion.V_7_X);
  InitializationUtils.setFieldExtractorIfNotSet(settings, MapFieldExtractor.class, LOG);
  IndexExtractor iformat = ObjectUtils.instantiate(settings.getMappingIndexExtractorClassName(), settings);
  iformat.compile(new Resource(settings, false).toString());
  assertThat(iformat.hasPattern(), is(true));
  Map<String, String> data = new HashMap<String, String>();
  data.put("field", null);
  iformat.field(data);
  fail();
}
origin: elastic/elasticsearch-hadoop

@Test(expected = EsHadoopIllegalArgumentException.class)
public void createFieldExtractorFailure() {
  Settings settings = new TestSettings();
  settings.setResourceWrite("test/{optional}");
  settings.setInternalVersion(EsMajorVersion.V_7_X);
  InitializationUtils.setFieldExtractorIfNotSet(settings, MapFieldExtractor.class, LOG);

  IndexExtractor iformat = ObjectUtils.instantiate(settings.getMappingIndexExtractorClassName(), settings);
  iformat.compile(new Resource(settings, false).toString());

  assertThat(iformat.hasPattern(), is(true));

  Map<String, String> data = new HashMap<String, String>();
  data.put("field", "data");

  iformat.field(data);

  fail();
}
origin: elastic/elasticsearch-hadoop

@Test
public void testScrollWithNestedArrays() throws IOException {
  MappingSet mappings = getMappingSet("nested-list");
  InputStream stream = getClass().getResourceAsStream(scrollData("nested-list"));
  Settings testSettings = new TestSettings();
  testSettings.setProperty(ConfigurationOptions.ES_READ_FIELD_AS_ARRAY_INCLUDE, "a:3");
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA, "" + readMetadata);
  testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA_FIELD, "" + metadataField);
  testSettings.setProperty(ConfigurationOptions.ES_OUTPUT_JSON, "" + readAsJson);
  JdkValueReader valueReader = ObjectUtils.instantiate(JdkValueReader.class.getName(), testSettings);
  ScrollReader reader = new ScrollReader(ScrollReaderConfigBuilder.builder(valueReader, mappings.getResolvedView(), testSettings));
  ScrollReader.Scroll scroll = reader.read(stream);
  // Case of already correctly nested array data
  assertEquals(1L, JsonUtils.query("a").get(0).get(0).get(0).apply(scroll.getHits().get(0)[1]));
  // Case of insufficiently nested array data
  assertEquals(9L, JsonUtils.query("a").get(0).get(0).get(0).apply(scroll.getHits().get(1)[1]));
  // Case of singleton data that is not nested in ANY array levels.
  assertEquals(10L, JsonUtils.query("a").get(0).get(0).get(0).apply(scroll.getHits().get(2)[1]));
}

Popular methods of ObjectUtils

  • isEmpty
  • toObjectArray
  • isClassPresent
