/**
 * Joins the given collection into a single string, delegating to
 * {@link #concatenate(Collection, String)} with {@code DEFAULT_DELIMITER}.
 *
 * @param list values to join
 * @return the delimiter-joined string
 */
public static String concatenate(Collection<?> list) {
    return concatenate(list, DEFAULT_DELIMITER);
}
/**
 * Stores the discovered node addresses into the settings under the internal
 * discovered-nodes configuration key.
 *
 * @param settings configuration to update
 * @param nodes node addresses to record (joined with the default delimiter)
 */
public static void setDiscoveredNodes(Settings settings, Collection<String> nodes) {
    String joinedNodes = StringUtils.concatenate(nodes);
    settings.setProperty(InternalConfigurationOptions.INTERNAL_ES_DISCOVERED_NODES, joinedNodes);
}
/**
 * Builds the alias lookup path of the form {@code {indices|_all}/_alias[/{aliases}]}
 * and performs the GET request.
 *
 * @return the parsed response of the alias lookup
 */
@Override
public Response execute() {
    StringBuilder path = new StringBuilder();
    if (indices.isEmpty()) {
        path.append("_all");
    }
    else {
        path.append(StringUtils.concatenate(indices));
    }
    path.append("/_alias");
    if (!aliases.isEmpty()) {
        path.append("/").append(StringUtils.concatenate(aliases));
    }
    // unchecked cast of the parsed response to a Map
    return new Response((Map<String, Object>) client.get(path.toString(), null));
}
/**
 * URI-encodes each element of the list and joins the results with the
 * supplied delimiter.
 *
 * @param list values to encode and join; {@code null} is treated as empty
 * @param delimiter separator placed between encoded values
 * @return the encoded, delimiter-joined string
 */
public static String concatenateAndUriEncode(Collection<?> list, String delimiter) {
    Collection<String> encoded = new ArrayList<String>();
    if (list != null) {
        for (Object entry : list) {
            encoded.add(encode(entry.toString()));
        }
    }
    return StringUtils.concatenate(encoded, delimiter);
}
/**
 * Computes the comma-separated field projection for the given schema,
 * applying the aliases configured in the properties.
 *
 * @param schema schema whose fields are projected
 * @param props properties backing the alias configuration
 * @return comma-joined field names
 */
static String asProjection(Schema schema, Properties props) {
    FieldAlias fieldAlias = alias(new PropertiesSettings(props));
    List<String> fields = new ArrayList<String>();
    addField(schema, fields, fieldAlias, null);
    return StringUtils.concatenate(fields, ",");
}
// Configures the given job for reading via EsInputFormat and restricts the
// fetched document fields to this tap's source fields.
// NOTE(review): in the flattened source the inline "//" comment swallowed the rest
// of the line; restored to its own line with tokens unchanged.
@Override
public void sourceConfInit(FlowProcess<JobConf> flowProcess, Tap<JobConf, RecordReader, OutputCollector> tap, JobConf conf) {
    conf.setInputFormat(EsInputFormat.class);
    Settings set = loadSettings(conf, true);

    Collection<String> fields = CascadingUtils.fieldToAlias(set, getSourceFields());
    // load only the necessary fields
    conf.set(InternalConfigurationOptions.INTERNAL_ES_TARGET_FIELDS, StringUtils.concatenate(fields));

    if (log.isTraceEnabled()) {
        log.trace("Initialized (source) configuration " + HadoopCfgUtils.asProperties(conf));
    }
}
/**
 * Computes the comma-separated projection for the required fields,
 * applying the aliases configured in the properties.
 *
 * @param list required fields to project
 * @param props properties backing the alias configuration
 * @return comma-joined projected field names
 */
static String asProjection(RequiredFieldList list, Properties props) {
    FieldAlias fieldAlias = alias(new PropertiesSettings(props));
    List<String> projectedFields = new ArrayList<String>();
    for (RequiredField requiredField : list.getFields()) {
        addField(requiredField, projectedFields, fieldAlias, "");
    }
    return StringUtils.concatenate(projectedFields, ",");
}
@Override public FileSplit[] getSplits(JobConf job, int numSplits) throws IOException { // first, merge input table properties (since there's no access to them ...) Settings settings = HadoopSettingsManager.loadFrom(job); //settings.merge(IOUtils.propsFromString(settings.getProperty(HiveConstants.INPUT_TBL_PROPERTIES))); Log log = LogFactory.getLog(getClass()); // move on to initialization InitializationUtils.setValueReaderIfNotSet(settings, HiveValueReader.class, log); if (settings.getOutputAsJson() == false) { // Only set the fields if we aren't asking for raw JSON settings.setProperty(InternalConfigurationOptions.INTERNAL_ES_TARGET_FIELDS, StringUtils.concatenate(HiveUtils.columnToAlias(settings), ",")); } HiveUtils.init(settings, log); // decorate original splits as FileSplit InputSplit[] shardSplits = super.getSplits(job, numSplits); FileSplit[] wrappers = new FileSplit[shardSplits.length]; Path path = new Path(job.get(HiveConstants.TABLE_LOCATION)); for (int i = 0; i < wrappers.length; i++) { wrappers[i] = new EsHiveSplit(shardSplits[i], path); } return wrappers; }
// NOTE(review): fragment of an unseen method — joins the routing values
// with commas before passing them to the search request builder.
searchRequestBuilder.routing(StringUtils.concatenate(routing, ","));
// NOTE(review): this span appears to be two disjoint fragments from different methods
// (an error throw for conflicting user source filters, and a floating query-builder
// chain plus scroll-reader construction); kept byte-identical pending confirmation
// of the original context — it is not compilable as-is.
throw new EsHadoopIllegalStateException("User specified source filters were found [" + userFilter + "], " + "but the connector is executing in a state where it has provided its own source filtering " + "[" + StringUtils.concatenate(fields, ",") + "]. Please clear the user specified source fields under the " + "[" + ConfigurationOptions.ES_READ_SOURCE_FILTER + "] property to continue."); .readMetadata(settings.getReadMetadata()) .filters(QueryUtils.parseFilters(settings)) .fields(StringUtils.concatenate(fields, ",")); input = queryBuilder.build(client, new ScrollReader(ScrollReaderConfigBuilder.builder(new JdkValueReader(), mapping, settings)));
/**
 * Joins the given collection using {@code DEFAULT_DELIMITER}; delegates to
 * {@link #concatenate(Collection, String)}.
 *
 * @param list values to join
 * @return the joined string
 */
public static String concatenate(Collection<?> list) { return concatenate(list, DEFAULT_DELIMITER); }
/**
 * Convenience overload: joins the collection with the default delimiter by
 * delegating to the two-argument {@code concatenate}.
 *
 * @param list values to join
 * @return the joined string
 */
public static String concatenate(Collection<?> list) { return concatenate(list, DEFAULT_DELIMITER); }
/**
 * Executes a GET against {@code {indices|_all}/_alias[/{aliases}]} and returns
 * the parsed result.
 *
 * @return response wrapping the alias lookup result
 */
@Override
public Response execute() {
    StringBuilder path = new StringBuilder();
    if (indices.size() > 0) {
        path.append(StringUtils.concatenate(indices));
    }
    else {
        // no explicit indices - query aliases across all of them
        path.append("_all");
    }
    path.append("/_alias");
    if (aliases.size() > 0) {
        path.append("/").append(StringUtils.concatenate(aliases));
    }
    // unchecked cast of the parsed response to a Map
    return new Response((Map<String, Object>) client.get(path.toString(), null));
}
// Persists the discovered node addresses (joined with the default delimiter)
// under the internal discovered-nodes configuration key.
public static void setDiscoveredNodes(Settings settings, Collection<String> nodes) { settings.setProperty(InternalConfigurationOptions.INTERNAL_ES_DISCOVERED_NODES, StringUtils.concatenate(nodes)); }
/**
 * Query-encodes each element and joins the results with the supplied delimiter.
 *
 * @param list values to encode and join; {@code null} is treated as empty
 * @param delimiter separator between encoded values
 * @return the encoded, delimiter-joined string
 */
public static String concatenateAndUriEncode(Collection<?> list, String delimiter) {
    Collection<String> escaped = new ArrayList<String>();
    if (list != null) {
        for (Object object : list) {
            // NOTE(review): this variant uses encodeQuery while another version of this
            // method uses encode — presumably query-string vs path encoding; confirm intended
            escaped.add(encodeQuery(object.toString()));
        }
    }
    return concatenate(escaped, delimiter);
}
/**
 * Records the discovered cluster nodes in the given settings.
 *
 * @param settings target configuration
 * @param nodes node addresses to store, joined with the default delimiter
 */
public static void setDiscoveredNodes(Settings settings, Collection<String> nodes) {
    final String key = InternalConfigurationOptions.INTERNAL_ES_DISCOVERED_NODES;
    settings.setProperty(key, StringUtils.concatenate(nodes));
}
/**
 * Issues the alias lookup GET request against
 * {@code {indices|_all}/_alias[/{aliases}]}.
 *
 * @return the parsed alias response
 */
@Override
public Response execute() {
    String indexSegment = indices.isEmpty() ? "_all" : StringUtils.concatenate(indices);
    String path = indexSegment + "/_alias";
    if (!aliases.isEmpty()) {
        path = path + "/" + StringUtils.concatenate(aliases);
    }
    // unchecked cast of the parsed response to a Map
    return new Response((Map<String, Object>) client.get(path, null));
}
/**
 * Builds the comma-separated field projection for the schema, applying the
 * aliases configured in the properties.
 *
 * @param schema schema whose fields are projected
 * @param props properties holding the alias configuration
 * @return comma-joined field list
 */
static String asProjection(Schema schema, Properties props) {
    List<String> fields = new ArrayList<String>();
    addField(schema, fields, alias(new PropertiesSettings(props)), null);
    return StringUtils.concatenate(fields, ",");
}
@Override public void sourceConfInit(FlowProcess<JobConf> flowProcess, Tap<JobConf, RecordReader, OutputCollector> tap, JobConf conf) { conf.setInputFormat(EsInputFormat.class); Settings set = loadSettings(conf, true); Collection<String> fields = CascadingUtils.fieldToAlias(set, getSourceFields()); // load only the necessary fields conf.set(InternalConfigurationOptions.INTERNAL_ES_TARGET_FIELDS, StringUtils.concatenate(fields)); if (log.isTraceEnabled()) { log.trace("Initialized (source) configuration " + HadoopCfgUtils.asProperties(conf)); } }
/**
 * Builds the comma-separated projection string for the required fields,
 * applying the configured aliases.
 *
 * @param list required fields to project
 * @param props properties backing the alias configuration
 * @return comma-joined projected field names
 */
static String asProjection(RequiredFieldList list, Properties props) {
    List<String> fields = new ArrayList<String>();
    FieldAlias alias = alias(new PropertiesSettings(props));
    for (RequiredField field : list.getFields()) {
        addField(field, fields, alias, "");
    }
    return StringUtils.concatenate(fields, ",");
}