/**
 * Restricts the search to the given shard.
 *
 * @param shard non-blank shard identifier
 * @return this builder, for chaining
 */
public SearchRequestBuilder shard(String shard) {
    Assert.hasText(shard, "Invalid shard");
    this.shard = shard;
    return this;
}
/**
 * Builds a reusable ECS message template.
 *
 * @param schema        the common-schema instance this template belongs to
 * @param labels        label key/value pairs to stamp on every message
 * @param tags          tags to stamp on every message
 * @param host          host metadata to embed
 * @param eventCategory required ECS event category; must not be blank
 * @param eventType     required ECS event type; must not be blank
 */
public MessageTemplate(ElasticCommonSchema schema, Map<String, String> labels, Set<String> tags, HostData host,
                       String eventCategory, String eventType) {
    // Validate the two mandatory text fields before storing anything.
    Assert.hasText(eventCategory, "Missing " + FieldNames.FIELD_EVENT_CATEGORY + " value for ECS template.");
    Assert.hasText(eventType, "Missing " + FieldNames.FIELD_EVENT_TYPE + " value for ECS template.");
    this.schema = schema;
    this.labels = labels;
    this.tags = tags;
    this.host = host;
    this.eventCategory = eventCategory;
    this.eventType = eventType;
}
/**
 * Asserts that the given sequence contains actual text, failing with a
 * default message otherwise.
 *
 * @param sequence the character sequence to check
 */
public static void hasText(CharSequence sequence) {
    // Delegate to the two-arg variant with the canonical default message.
    hasText(sequence,
            "[Assertion failed] - this CharSequence argument must have text; it must not be null, empty, or blank");
}
/**
 * Returns the node this task has been pinned to.
 *
 * @param settings settings holding the internal pinned-node property
 * @return the pinned node identifier
 */
public static String getPinnedNode(Settings settings) {
    String pinnedNode = settings.getProperty(InternalConfigurationOptions.INTERNAL_ES_PINNED_NODE);
    Assert.hasText(pinnedNode, "Task has not been pinned to a node...");
    return pinnedNode;
}
/**
 * Returns the transport-pooling key assigned to this job.
 *
 * @param settings settings holding the internal pooling-key property
 * @return the job's transport pooling key
 */
public static String getJobTransportPoolingKey(Settings settings) {
    String poolingKey = settings.getProperty(InternalConfigurationOptions.INTERNAL_TRANSPORT_POOLING_KEY);
    Assert.hasText(poolingKey, "Job has not been assigned a transport pooling key...");
    return poolingKey;
}
/**
 * Loads the named class with the given loader (falling back to this class'
 * loader) and creates an instance via its no-arg constructor.
 *
 * @param className fully qualified class name; must not be blank
 * @param loader    class loader to use, or {@code null} for the default
 * @return a new instance of the requested class
 * @throws EsHadoopIllegalStateException if the class cannot be loaded or instantiated
 */
@SuppressWarnings("unchecked")
public static <T> T instantiate(String className, ClassLoader loader) {
    Assert.hasText(className, "No class name given");
    ClassLoader cl = (loader != null ? loader : ObjectUtils.class.getClassLoader());
    Class<?> clz = null;
    try {
        clz = cl.loadClass(className);
    } catch (ClassNotFoundException e) {
        throw new EsHadoopIllegalStateException(String.format("Cannot load class [%s]", className), e);
    }
    try {
        // getDeclaredConstructor().newInstance() is the documented replacement for the
        // deprecated Class.newInstance(), which propagated checked constructor
        // exceptions without declaring them; constructor failures are now wrapped
        // (InvocationTargetException) and still caught by the broad catch below.
        return (T) clz.getDeclaredConstructor().newInstance();
    } catch (Exception ex) {
        throw new EsHadoopIllegalStateException(String.format("Cannot instantiate class [%s]", className), ex);
    }
}
/**
 * Prepares the Hadoop configuration for a Hive read or write against this table.
 * On writes, swaps in the ES output committer; in both cases asserts that Hive
 * supplied a table location.
 *
 * @param tableDesc Hive table descriptor whose properties must declare TABLE_LOCATION
 * @param read      true when initializing for reading, false for writing
 */
private void init(TableDesc tableDesc, boolean read) {
    Configuration cfg = getConf();
    // NB: we can't just merge the table properties in, we need to save them per input/output otherwise clashes occur which confuse Hive
    Settings settings = HadoopSettingsManager.loadFrom(cfg);
    //settings.setProperty((read ? HiveConstants.INPUT_TBL_PROPERTIES : HiveConstants.OUTPUT_TBL_PROPERTIES), IOUtils.propsToString(tableDesc.getProperties()));
    if (read) {
        // no generic setting
    }
    else {
        // replace the default committer when using the old API
        HadoopCfgUtils.setOutputCommitterClass(cfg, EsOutputFormat.EsOutputCommitter.class.getName());
    }

    // Fail fast with a descriptive message when Hive did not declare a table location.
    Assert.hasText(tableDesc.getProperties().getProperty(TABLE_LOCATION), String.format(
            "no table location [%s] declared by Hive resulting in abnormal execution;", TABLE_LOCATION));
}
/** * Selects an appropriate field from the given Hive table schema to insert JSON data into if the feature is enabled * @param settings Settings to read schema information from * @return A FieldAlias object that projects the json source field into the select destination field */ static String discoverJsonFieldName(Settings settings, FieldAlias alias) { Set<String> virtualColumnsToBeRemoved = new HashSet<String>(HiveConstants.VIRTUAL_COLUMNS.length); Collections.addAll(virtualColumnsToBeRemoved, HiveConstants.VIRTUAL_COLUMNS); List<String> columnNames = StringUtils.tokenize(settings.getProperty(HiveConstants.COLUMNS), ","); Iterator<String> nameIter = columnNames.iterator(); List<String> columnTypes = StringUtils.tokenize(settings.getProperty(HiveConstants.COLUMNS_TYPES), ":"); Iterator<String> typeIter = columnTypes.iterator(); String candidateField = null; while(nameIter.hasNext() && candidateField == null) { String columnName = nameIter.next(); String type = typeIter.next(); if ("string".equalsIgnoreCase(type) && !virtualColumnsToBeRemoved.contains(columnName)) { candidateField = columnName; } } Assert.hasText(candidateField, "Could not identify a field to insert JSON data into " + "from the given fields : {" + columnNames + "} of types {" + columnTypes + "}"); // If the candidate field is aliased to something else, find the alias name and use that for the field name: candidateField = alias.toES(candidateField); return candidateField; }
/**
 * Field name to query from this level of an object.
 *
 * @param field non-blank field name
 * @return a child {@link Query} scoped to the given field
 */
public Query get(String field) {
    Assert.hasText(field, "Cannot query empty field name");
    return new Query(field, this);
}
/**
 * Validates and prepares the job configuration for writing to ES: requires a
 * write resource, discovers the ES version, checks id/operation settings and
 * index existence, and warns when speculative execution is enabled.
 *
 * @param cfg Hadoop job configuration to initialize from
 * @throws IOException declared for initialization failures
 */
private void init(Configuration cfg) throws IOException {
    Settings settings = HadoopSettingsManager.loadFrom(cfg);
    Assert.hasText(settings.getResourceWrite(),
            String.format("No resource ['%s'] (index/query/location) specified", ES_RESOURCE));

    // Need to discover the ESVersion before checking if index exists.
    InitializationUtils.discoverEsVersion(settings, log);
    InitializationUtils.checkIdForOperation(settings);
    InitializationUtils.checkIndexExistence(settings);

    // Speculative execution can cause duplicate writes; warn for whichever
    // phase (map or reduce) actually performs the output.
    if (HadoopCfgUtils.getReduceTasks(cfg) != null) {
        if (HadoopCfgUtils.getSpeculativeReduce(cfg)) {
            log.warn("Speculative execution enabled for reducer - consider disabling it to prevent data corruption");
        }
    }
    else {
        if (HadoopCfgUtils.getSpeculativeMap(cfg)) {
            log.warn("Speculative execution enabled for mapper - consider disabling it to prevent data corruption");
        }
    }

    //log.info(String.format("Starting to write/index to [%s][%s]", settings.getTargetUri(), settings.getTargetResource()));
}
}
Assert.hasText(resource, errorMessage + resource); type = res.substring(slash + 1); typed = true; Assert.hasText(type, "No type found; expecting [index]/[type]"); } else { index = res; typed = false; Assert.hasText(index, "No index found; expecting [index]/[type]"); Assert.isTrue(!StringUtils.hasWhitespace(index) && !StringUtils.hasWhitespace(type), "Index/type should not contain whitespaces");
Assert.hasText(clientSettings.getResourceWrite(), "Could not locate write resource for ES error handler.");
public BytesArray generateMessage(String ts, String message, String exceptionType, String exceptionMessage, String rawEvent) { Assert.hasText(ts, "Missing " + FieldNames.FIELD_TIMESTAMP + " value for ECS template."); Assert.hasText(message, "Missing " + FieldNames.FIELD_MESSAGE + " value for ECS template."); Assert.hasText(exceptionType, "Missing " + FieldNames.FIELD_ERROR_CODE + " value for ECS template."); Assert.hasText(exceptionMessage, "Missing " + FieldNames.FIELD_ERROR_MESSAGE + " value for ECS template."); Assert.hasText(rawEvent, "Missing " + FieldNames.FIELD_EVENT_RAW + " value for ECS template.");
/**
 * Restricts the query to the given node.
 *
 * @param node non-blank node identifier
 * @return this builder, for chaining
 */
public QueryBuilder node(String node) {
    Assert.hasText(node, "Invalid node");
    this.node = node;
    return this;
}
/**
 * Targets the search at a specific shard.
 *
 * @param shard shard identifier; must contain text
 * @return this builder so calls can be chained
 */
public SearchRequestBuilder shard(String shard) {
    // Reject null/blank shard ids up front.
    Assert.hasText(shard, "Invalid shard");
    this.shard = shard;
    return this;
}
/**
 * Constructs an ECS message template, validating the mandatory event fields.
 *
 * @param schema        owning common-schema instance
 * @param labels        labels applied to every generated message
 * @param tags          tags applied to every generated message
 * @param host          host metadata included in messages
 * @param eventCategory mandatory ECS event category (non-blank)
 * @param eventType     mandatory ECS event type (non-blank)
 */
public MessageTemplate(ElasticCommonSchema schema, Map<String, String> labels, Set<String> tags, HostData host,
                       String eventCategory, String eventType) {
    Assert.hasText(eventCategory, "Missing " + FieldNames.FIELD_EVENT_CATEGORY + " value for ECS template.");
    Assert.hasText(eventType, "Missing " + FieldNames.FIELD_EVENT_TYPE + " value for ECS template.");
    // Store the validated event fields first, then the remaining collaborators.
    this.eventCategory = eventCategory;
    this.eventType = eventType;
    this.schema = schema;
    this.labels = labels;
    this.tags = tags;
    this.host = host;
}
/**
 * Limits the request to one shard.
 *
 * @param shard the shard to query; must not be blank
 * @return this builder (fluent API)
 */
public SearchRequestBuilder shard(String shard) {
    Assert.hasText(shard, "Invalid shard");
    this.shard = shard;
    return this;
}
/**
 * Looks up the node the current task is pinned to, failing if none was set.
 *
 * @param settings settings carrying the internal pinned-node option
 * @return the pinned node identifier
 */
public static String getPinnedNode(Settings settings) {
    String node = settings.getProperty(InternalConfigurationOptions.INTERNAL_ES_PINNED_NODE);
    // A missing value means pinning never happened upstream - fail loudly.
    Assert.hasText(node, "Task has not been pinned to a node...");
    return node;
}
/**
 * Looks up the transport pooling key for this job, failing if none was assigned.
 *
 * @param settings settings carrying the internal pooling-key option
 * @return the job's transport pooling key
 */
public static String getJobTransportPoolingKey(Settings settings) {
    String jobKey = settings.getProperty(InternalConfigurationOptions.INTERNAL_TRANSPORT_POOLING_KEY);
    // Absence of the key indicates broken job setup - surface it immediately.
    Assert.hasText(jobKey, "Job has not been assigned a transport pooling key...");
    return jobKey;
}
/**
 * Returns the node identifier this task was pinned to.
 *
 * @param settings source of the internal pinned-node property
 * @return the non-blank pinned node value
 */
public static String getPinnedNode(Settings settings) {
    final String pinned = settings.getProperty(InternalConfigurationOptions.INTERNAL_ES_PINNED_NODE);
    Assert.hasText(pinned, "Task has not been pinned to a node...");
    return pinned;
}