/**
 * Returns the combined string, escaped for posix shell.
 *
 * @param command the list of strings to be combined
 * @return the resulting command string
 */
public static String shellCmd(List<String> command) {
    List<String> changedCommands = new ArrayList<>(command.size());
    for (String str : command) {
        if (str == null) {
            continue;
        }
        changedCommands.add("'" + str.replaceAll("'", "'\"'\"'") + "'");
    }
    return StringUtils.join(changedCommands, " ");
}
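A minimal usage sketch (the call site is hypothetical; shellCmd above is assumed to be in scope, and StringUtils is org.apache.commons.lang.StringUtils):

List<String> command = Arrays.asList("echo", "it's done", null);
// Null entries are skipped; every other argument is single-quoted, and
// embedded single quotes are rewritten as the '"'"' sequence.
String cmd = shellCmd(command);
// cmd is: 'echo' 'it'"'"'s done'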
public static StringBuilder appendSerdeParams(
        StringBuilder builder, Map<String, String> serdeParam) {
    serdeParam = new TreeMap<String, String>(serdeParam);
    builder.append("WITH SERDEPROPERTIES ( \n");
    List<String> serdeCols = new ArrayList<String>();
    for (Entry<String, String> entry : serdeParam.entrySet()) {
        serdeCols.add("  '" + entry.getKey() + "'='"
            + HiveStringUtils.escapeHiveCommand(entry.getValue()) + "'");
    }
    builder.append(StringUtils.join(serdeCols, ", \n")).append(')');
    return builder;
}
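For illustration (property names and values are invented), the TreeMap copy makes the key order deterministic:

Map<String, String> serde = new HashMap<String, String>();
serde.put("serialization.format", "1");
serde.put("field.delim", ",");
// Assuming appendSerdeParams above is in scope:
String clause = appendSerdeParams(new StringBuilder(), serde).toString();
// clause:
// WITH SERDEPROPERTIES ( 
//   'field.delim'=',', 
//   'serialization.format'='1')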
private void checkColTypeChangeCompatible(List<FieldSchema> oldCols, List<FieldSchema> newCols)
        throws InvalidOperationException {
    List<String> incompatibleCols = new ArrayList<>();
    int maxCols = Math.min(oldCols.size(), newCols.size());
    for (int i = 0; i < maxCols; i++) {
        if (!ColumnType.areColTypesCompatible(
                ColumnType.getTypeName(oldCols.get(i).getType()),
                ColumnType.getTypeName(newCols.get(i).getType()))) {
            incompatibleCols.add(newCols.get(i).getName());
        }
    }
    if (!incompatibleCols.isEmpty()) {
        throw new InvalidOperationException(
            "The following columns have types incompatible with the existing "
                + "columns in their respective positions :\n"
                + org.apache.commons.lang.StringUtils.join(incompatibleCols, ','));
    }
}
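The check is purely positional: only the overlapping prefix of the two column lists is compared, so columns appended at the end are never flagged. A standalone sketch of the same pattern, using a placeholder equality rule instead of Hive's ColumnType compatibility logic:

static List<String> incompatiblePositions(List<String> oldTypes, List<String> newTypes) {
    List<String> incompatible = new ArrayList<>();
    int overlap = Math.min(oldTypes.size(), newTypes.size());
    for (int i = 0; i < overlap; i++) {
        // Placeholder rule: only identical type names count as compatible.
        if (!oldTypes.get(i).equals(newTypes.get(i))) {
            incompatible.add("position " + i + ": " + oldTypes.get(i) + " -> " + newTypes.get(i));
        }
    }
    return incompatible;
}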
if (part == null) {
    throw new HiveException(ErrorMsg.INVALID_PARTITION,
        StringUtils.join(descTbl.getPartSpec().keySet(), ','), tableName);
}
// ...
Map<String, Boolean> stateMap = new HashMap<>();
for (String stat : StatsSetupConst.supportedStats) {
    valueMap.put(stat, 0L);
    stateMap.put(stat, true);
}
// ...
Boolean state = StatsSetupConst.areBasicStatsUptoDate(props);
for (String stat : StatsSetupConst.supportedStats) {
    stateMap.put(stat, stateMap.get(stat) && state);
    if (props != null && props.get(stat) != null) {
        valueMap.put(stat, valueMap.get(stat) + Long.parseLong(props.get(stat)));
    }
}
// ...
String[] dbTab = Utilities.getDbTableName(tableName);
List<String> colNames = new ArrayList<String>();
colNames.add(colName.toLowerCase());
if (null == part) {
    if (tbl.isPartitioned()) {
        // ...
    }
    // ...
} else {
    partitions.add(part.getName());
    cols = Hive.getFieldsFromDeserializer(colPath, deserializer);
    colStats = db.getPartitionColumnStatistics(dbTab[0].toLowerCase(),
        dbTab[1].toLowerCase(), partitions, colNames).get(part.getName());
}
private String propertiesToString(Map<String, String> props, List<String> exclude) {
    String prop_string = "";
    if (!props.isEmpty()) {
        Map<String, String> properties = new TreeMap<String, String>(props);
        List<String> realProps = new ArrayList<String>();
        for (String key : properties.keySet()) {
            if (properties.get(key) != null && (exclude == null || !exclude.contains(key))) {
                realProps.add("  '" + key + "'='"
                    + HiveStringUtils.escapeHiveCommand(properties.get(key)) + "'");
            }
        }
        prop_string += StringUtils.join(realProps, ", \n");
    }
    return prop_string;
}
@CheckForNull
public static String getDeprecatedProtocolsString() {
    final List<String> deprecatedProtocols = new ArrayList<>();
    final Set<String> agentProtocols = Jenkins.get().getAgentProtocols();
    for (String name : agentProtocols) {
        AgentProtocol pr = AgentProtocol.of(name);
        if (pr != null && pr.isDeprecated()) {
            deprecatedProtocols.add(name);
        }
    }
    if (deprecatedProtocols.isEmpty()) {
        return null;
    }
    return StringUtils.join(deprecatedProtocols, ',');
}
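Since the method is annotated @CheckForNull, callers must treat null as "no deprecated protocols in use"; a hypothetical call site:

String deprecated = getDeprecatedProtocolsString();
if (deprecated != null) {
    // e.g. surface an administrative warning listing the protocols
    System.err.println("Deprecated agent protocols in use: " + deprecated);
}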
metadata.put("class", descriptor.getId());
metadata.put("order", ++order);
metadata.put("displayName", descriptor.getDisplayName());
metadata.put("description", descriptor.getDescription());
metadata.put("iconFilePathPattern", descriptor.getIconFilePathPattern());
if (ctx != null) {
    Icon icon = IconSet.icons
        .getIconByClassSpec(StringUtils.join(new String[]{iconClassName, iconStyle}, " "));
    if (icon != null) {
        metadata.put("iconQualifiedUrl", icon.getQualifiedUrl(ctx));
    }
}
// ...
if (category != null) {
    category.getItems().add(metadata);
} else {
    List<Map<String, Serializable>> temp = new ArrayList<Map<String, Serializable>>();
    temp.add(metadata);
    category = new Category(ic.getId(), ic.getDisplayName(), ic.getDescription(),
        ic.getOrder(), ic.getMinToShow(), temp);
    categories.getItems().add(category);
}
ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
Map<ColumnInfo, ExprNodeDesc> constants = cppCtx.getPropagatedConstants(op);
cppCtx.getOpToConstantExprs().put(op, constants);
foldOperator(op, cppCtx);
List<ExprNodeDesc> colList = op.getConf().getColList();
Map<String, ExprNodeDesc> columnExprMap = op.getColumnExprMap();
if (colList != null) {
    for (int i = 0; i < colList.size(); i++) {
        ExprNodeDesc newCol = foldExpr(colList.get(i), constants, cppCtx, op, 0, false);
        if (!(colList.get(i) instanceof ExprNodeConstantDesc)
                && newCol instanceof ExprNodeConstantDesc) {
            ExprNodeDesc desc = columnExprMap.get(colName);
            if (desc instanceof ExprNodeConstantDesc) {
                ((ExprNodeConstantDesc) newCol).setFoldedFromCol(
                    ((ExprNodeConstantDesc) desc).getFoldedFromCol());
            }
        }
        // ...
        ColumnInfo colInfo = op.getSchema().getSignature().get(i);
        if (!VirtualColumn.isVirtualColumnBasedOnAlias(colInfo)) {
            constants.put(colInfo, newCol);
        }
        columnExprMap.put(columnNames.get(i), newCol);
    }
}
LOG.debug("New column list:(" + StringUtils.join(colList, " ") + ")");
Object value;
if (params.containsKey(input.name)) {
    value = params.get(input.name);
} else {
    value = input.getDefaultValue();
}
// ...
for (ParamOption option : optionInput.getOptions()) {
    if (option.getValue().equals(o)) {
        validChecked.add(o);
        break;
    }
}
// ...
    params.put(input.name, validChecked);
    expanded = StringUtils.join(validChecked, delimiter);
} else {
    // single-selection
    expanded = value.toString();
}
LOG.info("Got Cluster node info from ASM, count is "
    + String.valueOf(clusterNodeReports.size()));
// ...
    + ", queueCurrentCapacity=" + queueInfo.getCurrentCapacity()
    + ", queueMaxCapacity=" + queueInfo.getMaximumCapacity()
    + ", queueApplicationCount=" + queueInfo.getApplications().size()
    + ", queueChildQueueCount=" + queueInfo.getChildQueues().size());
// ...
    localResources, StringUtils.join(jstormClientContext.shellArgs, JOYConstants.BLANK));
env.put(JOYConstants.DISTRIBUTEDSHELLSCRIPTLOCATION, hdfsShellScriptLocation);
env.put(JOYConstants.DISTRIBUTEDSHELLSCRIPTTIMESTAMP, Long.toString(hdfsShellScriptTimestamp));
env.put(JOYConstants.DISTRIBUTEDSHELLSCRIPTLEN, Long.toString(hdfsShellScriptLen));
if (jstormClientContext.domainId != null && jstormClientContext.domainId.length() > 0) {
    // ...
}
commands.add(command.toString());
if (distinctTableNames.size() > tableBatchSize) {
    List<String> lowercaseTableNames = new ArrayList<>();
    for (String tableName : tableNames) {
        lowercaseTableNames.add(
            org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier(tableName));
    }
    // ...
    while (startIndex < distinctTableNames.size()) {
        int endIndex = Math.min(startIndex + tableBatchSize, distinctTableNames.size());
        tables.addAll(ms.getTableObjectsByName(catName, dbName,
            distinctTableNames.subList(startIndex, endIndex)));
        // ...
    }
// ...
    throw newMetaException(e);
} finally {
    endFunction("get_multi_table", tables != null, ex, join(tableNames, ","));
}
/**
 * Initiates an orderly shutdown in which previously submitted
 * tasks are executed, but no new tasks are accepted.
 * Invocation has no additional effect if already shut down.
 *
 * This also blocks until all tasks have completed execution after the
 * shutdown request, or the timeout occurs, or the current thread is
 * interrupted, whichever happens first.
 *
 * @param clazz {@link Class} that invokes shutdown on the {@link ExecutorService}.
 * @param executorService {@link ExecutorService} to shutdown.
 * @param logger {@link Logger} to log shutdown for the invoking class.
 * @throws InterruptedException if shutdown is interrupted.
 */
public static void shutdownExecutorService(Class clazz, ExecutorService executorService,
        Logger logger) throws InterruptedException {
    executorService.shutdown();
    if (!executorService.awaitTermination(DEFAULT_EXECUTOR_SERVICE_SHUTDOWN_TIME_IN_MINUTES,
            TimeUnit.MINUTES)) {
        logger.warn("Executor service shutdown timed out.");
        List<Runnable> pendingTasks = executorService.shutdownNow();
        logger.warn(String.format("%s was shutdown instantly. %s tasks were not executed: %s",
            clazz.getName(), pendingTasks.size(), StringUtils.join(pendingTasks, ",")));
    }
}
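A hedged usage sketch; the pool, the logger, and the submitted task are placeholders, and the timeout constant is assumed to be declared alongside the method:

ExecutorService pool = Executors.newFixedThreadPool(4);
pool.submit(() -> System.out.println("working"));  // placeholder task
try {
    shutdownExecutorService(MyService.class, pool, LoggerFactory.getLogger(MyService.class));
} catch (InterruptedException e) {
    Thread.currentThread().interrupt();  // preserve the interrupt status
}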
    errors.add(String.format(
        "class[%s] annotated by @Inventory but not having a static method valueOf", clz));
} else {
    m.valueOf.setAccessible(true);
}
// ...
    errors.add(String.format(
        "class[%s] annotated by @Inventory but not having a static collection method %s",
        clz, collectionMethodName));
} else {
    m.valueOfCollection.setAccessible(true);
}
// ...
inventoryMetadata.put(at.mappingVOClass(), m);
});
// ...
if (!errors.isEmpty()) {
    throw new CloudRuntimeException(StringUtils.join(errors, "\n"));
}
if (!cte.materialize) {
    addCTEAsSubQuery(qb, cteName, alias);
    sqAliasToCTEName.put(alias, cteName);
    continue;
}
// ...
    " detected (cycle: " + StringUtils.join(viewsExpanded, " -> ")
    + " -> " + fullViewName + ").");
// ...
aliasToViewInfo.put(alias, new ObjectPair<String, ReadEntity>(fullViewName, viewInput));
String aliasId = getAliasId(alias, qb);
if (aliasId != null) {
    aliasId = aliasId.replace(SemanticAnalyzer.SUBQUERY_TAG_1, "")
        .replace(SemanticAnalyzer.SUBQUERY_TAG_2, "");
}
viewAliasToInput.put(aliasId, viewInput);
continue;
// ...
ReadEntity newParentInput = null;
if (wasView) {
    viewsExpanded.add(aliasToViewInfo.get(alias).getFirst());
    newParentInput = aliasToViewInfo.get(alias).getSecond();
} else if (wasCTE) {
    ctesExpanded.add(sqAliasToCTEName.get(alias));
}
@Override
public boolean sendMail(String title, String content, List<String> emailList, List<String> ccList) {
    String alertUrl = ConstUtils.EMAIL_ALERT_INTERFACE;
    if (StringUtils.isBlank(alertUrl)) {
        logger.error("emailAlertInterface url is empty!");
        return false;
    }
    try {
        String charSet = "UTF-8";
        Map<String, String> postMap = new HashMap<String, String>();
        postMap.put("title", title);
        postMap.put("content", content);
        postMap.put("receiver", StringUtils.join(emailList, ","));
        if (ccList != null && ccList.size() > 0) {
            postMap.put("cc", StringUtils.join(ccList, ","));
        }
        String responseStr = HttpRequestUtil.doPost(alertUrl, postMap, charSet);
        if (responseStr == null) {
            // A null response means the POST failed; report failure instead of success.
            logger.error("Failed to send email: url:{}", alertUrl);
            return false;
        }
        return true;
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
        return false;
    }
}
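A hypothetical call site (addresses are invented); a null or empty cc list is simply omitted from the POST body:

boolean sent = sendMail("Disk usage alert",
    "Usage above 90% on host db-01",
    Arrays.asList("ops@example.com", "dba@example.com"),
    null);  // no cc recipients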
dsProperties.put(PropertyNames.PROPERTY_CONNECTION_FACTORY, ds);
dsProperties.put(PropertyNames.PROPERTY_CONNECTION_FACTORY2, ds);
dsProperties.put(ConfVars.MANAGER_FACTORY_CLASS.getVarname(),
    "org.datanucleus.api.jdo.JDOPersistenceManagerFactory");
pmf = JDOHelper.getPersistenceManagerFactory(dsProperties);
// ...
type = type.trim();
if (PINCLASSMAP.containsKey(type)) {
    dsc.pinAll(true, PINCLASSMAP.get(type));
} else {
    LOG.warn("{} is not one of the pinnable object types: {}", type,
        org.apache.commons.lang.StringUtils.join(PINCLASSMAP.keySet(), " "));
}
if (!running.isEmpty()) {
    if (running.size() > this.maxConcurrentRunsOneFlow) {
        throw new ExecutorManagerException("Flow " + flowId + " has more than "
            + this.maxConcurrentRunsOneFlow + " concurrent runs. Skipping",
            // ...
    } else if (options.getConcurrentOption().equals(
            ExecutionOptions.CONCURRENT_OPTION_PIPELINE)) {
        Collections.sort(running);
        final Integer runningExecId = running.get(running.size() - 1);
        // ...
    } else {
        message = "Flow " + flowId + " is already running with exec id "
            + StringUtils.join(running, ",") + ". Will execute concurrently. \n";
    }
}
public double estimateLayerSize(int level) {
    if (cuboidScheduler == null) {
        throw new UnsupportedOperationException("cuboid scheduler is null");
    }
    List<List<Long>> layeredCuboids = cuboidScheduler.getCuboidsByLayer();
    Map<Long, Double> cuboidSizeMap = getCuboidSizeMap();
    double ret = 0;
    for (Long cuboidId : layeredCuboids.get(level)) {
        ret += cuboidSizeMap.get(cuboidId) == null ? 0.0 : cuboidSizeMap.get(cuboidId);
    }
    logger.info("Estimating size for layer {}, all cuboids are {}, total size is {}",
        level, StringUtils.join(layeredCuboids.get(level), ","), ret);
    return ret;
}
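The null check guards against cuboids with no size estimate; the same summation can be written with Map.getOrDefault (a sketch, names reused from the method above):

double total = 0;
for (Long cuboidId : layeredCuboids.get(level)) {
    // A missing estimate counts as zero rather than throwing an NPE on unboxing.
    total += cuboidSizeMap.getOrDefault(cuboidId, 0.0);
}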
@Override
public QueryFragment generatePredicate(String columnName) {
    List<String> columnValues = _columnToValueList.get(columnName);
    int numValues = Math.min(RANDOM.nextInt(MAX_NUM_IN_CLAUSE_VALUES) + 1, columnValues.size());
    Set<String> values = new HashSet<>();
    while (values.size() < numValues) {
        values.add(pickRandom(columnValues));
    }
    String inValues = StringUtils.join(values, ", ");
    boolean notIn = RANDOM.nextBoolean();
    if (notIn) {
        return new StringQueryFragment(columnName + " NOT IN (" + inValues + ")");
    } else {
        return new StringQueryFragment(columnName + " IN (" + inValues + ")");
    }
}
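A self-contained sketch of the same IN-clause assembly (column name and values are invented); note the sampling loop above terminates only if the column has at least numValues distinct values:

Set<String> values = new LinkedHashSet<>(Arrays.asList("'NYC'", "'SFO'", "'LAX'"));
String predicate = "city IN (" + StringUtils.join(values, ", ") + ")";
// predicate: city IN ('NYC', 'SFO', 'LAX')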