/** Records each child-cache event together with the path it occurred on, for later inspection. */
@Override public void notify(Path path, PathChildrenCacheEvent event) { events.add(new Pair<>(path, event)); }
/** Registers a component for injection under the given name; pairs are accumulated in registration order. */
public void injectForName(String name, Component component) { injectedComponents.add(new Pair<>(name, component)); }
/**
 * Returns the constructor's parameters paired with their annotations,
 * one (generic type, annotation list) pair per parameter, in declaration order.
 */
protected List<Pair<Type, List<Annotation>>> getAnnotatedConstructorParams() {
    Type[] parameterTypes = constructor.getGenericParameterTypes();
    Annotation[][] parameterAnnotations = constructor.getParameterAnnotations();
    List<Pair<Type, List<Annotation>>> annotatedParams = new ArrayList<>();
    for (int param = 0; param < parameterTypes.length; param++)
        annotatedParams.add(new Pair<>(parameterTypes[param], Arrays.asList(parameterAnnotations[param])));
    return annotatedParams;
}
/**
 * Decompresses a serialized property list back into (key, value) pairs.
 * The serialized form is UTF-8 text where each property is terminated by
 * {@code valueEndMarker} and key/value are separated by {@code keyEndMarker}.
 *
 * @param compression the compressed payload to decode
 * @return an immutable list of (key, value) pairs; empty if the payload decodes to an empty string
 */
private List<Pair<String, String>> decompress(Compressor.Compression compression) {
    // Use the StandardCharsets constant instead of Charset.forName("utf8"):
    // no name lookup, and UTF-8 is guaranteed present on every JVM.
    String propertiesString = new String(compressor.decompress(compression), java.nio.charset.StandardCharsets.UTF_8);
    if (propertiesString.isEmpty()) return ImmutableList.of();
    ImmutableList.Builder<Pair<String, String>> properties = new ImmutableList.Builder<>();
    for (String propertyString : propertiesString.split(valueEndMarker)) {
        // NOTE(review): assumes every serialized property contains keyEndMarker;
        // malformed input would throw ArrayIndexOutOfBoundsException here.
        String[] property = propertyString.split(keyEndMarker);
        properties.add(new Pair<>(property[0], property[1]));
    }
    return properties.build();
}
/**
 * Attempts to map dockerImages, which may be image tags or image ids, to image ids. This only works
 * if the given tag is actually present locally. This is fine, because if it isn't - we can't delete
 * it, so no harm done.
 */
private Set<String> dockerImageToImageIds(List<DockerImage> dockerImages, List<Image> images) {
    // Build a tag -> image id index from the locally present images.
    // getRepoTags() may return null, hence the Optional wrapping before streaming.
    Map<String, String> imageIdByImageTag = images.stream()
            .flatMap(image -> Optional.ofNullable(image.getRepoTags())
                    .map(Stream::of)
                    .orElseGet(Stream::empty)
                    .map(repoTag -> new Pair<>(repoTag, image.getId())))
            // NOTE(review): Collectors.toMap throws on duplicate tags — presumably a
            // tag maps to exactly one local image; confirm with the Docker API contract.
            .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond));
    return dockerImages.stream()
            .map(DockerImage::asString)
            // Unknown tags pass through unchanged (they may already be image ids).
            .map(tag -> imageIdByImageTag.getOrDefault(tag, tag))
            .collect(Collectors.toSet());
}
/**
 * Executes the given command synchronously without timeout.
 *
 * @param command tokens
 * @return Retcode and stdout/stderr merged
 * @throws IOException if starting or reading from the process fails
 */
public Pair<Integer, String> exec(String[] command) throws IOException {
    ProcessBuilder pb = new ProcessBuilder(command);
    pb.environment().remove("VESPA_LOG_TARGET");
    pb.redirectErrorStream(true); // merge stderr into stdout
    Process p = pb.start();
    StringBuilder ret = new StringBuilder();
    // try-with-resources: the original leaked the process output stream.
    try (InputStream is = p.getInputStream()) {
        int b;
        while ((b = is.read()) != -1) {
            // NOTE(review): bytes are widened to chars one-to-one (latin-1 style);
            // multi-byte UTF-8 output will be mangled — preserved from the original.
            ret.append((char) b);
        }
    }
    int rc;
    try {
        rc = p.waitFor();
    } catch (InterruptedException e) {
        // Restore the interrupt status so callers up the stack can observe it.
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    }
    return new Pair<>(rc, ret.toString());
}
/**
 * Adds an entry in the type map, pairing the given identifier with the given class specification.
 *
 * @param id The class identifier to register with.
 * @param spec The class to register.
 * @throws IllegalArgumentException Thrown if two classes attempt to register with the same identifier.
 */
private void add(int id, Class<? extends Identifiable> spec) {
    Class<?> previous = get(id);
    if (previous != null) {
        // Re-registering the same class is a no-op; a different class is an error.
        if ( ! spec.equals(previous)) {
            throw new IllegalArgumentException("Can not register class '" + spec.toString() + "' with id " + id +
                                               ", because it already maps to class '" + previous.toString() + "'.");
        }
        return;
    }
    // Cache the no-arg constructor up front; null if the class has none.
    Constructor<? extends Identifiable> constructor;
    try {
        constructor = spec.getConstructor();
    } catch (NoSuchMethodException e) {
        constructor = null;
    }
    typeMap.put(id, new Pair<>(spec, constructor));
}
/**
 * Derives the rank properties for one ranking phase from its expression.
 * A bare reference is emitted directly as the phase expression; anything else
 * is emitted as a named rankingExpression plus its ranking script.
 */
private List<Pair<String, String>> deriveRankingPhaseRankProperties(RankingExpression expression, String phase) {
    List<Pair<String, String>> properties = new ArrayList<>();
    if (expression == null) return properties;

    String name = expression.getName();
    if ("".equals(name))
        name = phase; // unnamed expressions take the phase name

    if (expression.getRoot() instanceof ReferenceNode) {
        properties.add(new Pair<>("vespa.rank." + phase, expression.getRoot().toString()));
        return properties;
    }
    properties.add(new Pair<>("vespa.rank." + phase, "rankingExpression(" + name + ")"));
    properties.add(new Pair<>("rankingExpression(" + name + ").rankingScript", expression.getRoot().toString()));
    return properties;
}
/**
 * Parses a schemamapping element and generates a map of field mappings
 *
 * @param e a schemamapping element
 * @return doctype,in-document → in-processor
 */
public static Map<Pair<String,String>, String> parseFieldNameSchemaMap(Element e) {
    Map<Pair<String, String>, String> mappings = new HashMap<>();
    for (Element mapElement : XML.getChildren(e, "map")) {
        for (Element fieldElement : XML.getChildren(mapElement, "field")) {
            String inDocument = fieldElement.getAttribute("in-document");
            String inProcessor = fieldElement.getAttribute("in-processor");
            String doctype = fieldElement.getAttribute("doctype");
            // An absent doctype attribute parses as "" — normalize to null.
            mappings.put(new Pair<>("".equals(doctype) ? null : doctype, inDocument), inProcessor);
        }
    }
    return mappings;
}
/** * The map for a given chain,docproc: * "Reverses" the direction, this is the mapping a docproc should do when a * doc comes in. The doctype is null if not given in map. * * @return (doctype,inProcessor)→inDocument */ public Map<Pair<String,String>, String> chainMap(String chain, String docproc) { Map<Pair<String, String>, String> ret = new HashMap<>(); for (Entry<SchemaMapKey, String> e : fields.entrySet()) { SchemaMapKey key = e.getKey(); if (key.getChain().equals(chain) && key.getDocproc().equals(docproc)) { // Reverse direction here ret.put(new Pair<>(key.getDoctype(),e.getValue()), key.getInDocument()); } } return ret; }
// NOTE(review): fragment — the loop body continues beyond this view.
// For each public constructor, count how many of its parameters are config
// instances and record the (constructor, count) pair for later selection.
for (Constructor<?> ctor : publicConstructors) { long count = Arrays.stream(ctor.getParameterTypes()).filter(ConfigInstance.class::isAssignableFrom).count(); withParameterCount.add(new Pair<>(ctor, (int) count));
/**
 * Derives the native rank properties for this field from its tables:
 * one property per table, keyed by match kind, table type and field name.
 *
 * @return the derived (property name, table name) pairs
 */
public List<Pair<String, String>> deriveRankProperties() {
    List<Pair<String, String>> properties = new ArrayList<>();
    // Idiomatic for-each replaces the explicit Iterator loop of the original.
    for (NativeTable table : tables.values()) {
        if (isFieldMatchTable(table))
            properties.add(new Pair<>("nativeFieldMatch." + table.getType().getName() + "." + fieldName, table.getName()));
        if (isAttributeMatchTable(table))
            properties.add(new Pair<>("nativeAttributeMatch." + table.getType().getName() + "." + fieldName, table.getName()));
        if (isProximityTable(table))
            properties.add(new Pair<>("nativeProximity." + table.getType().getName() + "." + fieldName, table.getName()));
    }
    return properties;
}
/**
 * Reads the previously stored function expressions for these arguments.
 * Each stored line is "name&lt;TAB&gt;expression".
 *
 * @return the stored (name, expression) pairs, or an empty list if none were stored
 * @throws IllegalStateException if a stored expression fails to parse
 * @throws UncheckedIOException on I/O failure
 */
List<Pair<String, RankingExpression>> readFunctions() {
    try {
        ApplicationFile file = application.getFile(modelFiles.functionsPath());
        if ( ! file.exists()) return Collections.emptyList();

        List<Pair<String, RankingExpression>> functions = new ArrayList<>();
        try (BufferedReader reader = new BufferedReader(file.createReader())) {
            for (String line = reader.readLine(); line != null; line = reader.readLine()) {
                String[] fields = line.split("\t");
                String name = fields[0];
                try {
                    functions.add(new Pair<>(name, new RankingExpression(fields[0], fields[1])));
                } catch (ParseException e) {
                    throw new IllegalStateException("Could not parse " + name, e);
                }
            }
        }
        return functions;
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
/**
 * Reads the previously stored small constants.
 * Each stored line is "name&lt;TAB&gt;type-spec&lt;TAB&gt;tensor-value".
 *
 * @return the stored (name, tensor) pairs, or an empty list if none were stored
 * @throws UncheckedIOException on I/O failure
 */
private List<Pair<String, Tensor>> readSmallConstants() {
    try {
        ApplicationFile file = application.getFile(modelFiles.smallConstantsPath());
        if ( ! file.exists()) return Collections.emptyList();

        List<Pair<String, Tensor>> constants = new ArrayList<>();
        // try-with-resources: the original never closed the reader (leak);
        // this also matches the sibling readFunctions method.
        try (BufferedReader reader = new BufferedReader(file.createReader())) {
            String line;
            while (null != (line = reader.readLine())) {
                String[] parts = line.split("\t");
                String name = parts[0];
                TensorType type = TensorType.fromSpec(parts[1]);
                constants.add(new Pair<>(name, Tensor.from(type, parts[2])));
            }
        }
        return constants;
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
/**
 * Grows (or shrinks) the allocation for the given cluster group to exactly nodesInGroup hosts
 * of the given flavor, assigning cluster membership indexes from a per-cluster counter.
 */
private List<HostSpec> allocateHostGroup(ClusterSpec clusterGroup, String flavor, int nodesInGroup, int startIndex, boolean canFail) {
    List<HostSpec> allocation = allocations.computeIfAbsent(clusterGroup, group -> new ArrayList<>());
    int nextIndex = nextIndexInCluster.getOrDefault(new Pair<>(clusterGroup.type(), clusterGroup.id()), startIndex);
    // Grow until the group is full, or we run out of free nodes of this flavor.
    while (allocation.size() < nodesInGroup) {
        if (freeNodes.get(flavor).isEmpty()) {
            if ( ! canFail) break; // best effort: return a partial allocation
            throw new IllegalArgumentException("Insufficient capacity of flavor '" + flavor + "'");
        }
        Host newHost = freeNodes.removeValue(flavor, 0);
        ClusterMembership membership = ClusterMembership.from(clusterGroup, nextIndex++);
        allocation.add(new HostSpec(newHost.hostname(), newHost.aliases(), newHost.flavor(), Optional.of(membership), newHost.version()));
    }
    nextIndexInCluster.put(new Pair<>(clusterGroup.type(), clusterGroup.id()), nextIndex);
    // Shrink from the front if the group is over-allocated.
    while (allocation.size() > nodesInGroup)
        allocation.remove(0);
    return allocation;
}
/** Rebuilds the select-input expression with each case's branch expression converted. */
public Expression innerConvert(SelectInputExpression exp) {
    List<Pair<String, Expression>> convertedCases = new LinkedList<>();
    for (Pair<String, Expression> inputCase : exp.getCases())
        convertedCases.add(new Pair<>(inputCase.getFirst(), branch().convert(inputCase.getSecond())));
    return new SelectInputExpression(convertedCases);
}
// NOTE(review): fragment — the opened if-blocks close beyond this view.
// Looks up a cached constructor keyed by (value class, argument class);
// a cache miss is presumably filled by a reflective lookup below — confirm.
Class<?> valClass = getValueClass(); if (valClass != null) { Pair<Class<?>, Class<?>> key = new Pair<>(valClass, arg.getClass()); Constructor<?> cstr = constructorCache.get(key); if (cstr == null) {
/**
 * Validates that a summary field name used across multiple (non-default) summary
 * classes always draws from the same source, throwing if two classes disagree.
 */
@Override public void process(boolean validate, boolean documentsOnly) {
    if ( ! validate) return;
    // field name -> (first summary class seen, source used there)
    Map<String, Pair<String, String>> fieldToClassAndSource = new HashMap<>();
    for (DocumentSummary summary : search.getSummaries().values()) {
        // The default class is exempt from this consistency check.
        if ("default".equals(summary.getName())) continue;
        for (SummaryField summaryField : summary.getSummaryFields() ) {
            if (summaryField.isImplicit()) continue;
            Pair<String, String> prevClassAndSource = fieldToClassAndSource.get(summaryField.getName());
            for (Source source : summaryField.getSources()) {
                if (prevClassAndSource!=null) {
                    String prevClass = prevClassAndSource.getFirst();
                    String prevSource = prevClassAndSource.getSecond();
                    // Only compare against a *different* summary class using a *different* source.
                    if ( ! prevClass.equals(summary.getName())) {
                        if ( ! prevSource.equals(source.getName())) {
                            throw new IllegalArgumentException("For search '"+ search.getName() +
                                                               "', summary class '" + summary.getName()+"'," +
                                                               " summary field '" + summaryField.getName() + "':" +
                                                               " Can not use source '" + source.getName() +
                                                               "' for this summary field, an equally named field in summary class '" +
                                                               prevClass +
                                                               "' uses a different source: '"+prevSource+"'.");
                        }
                    }
                } else {
                    // First sighting of this field name: remember where and from what source.
                    fieldToClassAndSource.put(summaryField.getName(), new Pair<>(summary.getName(), source.getName()));
                }
            }
        }
    }
}
/**
 * Builds a partially initialized VespaModel from the given application package and
 * returns it together with the first container of its container cluster.
 * Intended for tests: the model is created incomplete and only the container
 * parts are finalized here.
 */
static Pair<VespaModel, Container> createContainerModel(Path applicationPath,
                                                        FileRegistry fileRegistry,
                                                        File preprocessedApplicationDir,
                                                        Networking networkingOption,
                                                        ConfigModelRepo configModelRepo) throws Exception {
    DeployLogger logger = new BaseDeployLogger();
    FilesApplicationPackage rawApplicationPackage = new FilesApplicationPackage.Builder(applicationPath.toFile())
            .includeSourceFiles(true).preprocessedDir(preprocessedApplicationDir).build();
    // Preprocess and validate before any model construction.
    ApplicationPackage applicationPackage = rawApplicationPackage.preprocess(getZone(), logger);
    validateApplication(applicationPackage);
    DeployState deployState = createDeployState(applicationPackage, fileRegistry, logger);
    VespaModel root = VespaModel.createIncomplete(deployState);
    ApplicationConfigProducerRoot vespaRoot = new ApplicationConfigProducerRoot(root,
                                                                                "vespa",
                                                                                deployState.getDocumentModel(),
                                                                                deployState.getVespaVersion(),
                                                                                deployState.getProperties().applicationId());
    Element spec = containerRootElement(applicationPackage);
    ContainerModel containerModel = newContainerModelBuilder(networkingOption).build(deployState, root, configModelRepo, vespaRoot, spec);
    containerModel.getCluster().prepare(deployState);
    initializeContainerModel(containerModel, configModelRepo);
    Container container = first(containerModel.getCluster().getContainers());
    // TODO: Separate out model finalization from the VespaModel constructor,
    // such that the above and below code to finalize the container can be
    // replaced by root.finalize();
    initializeContainer(deployState.getDeployLogger(), container, spec);
    root.freezeModelTopology();
    return new Pair<>(root, container);
}