/**
 * Creates an event mention with the given id and text extent, anchored at
 * the given character span. The role-to-argument table starts empty.
 *
 * @param id     unique identifier for this mention
 * @param extent the full text span covered by the mention
 * @param anchor the character span of the anchor word(s)
 */
public AceEventMention(String id, AceCharSeq extent, AceCharSeq anchor) {
  super(id, extent);
  this.mAnchor = anchor;
  this.mRolesToArguments = Generics.newHashMap();
}
/**
 * Builds a named leaf-ancestor evaluation with empty per-category
 * aggregate and count tables.
 *
 * @param str display name for this evaluation
 */
public LeafAncestorEval(String str) {
  name = str;
  this.catAvg = Generics.newHashMap();
  this.catNum = Generics.newHashMap();
}
/**
 * Constructs a state belonging to the given automaton. The state starts
 * out non-accepting, with no outgoing transitions and a score of
 * negative infinity.
 *
 * @param id   the identifier of this state
 * @param dfsa the automaton that owns this state
 */
public DFSAState(S id, DFSA<T,S> dfsa) {
  this.stateID = id;
  this.dfsa = dfsa;
  this.inputToTransition = Generics.newHashMap();
  this.accepting = false;
  this.score = Double.NEGATIVE_INFINITY;
}
/**
 * Creates a visitor that collects the trees (and any labeled sub-matches)
 * matched by the given Tregex pattern.
 *
 * @param p the compiled pattern to match against visited trees
 */
TRegexGUITreeVisitor(TregexPattern p) {
  this.p = p;
  this.matchedTrees = new ArrayList<>();
  this.matchedParts = Generics.newHashMap();
}
/**
 * Create an empty ArrayHeap ordered by the given comparator.
 *
 * @param cmp the {@code Comparator} used to order added objects
 */
public ArrayHeap(Comparator<? super E> cmp) {
  this.cmp = cmp;
  this.indexToEntry = new ArrayList<>();
  this.objectToEntry = Generics.newHashMap();
}
/**
 * Create an empty ArrayHeap ordered by the given comparator, with the
 * backing structures presized to the given capacity.
 *
 * @param cmp          the {@code Comparator} used to order added objects
 * @param initCapacity initial capacity of the backing structures
 */
public ArrayHeap(Comparator<? super E> cmp, int initCapacity) {
  this.cmp = cmp;
  this.indexToEntry = new ArrayList<>(initCapacity);
  this.objectToEntry = Generics.newHashMap(initCapacity);
}
/**
 * Associates a display style with a channel, creating the style table
 * lazily on first use. Channel names are stored lower-cased (English
 * locale) so later lookups are case-insensitive.
 *
 * @param channel the channel to style
 * @param style   the style to use
 */
public void styleChannel(String channel, Style style) {
  if (channelStyles == null) {
    channelStyles = Generics.newHashMap();
  }
  channelStyles.put(channel.toLowerCase(Locale.ENGLISH), style);
}
/**
 * Defines the command-line flags accepted by this program and the number
 * of values each consumes: {@code -g} takes none, {@code -e} takes one.
 *
 * @return a map from flag name to its argument count
 */
private static Map<String,Integer> argSpec() {
  Map<String,Integer> spec = Generics.newHashMap();
  spec.put("e", 1);
  spec.put("g", 0);
  return spec;
}
/**
 * Makes it possible to uniquify a collection of objects which are normally
 * non-hashable, by hashing them through a caller-supplied function.
 * Alternatively, it lets you define an alternate hash function for them
 * for limited-use hashing. When two objects map to the same hash value,
 * the later one wins.
 *
 * @param objects      the objects to uniquify
 * @param customHasher maps each object to the key used for uniquifying
 * @return the surviving objects, one per distinct hash value
 */
public static <ObjType, Hashable> Collection<ObjType> uniqueNonhashableObjects(Collection<ObjType> objects, Function<ObjType, Hashable> customHasher) {
  Map<Hashable, ObjType> seen = Generics.newHashMap();
  for (ObjType o : objects) {
    Hashable key = customHasher.apply(o);
    seen.put(key, o);
  }
  return seen.values();
}
/**
 * Defines the optional command-line flags for this program and how many
 * values each consumes: {@code -s} takes one; {@code -w}, {@code -f} and
 * {@code -o} take none.
 *
 * @return a map from flag name to its argument count
 */
private static Map<String,Integer> optArgDefs() {
  Map<String,Integer> defs = Generics.newHashMap(4);
  defs.put("o", 0);
  defs.put("f", 0);
  defs.put("w", 0);
  defs.put("s", 1);
  return defs;
}
/**
 * Tired of Properties not behaving like {@code Map<String,String>}s?
 * This method will solve that problem for you.
 *
 * <p>Iterates {@link Properties#stringPropertyNames()} rather than
 * {@code entrySet()}: the former also walks the Properties defaults
 * chain (which {@code entrySet()} silently omits) and only yields
 * String-typed entries, so the casts that could previously throw
 * {@code ClassCastException} are no longer needed.
 *
 * @param properties the properties to convert
 * @return a mutable map containing every string property
 */
public static Map<String, String> asMap(Properties properties) {
  Map<String, String> map = Generics.newHashMap();
  for (String key : properties.stringPropertyNames()) {
    map.put(key, properties.getProperty(key));
  }
  return map;
}
/**
 * Creates a new HashMap presized to the given capacity, falling back to a
 * default-sized map when the size-taking constructor is unavailable.
 *
 * @param initialCapacity desired initial capacity of the map
 * @return a freshly allocated map
 * @throws RuntimeException if reflective construction fails
 */
public static <K,V> Map<K,V> newHashMap(int initialCapacity) {
  if (HASH_MAP_SIZE_CONSTRUCTOR != null) {
    try {
      return ErasureUtils.uncheckedCast(HASH_MAP_SIZE_CONSTRUCTOR.newInstance(initialCapacity));
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  return newHashMap();
}
/**
 * Extract final coreference output from coreference document format:
 * one {@code CorefChain} per cluster, keyed by cluster id.
 *
 * @param document the processed coreference document
 * @return a map from cluster id to its coreference chain
 */
private static Map<Integer, CorefChain> makeCorefOutput(Document document) {
  Map<Integer, CorefChain> chains = Generics.newHashMap();
  for (CorefCluster cluster : document.corefClusters.values()) {
    CorefChain chain = new CorefChain(cluster, document.positions);
    chains.put(cluster.clusterID, chain);
  }
  return chains;
}
/**
 * Creates an empty lattice: no edges, nodes, or constraints, and an
 * empty index of edges by start node.
 */
public Lattice() {
  this.edges = new ArrayList<>();
  this.nodes = Generics.newHashSet();
  this.edgeStartsAt = Generics.newHashMap();
  this.constraints = new ArrayList<>();
}
/**
 * Builds the lookup table from (upper-cased) tokenizer class name to
 * tokenizer type, skipping types that have no associated class.
 *
 * @return an unmodifiable map from upper-cased class name to type
 */
private static Map<String, TokenizerType> initializeClassMap() {
  Map<String, TokenizerType> map = Generics.newHashMap();
  for (TokenizerType type : TokenizerType.values()) {
    if (type.className != null) {
      // Use an explicit locale so the keys are stable regardless of the
      // default locale (under a Turkish default locale, 'i'.toUpperCase()
      // yields dotted capital I, silently breaking lookups).
      // NOTE(review): any code that looks names up in this map must
      // upper-case with the same locale — verify callers.
      map.put(type.className.toUpperCase(java.util.Locale.ENGLISH), type);
    }
  }
  return Collections.unmodifiableMap(map);
}
/**
 * Removes duplicate graphs from the set, using the string form of the graph
 * as the key (obviating issues with object equality).
 *
 * @param graphs the graphs to deduplicate
 * @return one graph per distinct string form (the last one seen wins)
 */
public static Collection<SemanticGraph> removeDuplicates(Collection<SemanticGraph> graphs) {
  Map<String, SemanticGraph> map = Generics.newHashMap();
  for (SemanticGraph sg : graphs) {
    // No intern() on the key: HashMap compares keys with equals(), so
    // interning adds nothing and only pollutes the string pool.
    map.put(sg.toString(), sg);
  }
  return map.values();
}
/**
 * Creates an embedding of the given dimensionality and populates it from
 * the given files.
 *
 * @param wordFile      file of words — presumably one vocabulary entry per
 *                      line; verify against loadWordVectors
 * @param vectorFile    file of the corresponding vectors
 * @param embeddingSize dimensionality of each word vector
 */
public Embedding(String wordFile, String vectorFile, int embeddingSize) {
  // Initialize state first: loadWordVectors fills wordVectors and may read
  // embeddingSize, so these assignments must precede the call.
  this.wordVectors = Generics.newHashMap();
  this.embeddingSize = embeddingSize;
  // NOTE(review): calling a non-final method from a constructor is unsafe
  // if this class is subclassed and loadWordVectors is overridden — confirm.
  loadWordVectors(wordFile, vectorFile);
}
/**
 * Samples the given number of paths uniformly from the graph and records
 * each path's output value. If the same path is sampled twice, the later
 * value overwrites the earlier one, so the result may hold fewer than
 * {@code numPaths} entries.
 *
 * @param numPaths number of paths to sample
 * @return a map from sampled path to its output in the graph
 */
private Map<List, Double> samplePathsFromGraph(int numPaths) {
  Map<List, Double> result = Generics.newHashMap();
  for (int i = 0; i < numPaths; i++) {
    List path = sampleUniformPathFromGraph();
    // Autobox instead of the deprecated Double(double) constructor.
    result.put(path, getOutputOfPathInGraph(path));
  }
  return result;
}
/**
 * Reads the given stream line by line and collects all macro definitions
 * (trimmed lines starting with "macro ") into a name-to-replacement map.
 *
 * @param reader the source to scan
 * @return the macros found, keyed by name
 * @throws IOException if reading fails
 */
private static Map<String, String> preprocess(BufferedReader reader) throws IOException {
  Map<String, String> macros = Generics.newHashMap();
  String line;
  while ((line = reader.readLine()) != null) {
    String trimmed = line.trim();
    if (trimmed.startsWith("macro ")) {
      Pair<String, String> macro = extractMacro(trimmed);
      macros.put(macro.first(), macro.second());
    }
  }
  return macros;
}
/**
 * Creates parser parameters for the Arabic treebank: installs the Arabic
 * language pack, starts the options log, and initializes the annotation
 * pattern tables.
 */
public ArabicTreebankParserParams() {
  super(new ArabicTreebankLanguagePack());
  // Running log of the options applied; starts with the class banner.
  optionsString = new StringBuilder();
  optionsString.append("ArabicTreebankParserParams\n");
  annotationPatterns = Generics.newHashMap();
  activeAnnotations = new ArrayList<>();
  // Initialize the headFinder here.
  // NOTE(review): headFinder() and initializeAnnotationPatterns() are
  // called from the constructor; if either is overridable, subclasses see
  // a partially constructed object — confirm they are effectively final.
  headFinder = headFinder();
  initializeAnnotationPatterns();
}