/**
 * Creates a similarity model configured for training.
 *
 * @param name the base name under which the trained model will be stored.
 * @return a trainable similarity model backed by the specified name.
 * @throws IOException if the underlying model resources cannot be accessed.
 */
public static TrainSimilarityModel trainModel(String name) throws IOException {
  return new SimilarityModel(name, true);
}
// NOTE(review): fragment of a feature-dispatch chain — the mention pair (np1, np2) is routed
// to a type-specific feature extractor based on each mention's category (name, common noun,
// pronoun, number); asymmetric extractors are reused with swapped arguments (np2, np1).
// The closing braces before each "else if" appear to have been stripped during extraction,
// so this span does not parse as-is — restore the braces from the original source. The
// enclosing method header and trailing branches are outside this view.
if (isName(np1)) { if (isName(np2)) { features.addAll(getNameNameFeatures(np1, np2)); else if (isCommonNoun(np2)) { features.addAll(getNameCommonFeatures(np1, np2)); else if (isPronoun(np2)) { features.addAll(getNamePronounFeatures(np1, np2)); else if (isNumber(np2)) { features.addAll(getNameNumberFeatures(np1, np2)); else if (isCommonNoun(np1)) { if (isName(np2)) { features.addAll(getNameCommonFeatures(np2, np1)); else if (isCommonNoun(np2)) { features.addAll(getCommonCommonFeatures(np1, np2)); else if (isPronoun(np2)) { features.addAll(getCommonPronounFeatures(np1, np2)); else if (isNumber(np2)) { features.addAll(getCommonNumberFeatures(np1, np2)); else if (isPronoun(np1)) { if (isName(np2)) { features.addAll(getNamePronounFeatures(np2, np1));
// NOTE(review): fragment of training-event generation. Head-word and name-type sets are built
// per entity, then an exclusion set filters which cross-entity contexts may serve as negative
// examples. A positive event is emitted for contexts of the same entity (addEvent(true, ...))
// and a negative event for an excluded-set miss across entities (addEvent(false, ...)).
// The enclosing method header, the loops binding ec1/ec2/sec1/axi, and the closing braces are
// outside this view — do not compile this span in isolation.
Map<Integer, Set<String>> headSets = constructHeadSets(entities); Map<Integer, Set<String>> nameSets = constructNameSets(entities); Set<Context> exclusionSet = constructExclusionSet(key, entities, headSets, nameSets, singletons); if (entityContexts.size() == 1) { addEvent(true, ec1, ec2); int startIndex = axi; do { if (!exclusionSet.contains(sec1)) { if (debugOn) System.err.println(ec1.toString()+" "+entityNameSet+" "+sec1.toString()+" "+nameSets.get(sec1.getId())); addEvent(false, ec1, sec1); break;
public static void main(String[] args) throws IOException { if (args.length == 0) { System.err.println("Usage: SimilarityModel modelName < tiger/NN bear/NN"); System.exit(1); } String modelName = args[0]; SimilarityModel model = new SimilarityModel(modelName, false); //Context.wn = new WordNet(System.getProperty("WNHOME"), true); //Context.morphy = new Morphy(Context.wn); BufferedReader in = new BufferedReader(new InputStreamReader(System.in)); for (String line = in.readLine(); line != null; line = in.readLine()) { String[] words = line.split(" "); double p = model.compatible(Context.parseContext(words[0]), Context.parseContext(words[1])); System.out.println(p + " " + model.getFeatures(Context.parseContext(words[0]), Context.parseContext(words[1]))); } } }
/**
 * Maps every entity key to the set of name types collected from that entity's mentions.
 *
 * @param entities A mapping between a key and a list of mentions.
 *
 * @return a mapping between each key in the specified entity map and the name types
 *     associated with each mention of that entity.
 */
@SuppressWarnings("unchecked")
private Map<Integer, Set<String>> constructNameSets(HashList entities) {
  Map<Integer, Set<String>> result = new HashMap<Integer, Set<String>>();
  Iterator<Integer> keys = entities.keySet().iterator();
  while (keys.hasNext()) {
    Integer id = keys.next();
    List<Context> mentions = (List<Context>) entities.get(id);
    result.put(id, constructNameSet(mentions));
  }
  return result;
}
/**
 * Maps every entity key to the head-word set derived from that entity's mentions.
 *
 * @param entities Mapping between a key and a list of mentions which compose an entity.
 *
 * @return a mapping between the keys of the specified entity mapping and the head set
 *     generated from the mentions associated with that key.
 */
@SuppressWarnings("unchecked")
private Map<Integer, Set<String>> constructHeadSets(HashList entities) {
  Map<Integer, Set<String>> result = new HashMap<Integer, Set<String>>();
  Iterator<Integer> keys = entities.keySet().iterator();
  while (keys.hasNext()) {
    Integer id = keys.next();
    List<Context> mentions = (List<Context>) entities.get(id);
    result.put(id, constructHeadSet(mentions));
  }
  return result;
}
// NOTE(review): duplicate of the feature-dispatch fragment seen earlier in this chunk — the
// mention pair (np1, np2) is routed to a type-specific feature extractor by mention category
// (name, common noun, pronoun, number), with asymmetric extractors called as (np2, np1).
// Closing braces before each "else if" look stripped by extraction; this span does not parse
// as-is — restore braces from the original source. Enclosing scopes are outside this view.
if (isName(np1)) { if (isName(np2)) { features.addAll(getNameNameFeatures(np1, np2)); else if (isCommonNoun(np2)) { features.addAll(getNameCommonFeatures(np1, np2)); else if (isPronoun(np2)) { features.addAll(getNamePronounFeatures(np1, np2)); else if (isNumber(np2)) { features.addAll(getNameNumberFeatures(np1, np2)); else if (isCommonNoun(np1)) { if (isName(np2)) { features.addAll(getNameCommonFeatures(np2, np1)); else if (isCommonNoun(np2)) { features.addAll(getCommonCommonFeatures(np1, np2)); else if (isPronoun(np2)) { features.addAll(getCommonPronounFeatures(np1, np2)); else if (isNumber(np2)) { features.addAll(getCommonNumberFeatures(np1, np2)); else if (isPronoun(np1)) { if (isName(np2)) { features.addAll(getNamePronounFeatures(np2, np1));
// NOTE(review): near-duplicate of the training-event fragment seen earlier in this chunk,
// but WITHOUT the exclusionSet.contains(sec1) guard before emitting the negative example —
// confirm against the original source which variant is current; unguarded negatives would
// train on pairs the exclusion set was built to filter out. Enclosing method header, the
// loops binding ec1/ec2/sec1/axi, and closing braces are outside this view — do not compile
// this span in isolation.
Map<Integer, Set<String>> headSets = constructHeadSets(entities); Map<Integer, Set<String>> nameSets = constructNameSets(entities); Set<Context> exclusionSet = constructExclusionSet(key, entities, headSets, nameSets, singletons); if (entityContexts.size() == 1) { addEvent(true, ec1, ec2); int startIndex = axi; do { if (debugOn) System.err.println(ec1.toString() + " " + entityNameSet + " " + sec1.toString() + " " + nameSets.get(sec1.getId())); addEvent(false, ec1, sec1); break;
public static void main(String[] args) throws IOException { if (args.length == 0) { System.err.println("Usage: SimilarityModel modelName < tiger/NN bear/NN"); System.exit(1); } String modelName = args[0]; SimilarityModel model = new SimilarityModel(modelName, false); //Context.wn = new WordNet(System.getProperty("WNHOME"), true); //Context.morphy = new Morphy(Context.wn); BufferedReader in = new BufferedReader(new InputStreamReader(System.in)); for (String line = in.readLine(); line != null; line = in.readLine()) { String[] words = line.split(" "); double p = model.compatible(Context.parseContext(words[0]), Context.parseContext(words[1])); System.out.println(p + " " + model.getFeatures(Context.parseContext(words[0]), Context.parseContext(words[1]))); } } }
/**
 * Maps every entity key to the set of name types collected from that entity's mentions.
 *
 * @param entities A mapping between a key and a list of mentions.
 *
 * @return a mapping between each key in the specified entity map and the name types
 *     associated with each mention of that entity.
 */
@SuppressWarnings("unchecked")
private Map<Integer, Set<String>> constructNameSets(HashList entities) {
  Map<Integer, Set<String>> result = new HashMap<Integer, Set<String>>();
  Iterator<Integer> keys = entities.keySet().iterator();
  while (keys.hasNext()) {
    Integer id = keys.next();
    List<Context> mentions = (List<Context>) entities.get(id);
    result.put(id, constructNameSet(mentions));
  }
  return result;
}
/**
 * Maps every entity key to the head-word set derived from that entity's mentions.
 *
 * @param entities Mapping between a key and a list of mentions which compose an entity.
 *
 * @return a mapping between the keys of the specified entity mapping and the head set
 *     generated from the mentions associated with that key.
 */
@SuppressWarnings("unchecked")
private Map<Integer, Set<String>> constructHeadSets(HashList entities) {
  Map<Integer, Set<String>> result = new HashMap<Integer, Set<String>>();
  Iterator<Integer> keys = entities.keySet().iterator();
  while (keys.hasNext()) {
    Integer id = keys.next();
    List<Context> mentions = (List<Context>) entities.get(id);
    result.put(id, constructHeadSet(mentions));
  }
  return result;
}
/**
 * Creates a similarity model configured for evaluation (non-training mode).
 *
 * @param name the base name of the previously trained model to load.
 * @return a similarity model usable for testing.
 * @throws IOException if the model resources cannot be read.
 */
public static TestSimilarityModel testModel(String name) throws IOException {
  SimilarityModel model = new SimilarityModel(name, false);
  return model;
}
/**
 * Creates a similarity model configured for training.
 *
 * @param name the base name under which the trained model will be stored.
 * @return a trainable similarity model backed by the specified name.
 * @throws IOException if the underlying model resources cannot be accessed.
 */
public static TrainSimilarityModel trainModel(String name) throws IOException {
  return new SimilarityModel(name, true);
}
/**
 * Creates a similarity model configured for evaluation (non-training mode).
 *
 * @param name the base name of the previously trained model to load.
 * @return a similarity model usable for testing.
 * @throws IOException if the model resources cannot be read.
 */
public static TestSimilarityModel testModel(String name) throws IOException {
  SimilarityModel model = new SimilarityModel(name, false);
  return model;
}