public static void main(String[] args) { try { final CommandLine cmd = CommandLine .parser() .withName("./ecbplus-splitter") .withHeader("Splits ECB+ results by folder")
public static void main(String[] args) { try { final CommandLine cmd = CommandLine .parser() .withName("eu.fbk.dkm.pikes.resources.darmstadt-loader") .withHeader("Load Boxer corpus and split it")
public static void main(String[] args) throws IOException {
    // CLI sanity-checker: parses a single MPQA annotation file and dumps every
    // record (name, span, and all attribute key/value pairs) to stdout.
    final CommandLine cmd = CommandLine
            .parser()
            .withName("file-test")
            .withHeader("Check eu.fbk.dkm.pikes.resources.mpqa file")
            .withOption("i", "input-file", "the MPQA file", "FILE",
                    CommandLine.Type.FILE_EXISTING, true, false, true)
            .withLogger(LoggerFactory.getLogger("eu.fbk.fssa")).parse(args);

    final File mpqaFile = cmd.getOptionValue("i", File.class);
    final RecordSet recordSet = RecordSet.readFromFile(mpqaFile);

    for (Record rec : recordSet.getRecords()) {
        System.out.println(rec.getName());
        System.out.println(rec.getSpan());
        // One "key = value" line per attribute, then a blank separator line.
        for (String key : rec.getValueMap().keySet()) {
            System.out.println(key + " = " + rec.getValueMap().get(key));
        }
        System.out.println();
    }
}
public static void main(String[] args) {
    // CLI: walks an OntoNotes folder, counts VerbNet/FrameNet occurrences, and
    // writes a TSV ("VN"/"FN" <TAB> key <TAB> count) to the output file.
    try {
        final CommandLine cmd = CommandLine
                .parser()
                .withName("VerbNetStatisticsExtractor")
                .withHeader("Extracts statistics from OntoNotes on frequency of VerbNet/FrameNet")
                .withOption("n", "ontonotes", "OntoNotes folder", "FOLDER",
                        CommandLine.Type.DIRECTORY_EXISTING, true, false, true)
                .withOption("o", "output", "output file", "FILE",
                        CommandLine.Type.FILE, true, false, true)
                .withLogger(LoggerFactory.getLogger("eu.fbk.nafview")).parse(args);

        final File dir = cmd.getOptionValue("n", File.class);
        final File output = cmd.getOptionValue("o", File.class);

        VerbNetStatisticsExtractor statisticsExtractor = new VerbNetStatisticsExtractor();
        statisticsExtractor.loadDir(dir.getAbsolutePath());
        try {
            statisticsExtractor.loadFrequencies();
        } catch (Exception e) {
            // Best-effort: keep going and dump whatever totals were collected
            // even if frequency loading fails (original behavior preserved).
            e.printStackTrace();
        }

        // FIX: try-with-resources guarantees the writer is flushed and closed even
        // when a write throws — the original leaked the BufferedWriter on failure.
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(output))) {
            for (String key : statisticsExtractor.getVnTotals().keySet()) {
                writer.append("VN").append("\t").append(key).append("\t")
                        .append(statisticsExtractor.getVnTotals().get(key).toString())
                        .append("\n");
            }
            for (String key : statisticsExtractor.getFnTotals().keySet()) {
                writer.append("FN").append("\t").append(key).append("\t")
                        .append(statisticsExtractor.getFnTotals().get(key).toString())
                        .append("\n");
            }
        }
    } catch (final Throwable ex) {
        CommandLine.fail(ex);
    }
}
.parser() .withName("PropBankBank") .withHeader(
public static void main(String[] args) { try { final CommandLine cmd = CommandLine .parser() .withName("./annotate-sentences") .withHeader("Annotate sentences")
public static void main(String[] args) { try { final CommandLine cmd = CommandLine .parser() .withName("./taol-extractor") .withHeader("Convert file from txt to NAF")
public static void main(String[] args) { try { final CommandLine cmd = CommandLine .parser() .withName("./annotate-sentences") .withHeader("Annotate sentences")
.parser() .withName("cr-extractor") .withHeader("Extract CR documents from TREC dataset and save them in NAF format")
.parser() .withName("fbis-extractor") .withHeader("Extract FBIS documents from TREC dataset and save them in NAF format")
.parser() .withName("latimes-extractor") .withHeader("Extract LATIMES documents from TREC dataset and save them in NAF format")
.parser() .withName("fr94-extractor") .withHeader("Extract FR94 documents from TREC dataset and save them in NAF format")
.parser() .withName("ft-extractor") .withHeader("Extract FT documents from TREC dataset and save them in NAF format")
public static void main(String[] args) { final CommandLine cmd = CommandLine .parser() .withName("naf-offset-reader") .withHeader("Read offset from a file")
public static void main(final String... args) { try { final CommandLine cmd = CommandLine .parser() .withName("index-subjectivity-lexicon") .withHeader("Processes the original file of the subjectivity lexicon, " // + "producing a TSV file with an indexed version of it that can " // + "be used with the eu.fbk.dkm.pikes.resources.SubjectivityLexicon Java API class.") .withOption("i", "input", "the input file name", "FILE", CommandLine.Type.FILE_EXISTING, true, false, true) .withOption("o", "output", "the output file name", "FILE", CommandLine.Type.FILE, true, false, true) // .withLogger(LoggerFactory.getLogger("eu.fbk")) // .parse(args); final File inputFile = cmd.getOptionValue("i", File.class); final File outputFile = cmd.getOptionValue("o", File.class); final SubjectivityLexicon lexicon = index(inputFile.getAbsolutePath()); lexicon.writeTo(outputFile.getAbsolutePath()); } catch (final Throwable ex) { CommandLine.fail(ex); } }
public static void main(String[] args) {
    // CLI: reads a whole input file and sends it to a remote Tintop server,
    // printing the annotated result to stdout.
    try {
        final CommandLine cmd = CommandLine
                .parser()
                .withName("./tintop-client")
                .withHeader("Run the Tintop Client")
                .withOption("i", "input", "Input file", "FILE",
                        CommandLine.Type.FILE_EXISTING, true, false, true)
                .withOption("s", "server", "Server address", "URL:PORT",
                        CommandLine.Type.STRING, true, false, true)
                .withOption("t", "timeout", String.format("Timeout (default %d ms)", DEFAULT_TIMEOUT),
                        "milliseconds", CommandLine.Type.INTEGER, true, false, false)
                .withLogger(LoggerFactory.getLogger("eu.fbk")).parse(args);

        String serverUrl = cmd.getOptionValue("server", String.class);
        File inputFile = cmd.getOptionValue("input", File.class);
        Integer timeout = cmd.getOptionValue("timeout", Integer.class, DEFAULT_TIMEOUT);

        URL url = new URL(serverUrl);
        TintopServer server = new TintopServer(url);
        TintopClient client = new TintopClient(server, timeout);

        // FIX: the single-argument readFileToString is deprecated and implicitly
        // uses the platform default charset; passing Charset.defaultCharset()
        // keeps the exact original behavior on the non-deprecated overload.
        String whole = FileUtils.readFileToString(inputFile,
                java.nio.charset.Charset.defaultCharset());
        System.out.println(client.call(whole));
    } catch (Exception e) {
        CommandLine.fail(e);
    }
}
public static void main(final String... args) { try { final CommandLine cmd = CommandLine .parser() .withName("index-senticnet-lexicon") .withHeader("Processes the RDF data of eu.fbk.dkm.pikes.resources.SenticNet, " // + "producing a TSV file with an indexed version of it that can " // + "be used with the eu.fbk.dkm.pikes.resources.SenticNet Java API class.") .withOption("i", "input", "the input file name", "FILE", Type.FILE_EXISTING, true, false, true) .withOption("o", "output", "the output file name", "FILE", Type.FILE, true, false, true) // .withLogger(LoggerFactory.getLogger("eu.fbk")) // .parse(args); final File inputFile = cmd.getOptionValue("i", File.class); final File outputFile = cmd.getOptionValue("o", File.class); final SenticNet lexicon = index(inputFile.getAbsolutePath()); lexicon.writeTo(outputFile.getAbsolutePath()); } catch (final Throwable ex) { CommandLine.fail(ex); } }
public static void main(final String[] args) throws IOException, XMLStreamException { try { final CommandLine cmd = CommandLine .parser() .withName("corpus-preprocessor") .withHeader(
public static void main(String[] args) {
    // CLI: loads the SentiWordNet lexicon from a file and prints the sentiment
    // value of a sample synset ("00478311-a") as a smoke test.
    try {
        // FIX: the previous header/help text was copy-pasted from the MPQA v.2
        // corpus converter and described entirely different behavior; the option
        // metavar said "DIR" while its type is FILE_EXISTING. Both corrected.
        final CommandLine cmd = CommandLine
                .parser()
                .withName("sentiwordnet-loader")
                .withHeader("Loads the SentiWordNet lexicon "
                        + "and prints the sentiment value of a sample synset")
                .withOption("i", "input", "the SentiWordNet file", "FILE",
                        CommandLine.Type.FILE_EXISTING, true, false, true)
                .withLogger(LoggerFactory.getLogger("eu.fbk.fssa")).parse(args);

        final File inputFile = cmd.getOptionValue("input", File.class);

        SentiWordNet.setPath(inputFile);
        SentiWordNet.init();
        System.out.println(SentiWordNet.searchValue("00478311-a"));
    } catch (final Throwable ex) {
        CommandLine.fail(ex);
    }
}
}
.parser() .withName("./tint-server") .withHeader("Run the Tint Server")