/** Returns a human-readable description of this rdfizer, keyed by its format name. */
public String toString() {
    return String.format("'%s' verbatim rdfizer", this.format.getName());
}
}
/** Describes this rdfizer by the name of its RDF serialization format. */
public String toString() {
    final String formatName = this.format.getName();
    return "'" + formatName + "' verbatim rdfizer";
}
}
/**
 * Sets the RDF serialization format in the given Configuration.
 *
 * @param conf Configuration containing MapReduce tool options.
 * @param format The expected format of any RDF text data.
 */
public static void setRDFFormat(Configuration conf, RDFFormat format) {
    // Only the format's name is stored; the reading side resolves it back to an RDFFormat.
    conf.set(FORMAT_PROP, format.getName());
}
/**
 * Sets the RDF serialization format to parse. All input files must share the
 * same format.
 *
 * @param job Job whose configuration receives the setting
 * @param format Format of every input file
 */
public static void setRDFFormat(Job job, RDFFormat format) {
    final String formatName = format.getName();
    job.getConfiguration().set(FORMAT_PROP, formatName);
}
/** Appends the format name plus each delegate matcher's description. */
@Override
public void describeTo(Description description) {
    description.appendText(format.getName()).appendText(" String ");
    // Single delegate: describe it inline; multiple: one per indented line.
    if (delegates.length == 1) {
        description.appendDescriptionOf(delegates[0]);
        return;
    }
    for (Matcher<? extends RepositoryConnection> delegate : delegates) {
        description.appendText("\n ").appendDescriptionOf(delegate);
    }
}
/** Builds a Hamcrest description: the format name, then the delegate matcher(s). */
@Override
public void describeTo(Description description) {
    description.appendText(format.getName()).appendText(" String ");
    if (delegates.length == 1) {
        // A lone delegate is described inline.
        description.appendDescriptionOf(delegates[0]);
    } else {
        // Multiple delegates are listed one per indented line.
        for (Matcher<? extends RepositoryConnection> delegate : delegates) {
            description.appendText("\n ").appendDescriptionOf(delegate);
        }
    }
}
/** Renders the current property value (an RDFFormat) as its canonical name. */
@Override
public String getAsText() {
    final RDFFormat format = (RDFFormat) getValue();
    return format.getName();
}
/**
 * Parses RDF text from the given stream, routing statements to the handler.
 *
 * @param is stream of serialized RDF in this rdfizer's format
 * @param handler sink that receives the parsed statements
 * @param baseUri base URI for resolving relative references
 * @return Success when parsing completed; ParseError on malformed input;
 *         Failure on I/O, handler, or any other error
 */
public CacheEntry.Status rdfize(final InputStream is, final RDFHandler handler, final String baseUri) {
    try {
        parser.setRDFHandler(handler);
        parser.parse(is, baseUri);
    } catch (IOException e) {
        logger.warn("I/O error in " + format.getName() + " rdfizer", e);
        return CacheEntry.Status.Failure;
    } catch (RDFParseException e) {
        logger.warn("RDF parsing error in " + format.getName() + " rdfizer", e);
        return CacheEntry.Status.ParseError;
    } catch (RDFHandlerException e) {
        logger.warn("RDF handler error in " + format.getName() + " rdfizer", e);
        return CacheEntry.Status.Failure;
    } catch (Throwable t) {
        logger.error("unclassified error in " + format.getName() + " rdfizer", t);
        // Bug fix: previously fell through and reported Success after an
        // unclassified error; an error must never be reported as success.
        return CacheEntry.Status.Failure;
    }
    return CacheEntry.Status.Success;
}
/**
 * Parses RDF text from the given stream, routing statements to the handler.
 *
 * @param is stream of serialized RDF in this rdfizer's format
 * @param handler sink that receives the parsed statements
 * @param baseUri base URI for resolving relative references
 * @return Success when parsing completed; ParseError on malformed input;
 *         Failure on I/O, handler, or any other error
 */
public CacheEntry.Status rdfize(final InputStream is, final RDFHandler handler, final String baseUri) {
    try {
        parser.setRDFHandler(handler);
        parser.parse(is, baseUri);
    } catch (IOException e) {
        logger.warn("I/O error in " + format.getName() + " rdfizer", e);
        return CacheEntry.Status.Failure;
    } catch (RDFParseException e) {
        logger.warn("RDF parsing error in " + format.getName() + " rdfizer", e);
        return CacheEntry.Status.ParseError;
    } catch (RDFHandlerException e) {
        logger.warn("RDF handler error in " + format.getName() + " rdfizer", e);
        return CacheEntry.Status.Failure;
    } catch (Throwable t) {
        logger.error("unclassified error in " + format.getName() + " rdfizer", t);
        // Bug fix: previously fell through and reported Success after an
        // unclassified error; an error must never be reported as success.
        return CacheEntry.Status.Failure;
    }
    return CacheEntry.Status.Success;
}
/**
 * Exposes the set of registered RDF writer formats to the view layer.
 * The TreeMap keeps the names alphabetically sorted for display.
 */
@Override
protected Map<String, Object> referenceData(HttpServletRequest request) {
    Map<String, String> resultFormats = new TreeMap<String, String>();
    for (RDFWriterFactory factory : RDFWriterRegistry.getInstance().getAll()) {
        String name = factory.getRDFFormat().getName();
        resultFormats.put(name, name);
    }
    Map<String, Object> result = new HashMap<String, Object>();
    result.put("resultFormats", resultFormats);
    return result;
}
}
/**
 * Exposes the set of registered RDF parser formats to the view layer.
 * The TreeMap keeps the names alphabetically sorted for display.
 */
@Override
protected Map<String, Object> referenceData(HttpServletRequest arg0) throws Exception {
    Map<String, String> rdfFormats = new TreeMap<String, String>();
    for (RDFParserFactory factory : RDFParserRegistry.getInstance().getAll()) {
        String name = factory.getRDFFormat().getName();
        rdfFormats.put(name, name);
    }
    Map<String, Object> result = new HashMap<String, Object>();
    result.put("formats", rdfFormats);
    return result;
}
}
/**
 * Extends the superclass reference data with the registered RDF writer
 * formats, alphabetically sorted for display.
 */
@Override
protected Map<String, Object> referenceData(HttpServletRequest request) {
    @SuppressWarnings("unchecked")
    Map<String, Object> result = (Map<String, Object>) super.referenceData(request);
    Map<String, String> resultFormats = new TreeMap<String, String>();
    for (RDFWriterFactory factory : RDFWriterRegistry.getInstance().getAll()) {
        String name = factory.getRDFFormat().getName();
        resultFormats.put(name, name);
    }
    result.put("resultFormats", resultFormats);
    return result;
}
}
/**
 * Imports RDF data into the named dataset over HTTP.
 *
 * @param in serialized RDF to upload
 * @param format serialization format of the stream
 * @param dataset target dataset identifier
 * @param cleanBefore when true, replace the dataset (PUT); otherwise append (POST)
 * @return true if the server answered 200 OK
 * @throws RuntimeException wrapping any URL-construction or I/O failure
 */
@Override
public boolean importDataset(InputStream in, RDFFormat format, String dataset, boolean cleanBefore) {
    log.debug("Importing {} data into dataset {}", format.getName(), dataset);
    try {
        java.net.URI target = credentials.buildUrl(getDatasetUriBuilder(dataset));
        // try-with-resources replaces the manual try/finally { response.close(); }.
        try (CloseableHttpResponse response = cleanBefore
                ? client.put(target, in, format)
                : client.post(target, in, format)) {
            log.debug("Request resolved with {} status code: {}",
                    response.getStatusLine().getStatusCode(),
                    response.getStatusLine().getReasonPhrase());
            return (response.getStatusLine().getStatusCode() == 200);
        }
    } catch (IllegalArgumentException | URISyntaxException | IOException e) {
        log.error("Error importing dataset: {}", e.getMessage(), e);
        throw new RuntimeException(e);
    }
}
/**
 * Imports a single resource's model into the named dataset, serialized as Turtle.
 *
 * @param resource resource identifier within the dataset
 * @param data statements to upload
 * @param dataset target dataset identifier
 * @param cleanBefore when true, replace the resource (PUT); otherwise append (POST)
 * @return true if the server answered 200 OK
 * @throws RuntimeException wrapping any URL, serialization, or I/O failure
 */
@Override
public boolean importResource(String resource, Model data, String dataset, boolean cleanBefore) {
    RDFFormat format = RDFFormat.TURTLE;
    log.debug("Importing {} data for resource {} in {}", format.getName(), resource, dataset);
    try {
        java.net.URI target = credentials.buildUrl(getResourceUriBuilder(dataset, resource));
        // Serialize the model to memory, then stream it to the server.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        Rio.write(data, out, format);
        InputStream in = new ByteArrayInputStream(out.toByteArray());
        // try-with-resources replaces the manual try/finally { response.close(); }.
        try (CloseableHttpResponse response = cleanBefore
                ? client.put(target, in, format)
                : client.post(target, in, format)) {
            log.debug("Request resolved with {} status code: {}",
                    response.getStatusLine().getStatusCode(),
                    response.getStatusLine().getReasonPhrase());
            return (response.getStatusLine().getStatusCode() == 200);
        }
    } catch (IllegalArgumentException | URISyntaxException | RDFHandlerException | IOException e) {
        log.error("Error importing resource: {}", e.getMessage(), e);
        throw new RuntimeException(e);
    }
}
/**
 * Instantiates the RecordReader.
 *
 * @param format RDF serialization format to parse.
 * @param charBufferSize Number of input characters to hold in memory; if
 *                       exceeded, wait until the parser thread consumes some
 *                       text before proceeding with reading input.
 * @param statementBufferSize Number of output statements to hold in memory; if
 *                            exceeded, wait until the client consumes data
 *                            before proceeding with parsing.
 * @param timeoutSeconds Number of seconds to wait for the parser thread to
 *                       provide the next statement (or state that there are
 *                       none). If exceeded, abort.
 */
RdfFileRecordReader(RDFFormat format, int charBufferSize, int statementBufferSize, int timeoutSeconds) {
    // This object doubles as the RDFHandler receiving parsed statements.
    rdfParser = Rio.createParser(format);
    rdfParser.setRDFHandler(this);
    // Bounded queue provides the back-pressure described for statementBufferSize.
    statementCache = new LinkedBlockingQueue<RyaStatementWritable>(statementBufferSize);
    // NOTE(review): pipeOut and pipeIn are not connected to each other here;
    // presumably they are connected before parsing starts — confirm elsewhere.
    pipeOut = new PipedWriter();
    pipeIn = new PipedReader(charBufferSize);
    this.timeoutSeconds = timeoutSeconds;
    logger.info("Initializing RecordReader with parameters:");
    logger.info("\tRDF serialization format = " + format.getName());
    logger.info("\tinput buffer size = " + charBufferSize + " characters");
    logger.info("\tstatement cache size = " + statementBufferSize);
    logger.info("\tparser timeout = " + timeoutSeconds + " seconds");
}
private void addDocumentMetadata(final Collection<Statement> statements, final ValueFactory vf) throws SailException { // Metadata about the document itself IRI docURI = vf.createIRI(selfURI); statements.add(vf.createStatement(docURI, RDF.TYPE, vf.createIRI("http://xmlns.com/foaf/0.1/Document"))); statements.add(vf.createStatement(docURI, RDFS.LABEL, vf.createLiteral("" + format.get().getName() + " description of resource '" + typeSpecificId + "'"))); // Note: we go to the trouble of special-casing the dataset URI, so that // it is properly rewritten, along with all other TwitLogic resource // URIs (which are rewritten through the Sail). if (null != datasetURI) { statements.add(vf.createStatement(docURI, RDFS.SEEALSO, datasetURI)); } }
private Model getResource(UriBuilder uriBuilder) { RDFFormat format = RDFFormat.TURTLE; try { java.net.URI target = credentials.buildUrl(uriBuilder); log.debug("Exporting {} data from resource {}", format.getName(), target.toString()); String entity = client.get(target, format.getDefaultMIMEType()); return Rio.parse(new StringReader(entity), target.toString(), format, new ParserConfig(), ValueFactoryImpl.getInstance(), new ParseErrorLogger()); } catch (IllegalArgumentException | URISyntaxException | RDFParseException | IOException e) { if (e instanceof ClientProtocolException && "Unexpected response status: 404".compareTo(e.getMessage())==0) { //keeping old behavior, should not be silently fail (i.e. return empty model)? return new LinkedHashModel(); } else { throw new RuntimeException(e); } } }
/** Test the default mime type for each {@link RDFFormat}. */
@SuppressWarnings({ "rawtypes", "unchecked" })
public void test_conneg_rdf_data_01() {
    for (RDFFormat format : RDFFormat.values()) {
        // Negotiating on the format's default MIME type must round-trip to it.
        final ConnegUtil util = new ConnegUtil(format.getDefaultMIMEType());
        assertEquals(format.getName(), format, util.getRDFFormat());
        // "JSON" is excluded: its MIME type also matches a tuple-result format.
        if (!format.getName().equals("JSON")) {
            assertNull(format.getName(), util.getTupleQueryResultFormat());
        }
        // The format should be the single, top-scoring candidate.
        assertSameArray(new ConnegScore[] {//
        new ConnegScore(1f, format) },//
                util.getScores(RDFFormat.class));
    }
}
/**
 * Exports the named dataset as Turtle over HTTP and parses it into a Model.
 *
 * @param dataset dataset identifier to export
 * @return the parsed model
 * @throws RuntimeException wrapping any URL, parse, or I/O failure
 */
@Override
public Model exportDataset(String dataset) {
    final RDFFormat format = RDFFormat.TURTLE;
    try {
        final java.net.URI target = credentials.buildUrl(getDatasetUriBuilder(dataset));
        log.debug("Exporting {} data from dataset {}", format.getName(), dataset);
        final String entity = client.get(target, format.getDefaultMIMEType());
        final StringReader body = new StringReader(entity);
        final String baseUri = target.toString();
        return Rio.parse(body, baseUri, format, new ParserConfig(), ValueFactoryImpl.getInstance(), new ParseErrorLogger());
    } catch (IllegalArgumentException | URISyntaxException | RDFParseException | IOException e) {
        throw new RuntimeException(e);
    }
}
/** Test the default mime type for each {@link RDFFormat}. */
@SuppressWarnings({ "rawtypes", "unchecked" })
public void test_conneg_rdf_data_01() {
    for (RDFFormat format : RDFFormat.values()) {
        // Negotiating on the format's default MIME type must round-trip to it.
        final ConnegUtil util = new ConnegUtil(format.getDefaultMIMEType());
        assertEquals(format.getName(), format, util.getRDFFormat());
        // "JSON" is excluded: its MIME type also matches a tuple-result format.
        if (!format.getName().equals("JSON")) {
            assertNull(format.getName(), util.getTupleQueryResultFormat());
        }
        // The format should be the single, top-scoring candidate.
        assertSameArray(new ConnegScore[] {//
        new ConnegScore(1f, format) },//
                util.getScores(RDFFormat.class));
    }
}