// Reads a JVM system property and converts it to the requested type, falling
// back to the supplied default when the property is unset or unconvertible.
@Nullable
private static <T> T retrieveProperty(final String property, final Class<T> type,
        final T defaultValue) {
    final String value = System.getProperty(property);
    if (value == null) {
        return defaultValue;
    }
    try {
        return Data.convert(value, type);
    } catch (final Throwable ex) {
        // Best effort: a malformed property value must not abort startup.
        LOGGER.warn("Could not retrieve property '" + property + "'", ex);
        return defaultValue;
    }
}
// Renders a Value for display; a null value is shown as the wildcard "*".
private String format(@Nullable final Value value) {
    if (value == null) {
        return "*";
    }
    return Data.toString(value, Data.getNamespaceMap());
}
private String toInternalFilename(final String filename) { return this.forceCompression || Data.isMimeTypeCompressible( // Data.extensionToMimeType(filename)) ? filename + ".gz" : filename; }
// Creates a serializer backed by the supplied (possibly null) URI dictionary,
// caching the shared value and datatype factories for later use.
public SerializerAvro(@Nullable final Dictionary<URI> dictionary) {
    this.factory = Data.getValueFactory();
    this.datatypeFactory = Data.getDatatypeFactory();
    this.dictionary = dictionary;
}
private static void write(final Stream<Record> axioms, final OutputStream stream, @Nullable final String formatString) throws IOException { // Determine target RDF format and compression based on format string final Compression compression = detectCompression(formatString, Compression.NONE); final RDFFormat format = detectRDFFormat(formatString, null); if (format == null) { if (formatString == null) { throw new IllegalArgumentException( "Must specify output format (-t) if writing to STDOUT"); } else { throw new IllegalArgumentException("Cannot detect RDF format for " + formatString); } } // Setup compression, if necessary final OutputStream actualStream = compression.write(Data.getExecutor(), stream); // Performs writing RDFUtil.writeRDF(actualStream, format, Data.getNamespaceMap(), null, Record.encode(axioms, ImmutableSet.of(KS.AXIOM))); }
// NOTE(review): fragment of launcher-configuration loading; 's', 'p', 'o',
// the enclosing statement loop, and the closing braces are outside this
// excerpt. Reads the config RDF, then picks launcher properties off it.
final InputStream stream = retrieveURL(configLocation).openStream();
final RDFFormat format = RDFFormat.forFileName(configLocation);
config = RDFUtil.readRDF(stream, format, Data.getNamespaceMap(), null, false).toList();
stream.close();
if (s.equals(LAUNCHER_URI)) {
    if (p.getLocalName().equals(PROPERTY_THREAD_NAME)) {
        threadName = Data.convert(o, String.class);
    } else if (p.getLocalName().equals(PROPERTY_THREAD_COUNT)) {
        threadCount = Data.convert(o, Integer.class);
    } else if (p.getLocalName().equals(PROPERTY_LOG_CONFIG)) {
        logConfig = Data.convert(o, String.class);
    } else if (p.getLocalName().equals(PROPERTY_COMPONENT)) {
        componentURI = (URI) o;
// Install the shared executor configured from the properties read above.
Data.setExecutor(Util.newScheduler(threadCount, threadName, true));
// Creates a session for the given (optional) credentials.
// NOTE(review): the namespace map passed to the superclass appears to be a
// fresh map combined with the shared global namespaces, presumably so that
// per-session namespace additions do not leak into other sessions - confirm
// against Data.newNamespaceMap's documentation.
SessionImpl(@Nullable final String username, @Nullable final String password) {
    super(Data.newNamespaceMap(Data.newNamespaceMap(), Data.getNamespaceMap()),
            username, password);
}
// NOTE(review): fragment of command-line option parsing (Commons-CLI style);
// 'cmd' is defined outside this excerpt. -i disables validation, -c supplies
// merge criteria, -g overrides the global URI, -u carries credentials.
final boolean validate = !cmd.hasOption('i');
final Criteria criteria = !cmd.hasOption('c') ? Criteria.overwrite() : //
        Criteria.parse(cmd.getOptionValue('c'), Data.getNamespaceMap());
final URI globalURI = cmd.hasOption('g') ? (URI) Data.parseValue(
        cmd.getOptionValue('g'), Data.getNamespaceMap()) : CKR.GLOBAL;
final String credentials = cmd.getOptionValue('u');
// NOTE(review): fragment of record validation; 'suppliedRecordsOrIDs',
// 'suppliedRecord' and the enclosing control flow are outside this excerpt.
final ValueFactory factory = Data.getValueFactory();
final int size = suppliedRecordsOrIDs.size();
// Pre-size the outcome list: one outcome per supplied record or ID.
final List<Outcome> outcomes = Lists.newArrayListWithCapacity(size);
// Records without an ID are rejected; the record body is echoed in the message.
outcomes.add(newOutcome(Status.ERROR_INVALID_INPUT, null,
        "Missing ID for record:\n" + suppliedRecord //
                .toString(Data.getNamespaceMap(), true)));
/**
 * Rewrites a URI value from the source namespace prefix to the target prefix.
 * Non-URI values, nulls, and URIs outside the source namespace are returned
 * unchanged.
 */
@SuppressWarnings("unchecked")
@Nullable
<T extends Value> T rewriteValue(@Nullable final T value) {
    if (!(value instanceof URI)) {
        return value;
    }
    final String original = ((URI) value).stringValue();
    if (!original.startsWith(this.fromPrefix)) {
        return value;
    }
    final String rewritten = this.toPrefix + original.substring(this.fromPrefix.length());
    return (T) Data.getValueFactory().createURI(rewritten);
}
// Expands the template text by resolving each placeholder variable against
// the supplied bindings and passing the stringified values to String.format.
String instantiate(final BindingSet bindings) {
    final int count = this.placeholderVariables.length;
    final Object[] arguments = new String[count];
    for (int index = 0; index < count; ++index) {
        final Value resolved = bindings.getValue(this.placeholderVariables[index]);
        arguments[index] = Data.toString(resolved, null);
    }
    return String.format(this.text, arguments);
}
/**
 * Persists the supplied tables to the backing file, provided the expected
 * revision still matches the current one (optimistic concurrency control).
 *
 * @param tables the new type -&gt; (ID -&gt; record) tables to persist
 * @param revision the revision number this update was computed against
 * @throws IOException if a concurrent modification is detected
 */
private synchronized void update(final Map<URI, Map<URI, Record>> tables, final int revision)
        throws IOException {
    if (this.revision != revision) {
        throw new IOException("Commit failed due to concurrent modifications " + this.revision
                + ", " + revision);
    }
    OutputStream stream = null;
    try {
        // Write via a backup so a failed write cannot corrupt the existing file.
        stream = Files.writeWithBackup(this.fileSystem, this.filePath);
        // Flatten all per-type tables into a single record list for serialization.
        final List<Record> records = Lists.newArrayList();
        for (final URI type : tables.keySet()) {
            records.addAll(tables.get(type).values());
        }
        final RDFFormat format = RDFFormat.forFileName(this.filePath.getName());
        RDFUtil.writeRDF(stream, format, Data.getNamespaceMap(), null,
                Record.encode(Stream.create(records), ImmutableSet.<URI>of()));
        // Bump the revision and swap the in-memory tables only after the write succeeded.
        ++this.revision;
        this.tables = tables;
        MemoryDataStore.LOGGER.info("MemoryDataStore updated, {} records persisted",
                records.size());
    } catch (final Throwable ex) {
        // NOTE(review): write failures are logged but not rethrown, so callers
        // cannot distinguish a failed commit from a successful one - confirm
        // this best-effort behaviour is intended.
        MemoryDataStore.LOGGER.error("MemoryDataStore update failed", ex);
    } finally {
        Util.closeQuietly(stream);
    }
}
// Resolves the storage path of a file. Files are bucketed first by MIME type
// (with '/' replaced so the type is a valid directory name) and then by the
// first two characters of the filename hash, keeping directory sizes bounded.
private Path getFullPath(final String fileName) {
    String mimeType = Data.extensionToMimeType(fileName);
    if (mimeType == null) {
        mimeType = "application/octet-stream"; // unknown extension
    }
    final String typeDirectory = mimeType.replace('/', '_');
    final String bucketDirectory = Data.hash(fileName).substring(0, 2);
    return new Path(this.rootPath, typeDirectory + "/" + bucketDirectory + "/" + fileName);
}
// NOTE(review): fragment - the Runnable body and delay arguments are outside
// this excerpt. Schedules periodic cleanup on the shared executor; the future
// is retained, presumably so the task can be cancelled later - confirm.
this.cleanupFuture = Data.getExecutor().scheduleWithFixedDelay(new Runnable() {
// NOTE(review): fragment of file-metadata normalization; 'resourceID' and
// 'fileName' are declared outside this excerpt.
metadata.setID(Data.getValueFactory().createURI(resourceID + "_file"));
fileName = metadata.getUnique(NFO.FILE_NAME, String.class);
String fileType = metadata.getUnique(NIE.MIME_TYPE, String.class);
// Fall back to guessing the MIME type from the file extension when absent.
fileType = fileType != null ? fileType : Data.extensionToMimeType(fileName);
metadata.set(NFO.FILE_NAME, fileName);
metadata.set(NIE.MIME_TYPE, fileType);
// NOTE(review): fragment; 'fileExt' and the closing brace are outside this
// excerpt. Picks the first (preferred) extension registered for the MIME type.
final List<String> mimeExtensions = Data.mimeTypeToExtensions(suppliedFileType);
if (!mimeExtensions.isEmpty()) {
    fileExt = mimeExtensions.get(0);
// Derives a fresh ZIP file name by hashing a monotonically increasing counter,
// then resolves its storage path.
final String zipName = Data.hash(this.zipNameCounter++) + ".zip";
final Path zipPath = pathForZipFile(zipName);
private static void write(final Stream<Record> axioms, final File file, @Nullable final String formatString) throws IOException { // Determine target RDF format and compression based on format string Compression compression = detectCompression(file.getName(), null); if (compression == null) { compression = detectCompression(formatString, Compression.NONE); } RDFFormat format = detectRDFFormat(file.getName(), null); if (format == null) { format = detectRDFFormat(formatString, null); } if (format == null) { throw new IllegalArgumentException("Cannot detect RDF format of " + file); } // Setup compression, if necessary final OutputStream actualStream = compression.write(Data.getExecutor(), file); // Performs writing try { RDFUtil.writeRDF(actualStream, format, Data.getNamespaceMap(), null, Record.encode(axioms, ImmutableSet.of(KS.AXIOM))); } finally { Util.closeQuietly(actualStream); } }
// Produces the next URI from the underlying reader, one per line. End of
// input maps to endOfData(); read failures are rethrown unchecked.
@Override
protected URI computeNext() {
    try {
        final String nextLine = reader.readLine();
        if (nextLine == null) {
            return endOfData();
        }
        return Data.getValueFactory().createURI(nextLine);
    } catch (final Throwable ex) {
        throw Throwables.propagate(ex);
    }
}
// Creates a serializer whose URI dictionary is loaded from (or created at)
// the given Hadoop dictionary file, caching the shared value and datatype
// factories for later use.
public SerializerAvro(final String fileName) throws IOException {
    this.factory = Data.getValueFactory();
    this.datatypeFactory = Data.getDatatypeFactory();
    this.dictionary = Dictionary.createHadoopDictionary(URI.class, fileName);
}