parserOptions.setVerifyData(false); final AsynchronousStatementBufferFactory<BigdataStatement,File> statementBufferFactory = new AsynchronousStatementBufferFactory<BigdataStatement,File>( statementBufferFactory.submitAll(resource, new com.bigdata.rdf.load.RDFFilenameFilter(), rejectedExecutionDelay); statementBufferFactory.submitOne(resource); statementBufferFactory.awaitAll(); System.err.println(statementBufferFactory.getCounters().toString()); statementBufferFactory.cancelAll(true/* mayInterruptIfRunning */);
try { assertSumOfLatchs(); notifyStart(); assertSumOfLatchs(); assertSumOfLatchs(); workflowLatch_document.dec(); workflowLatch_parser.dec(); assertSumOfLatchs();
/** * Submit a resource for processing. * * @param resource * The resource (file or URL, but not a directory). * * @throws Exception * if there is a problem creating the parser task. * @throws RejectedExecutionException * if the work queue for the parser service is full. */ public void submitOne(final R resource) throws Exception { lock.lock(); try { // Note: the parser task will obtain the lock when it runs. final Callable<?> task = newParserTask(resource); submitOne(resource, task); } finally { lock.unlock(); } }
/**
 * Renders the superclass description followed by the current counters,
 * separated by {@code "::"}.
 */
@Override
public String toString() {

    final StringBuilder sb = new StringBuilder(super.toString());

    sb.append("::");

    sb.append(getCounters());

    return sb.toString();

}
store, new File(resource), parallel); assertEquals("errorCount", 1, factory.getDocumentErrorCount());
try { assertSumOfLatchs(); assertSumOfLatchs(); assertSumOfLatchs(); assertSumOfLatchs(); assertSumOfLatchs(); notifyEnd();
.getAsynchronousIndexWriteConfiguration(); assertLiveness(lexiconRelation.getTerm2IdIndex() .getIndexMetadata().getName(), config); .getAsynchronousIndexWriteConfiguration(); assertLiveness(lexiconRelation.getBlobsIndex() .getIndexMetadata().getName(), config);
/** * Submit a resource for processing. * * @param resource * The resource (file or URL, but not a directory). * * @throws Exception * if there is a problem creating the parser task. * @throws RejectedExecutionException * if the work queue for the parser service is full. */ public void submitOne(final R resource) throws Exception { lock.lock(); try { // Note: the parser task will obtain the lock when it runs. final Callable<?> task = newParserTask(resource); submitOne(resource, task); } finally { lock.unlock(); } }
/**
 * The superclass representation, a {@code "::"} separator, and then the
 * current counters.
 */
@Override
public String toString() {
    // String.valueOf(Object) yields "null" for null, matching '+' semantics.
    final String counters = String.valueOf(getCounters());
    return super.toString() + "::" + counters;
}
store, new File(resource), parallel); assertEquals("errorCount", 1, factory.getDocumentErrorCount());
try { assertSumOfLatchs(); assertSumOfLatchs(); assertSumOfLatchs(); assertSumOfLatchs(); assertSumOfLatchs(); notifyEnd();
.getAsynchronousIndexWriteConfiguration(); assertLiveness(lexiconRelation.getTerm2IdIndex() .getIndexMetadata().getName(), config); .getAsynchronousIndexWriteConfiguration(); assertLiveness(lexiconRelation.getBlobsIndex() .getIndexMetadata().getName(), config);
parserOptions.setVerifyData(false); final AsynchronousStatementBufferFactory<BigdataStatement,File> statementBufferFactory = new AsynchronousStatementBufferFactory<BigdataStatement,File>( statementBufferFactory.submitAll(resource, new com.bigdata.rdf.load.RDFFilenameFilter(), rejectedExecutionDelay); statementBufferFactory.submitOne(resource); statementBufferFactory.awaitAll(); System.err.println(statementBufferFactory.getCounters().toString()); statementBufferFactory.cancelAll(true/* mayInterruptIfRunning */);
final Callable<?> task = newParserTask(resource); submitOne(resource, task);
/** * Verify counters for latches which must sum atomically to the * {@link #workflowLatch_document}. */ private void assertSumOfLatchs() { if(!lock.isHeldByCurrentThread()) throw new IllegalMonitorStateException(); /* * Sum the latches for the distinct workflow states for a document * across all documents. */ final long n1 = workflowLatch_parser.get()// + workflowLatch_bufferTids.get()// + workflowLatch_bufferOther.get()// ; final long n2 = workflowLatch_document.get(); if (n1 != n2) { throw new AssertionError("Sum of Latches=" + n1 + ", but unfinished=" + n2 + " : " + getCounters().toString()); } }
store, new File(resource), parallel); assertEquals("errorCount", 1, factory.getDocumentErrorCount());
try { assertSumOfLatchs(); notifyStart(); assertSumOfLatchs(); assertSumOfLatchs(); workflowLatch_document.dec(); workflowLatch_parser.dec(); assertSumOfLatchs();
final AsynchronousStatementBufferFactory<BigdataStatement, File> statementBufferFactory = new AsynchronousStatementBufferFactory<BigdataStatement, File>( statementBufferFactory.submitAll(resource, new com.bigdata.rdf.load.RDFFilenameFilter(), rejectedExecutionDelay); statementBufferFactory.submitOne(resource); statementBufferFactory.awaitAll(); System.err.println(statementBufferFactory.getCounters() .toString()); .cancelAll(true/* mayInterruptIfRunning */);
final Callable<?> task = newParserTask(resource); submitOne(resource, task);
/** * Verify counters for latches which must sum atomically to the * {@link #workflowLatch_document}. */ private void assertSumOfLatchs() { if(!lock.isHeldByCurrentThread()) throw new IllegalMonitorStateException(); /* * Sum the latches for the distinct workflow states for a document * across all documents. */ final long n1 = workflowLatch_parser.get()// + workflowLatch_bufferTids.get()// + workflowLatch_bufferOther.get()// ; final long n2 = workflowLatch_document.get(); if (n1 != n2) { throw new AssertionError("Sum of Latches=" + n1 + ", but unfinished=" + n2 + " : " + getCounters().toString()); } }