/**
 * Closes all given <tt>Closeable</tt>s.  Null elements are skipped; if any
 * close fails, the remaining objects are still closed and the first
 * throwable is rethrown at the end (later ones are added as suppressed).
 *
 * @see #close(Closeable...)
 */
public static void close(Iterable<? extends Closeable> objects) throws IOException {
  Throwable firstThrown = null;
  for (Closeable closeable : objects) {
    if (closeable == null) {
      continue; // null entries are explicitly allowed and ignored
    }
    try {
      closeable.close();
    } catch (Throwable t) {
      addSuppressed(firstThrown, t);
      if (firstThrown == null) {
        firstThrown = t;
      }
    }
  }
  reThrow(firstThrown);
}
/**
 * Decrement the reference count of the given {@link DocValuesProducer}
 * generations.
 *
 * <p>All generations are processed even if some {@code decRef()} calls
 * throw; the first throwable encountered is rethrown after the loop.
 *
 * @param dvProducersGens generations whose producers should be released
 * @throws IOException if releasing any producer failed
 */
synchronized void decRef(List<Long> dvProducersGens) throws IOException {
  Throwable t = null;
  for (Long gen : dvProducersGens) {
    RefCount<DocValuesProducer> dvp = genDVProducers.get(gen);
    assert dvp != null : "gen=" + gen;
    try {
      dvp.decRef();
    } catch (Throwable th) {
      // BUGFIX: remember only the FIRST failure.  The original condition
      // was "t != null", which can never be true on the first failure
      // (t starts null), so no exception was ever recorded and all
      // decRef failures were silently swallowed.
      if (t == null) {
        t = th;
      }
    }
  }
  if (t != null) {
    IOUtils.reThrow(t);
  }
}
}
/**
 * Closes all given <tt>Closeable</tt>s.  Null elements are skipped; if any
 * close fails, the remaining objects are still closed and the first
 * throwable is rethrown at the end (later ones are added as suppressed).
 *
 * @see #close(Closeable...)
 */
public static void close(Iterable<? extends Closeable> objects) throws IOException {
  Throwable firstThrown = null;
  for (Closeable closeable : objects) {
    if (closeable == null) {
      continue; // null entries are explicitly allowed and ignored
    }
    try {
      closeable.close();
    } catch (Throwable t) {
      addSuppressed(firstThrown, t);
      if (firstThrown == null) {
        firstThrown = t;
      }
    }
  }
  reThrow(firstThrown);
}
/**
 * Decrement the reference count of the given {@link DocValuesProducer}
 * generations.
 *
 * <p>All generations are processed even if some {@code decRef()} calls
 * throw; the first throwable encountered is rethrown after the loop.
 *
 * @param dvProducersGens generations whose producers should be released
 * @throws IOException if releasing any producer failed
 */
synchronized void decRef(List<Long> dvProducersGens) throws IOException {
  Throwable t = null;
  for (Long gen : dvProducersGens) {
    RefCount<DocValuesProducer> dvp = genDVProducers.get(gen);
    assert dvp != null : "gen=" + gen;
    try {
      dvp.decRef();
    } catch (Throwable th) {
      // BUGFIX: remember only the FIRST failure.  The original condition
      // was "t != null", which can never be true on the first failure
      // (t starts null), so no exception was ever recorded and all
      // decRef failures were silently swallowed.
      if (t == null) {
        t = th;
      }
    }
  }
  if (t != null) {
    IOUtils.reThrow(t);
  }
}
}
/**
 * Deletes all given <tt>Path</tt>s, if they exist.  Some of the
 * <tt>File</tt>s may be null; they are
 * ignored.  After everything is deleted, the method either
 * throws the first exception it hit while deleting, or
 * completes normally if there were no exceptions.
 *
 * @param files files to delete
 */
public static void deleteFilesIfExist(Collection<? extends Path> files) throws IOException {
  Throwable firstThrown = null;
  for (Path path : files) {
    if (path == null) {
      continue; // null entries are explicitly allowed and ignored
    }
    try {
      Files.deleteIfExists(path);
    } catch (Throwable t) {
      addSuppressed(firstThrown, t);
      if (firstThrown == null) {
        firstThrown = t;
      }
    }
  }
  reThrow(firstThrown);
}
/**
 * Deletes all given <tt>Path</tt>s, if they exist.  Some of the
 * <tt>File</tt>s may be null; they are
 * ignored.  After everything is deleted, the method either
 * throws the first exception it hit while deleting, or
 * completes normally if there were no exceptions.
 *
 * @param files files to delete
 */
public static void deleteFilesIfExist(Collection<? extends Path> files) throws IOException {
  Throwable firstThrown = null;
  for (Path path : files) {
    if (path == null) {
      continue; // null entries are explicitly allowed and ignored
    }
    try {
      Files.deleteIfExists(path);
    } catch (Throwable t) {
      addSuppressed(firstThrown, t);
      if (firstThrown == null) {
        firstThrown = t;
      }
    }
  }
  reThrow(firstThrown);
}
/**
 * Handles a tragic (unrecoverable) exception: records it as this writer's
 * tragedy (unless another thread already did), rolls the writer back if it
 * is still open, and always rethrows {@code tragedy}.
 *
 * @param tragedy  the unrecoverable throwable; must not be null
 * @param location short description of where the tragedy was hit (logging only)
 * @throws IOException always terminates by rethrowing {@code tragedy}
 */
void tragicEvent(Throwable tragedy, String location) throws IOException {

  // unbox our internal AbortingException
  if (tragedy instanceof AbortingException) {
    tragedy = tragedy.getCause();
  }

  // This is not supposed to be tragic: IW is supposed to catch this and
  // ignore, because it means we asked the merge to abort:
  assert tragedy instanceof MergePolicy.MergeAbortedException == false;

  // We cannot hold IW's lock here else it can lead to deadlock:
  assert Thread.holdsLock(this) == false;

  // How can it be a tragedy when nothing happened?
  assert tragedy != null;

  if (infoStream.isEnabled("IW")) {
    infoStream.message("IW", "hit tragic " + tragedy.getClass().getSimpleName() + " inside " + location);
  }

  synchronized (this) {
    // It's possible you could have a really bad day
    if (this.tragedy != null) {
      // Another thread is already dealing / has dealt with the tragedy:
      IOUtils.reThrow(tragedy);
    }
    // First thread to get here publishes the tragedy under the lock.
    this.tragedy = tragedy;
  }

  // if we are already closed (e.g. called by rollback), this will be a no-op.
  if (shouldClose(false)) {
    rollbackInternal();
  }

  IOUtils.reThrow(tragedy);
}
/**
 * Handles a tragic (unrecoverable) exception: records it as this writer's
 * tragedy (unless another thread already did), rolls the writer back if it
 * is still open, and always rethrows {@code tragedy}.
 *
 * @param tragedy  the unrecoverable throwable; must not be null
 * @param location short description of where the tragedy was hit (logging only)
 * @throws IOException always terminates by rethrowing {@code tragedy}
 */
void tragicEvent(Throwable tragedy, String location) throws IOException {

  // unbox our internal AbortingException
  if (tragedy instanceof AbortingException) {
    tragedy = tragedy.getCause();
  }

  // This is not supposed to be tragic: IW is supposed to catch this and
  // ignore, because it means we asked the merge to abort:
  assert tragedy instanceof MergePolicy.MergeAbortedException == false;

  // We cannot hold IW's lock here else it can lead to deadlock:
  assert Thread.holdsLock(this) == false;

  // How can it be a tragedy when nothing happened?
  assert tragedy != null;

  if (infoStream.isEnabled("IW")) {
    infoStream.message("IW", "hit tragic " + tragedy.getClass().getSimpleName() + " inside " + location);
  }

  synchronized (this) {
    // It's possible you could have a really bad day
    if (this.tragedy != null) {
      // Another thread is already dealing / has dealt with the tragedy:
      IOUtils.reThrow(tragedy);
    }
    // First thread to get here publishes the tragedy under the lock.
    this.tragedy = tragedy;
  }

  // if we are already closed (e.g. called by rollback), this will be a no-op.
  if (shouldClose(false)) {
    rollbackInternal();
  }

  IOUtils.reThrow(tragedy);
}
} catch (Throwable e) { if (failFast) { IOUtils.reThrow(e);
} catch (Throwable e) { if (failFast) { IOUtils.reThrow(e);
@Override protected void doClose() throws IOException { Throwable firstExc = null; for (final LeafReader r : getSequentialSubReaders()) { // try to close each reader, even if an exception is thrown try { r.decRef(); } catch (Throwable t) { if (firstExc == null) { firstExc = t; } } } if (writer != null) { try { writer.decRefDeleter(segmentInfos); } catch (AlreadyClosedException ex) { // This is OK, it just means our original writer was // closed before we were, and this may leave some // un-referenced files in the index, which is // harmless. The next time IW is opened on the // index, it will delete them. } } // throw the first exception IOUtils.reThrow(firstExc); }
@Override protected void doClose() throws IOException { Throwable firstExc = null; for (final LeafReader r : getSequentialSubReaders()) { // try to close each reader, even if an exception is thrown try { r.decRef(); } catch (Throwable t) { if (firstExc == null) { firstExc = t; } } } if (writer != null) { try { writer.decRefDeleter(segmentInfos); } catch (AlreadyClosedException ex) { // This is OK, it just means our original writer was // closed before we were, and this may leave some // un-referenced files in the index, which is // harmless. The next time IW is opened on the // index, it will delete them. } } // throw the first exception IOUtils.reThrow(firstExc); }
} catch (Throwable e) { if (failFast) { IOUtils.reThrow(e);
} catch (Throwable e) { if (failFast) { IOUtils.reThrow(e);
/**
 * Test the term index.
 *
 * <p>Runs {@code checkFields} over the reader's postings; on failure the
 * throwable is either rethrown immediately ({@code failFast}) or recorded
 * in the returned status' {@code error} field.
 *
 * @param reader     the reader whose postings are checked
 * @param infoStream where progress/errors are printed; may be null
 * @param verbose    passed through to {@code checkFields}
 * @param failFast   if true, rethrow the first throwable instead of recording it
 * @return status of the term index check; {@code status.error} is non-null on failure
 * @lucene.experimental
 */
public static Status.TermIndexStatus testPostings(CodecReader reader, PrintStream infoStream, boolean verbose, boolean failFast) throws IOException {

  // TODO: we should go and verify term vectors match, if
  // crossCheckTermVectors is on...

  Status.TermIndexStatus status;
  final int maxDoc = reader.maxDoc();

  try {
    if (infoStream != null) {
      infoStream.print(" test: terms, freq, prox...");
    }

    final Fields fields = reader.getPostingsReader().getMergeInstance();
    final FieldInfos fieldInfos = reader.getFieldInfos();
    status = checkFields(fields, reader.getLiveDocs(), maxDoc, fieldInfos, true, false, infoStream, verbose);
  } catch (Throwable e) {
    if (failFast) {
      IOUtils.reThrow(e);
    }
    msg(infoStream, "ERROR: " + e);
    // Record the failure instead of propagating it.
    status = new Status.TermIndexStatus();
    status.error = e;
    if (infoStream != null) {
      e.printStackTrace(infoStream);
    }
  }

  return status;
}
/**
 * Records a merge failure on the merge itself and rethrows it, except for
 * {@code MergeAbortedException} on a purely-internal merge, which is
 * deliberately swallowed.
 *
 * @param t     the throwable hit while merging
 * @param merge the merge that failed
 * @throws IOException rethrows {@code t} (or the aborted exception for external merges)
 */
final private void handleMergeException(Throwable t, MergePolicy.OneMerge merge) throws IOException {

  if (infoStream.isEnabled("IW")) {
    infoStream.message("IW", "handleMergeException: merge=" + segString(merge.segments) + " exc=" + t);
  }

  // Set the exception on the merge, so if
  // forceMerge is waiting on us it sees the root
  // cause exception:
  merge.setException(t);
  addMergeException(merge);

  if (t instanceof MergePolicy.MergeAbortedException) {
    // We can ignore this exception (it happens when
    // deleteAll or rollback is called), unless the
    // merge involves segments from external directories,
    // in which case we must throw it so, for example, the
    // rollbackTransaction code in addIndexes* is
    // executed.
    if (merge.isExternal) {
      throw (MergePolicy.MergeAbortedException) t;
    }
  } else {
    IOUtils.reThrow(t);
  }
}
/**
 * Test the term index.
 *
 * <p>Runs {@code checkFields} over the reader's postings; on failure the
 * throwable is either rethrown immediately ({@code failFast}) or recorded
 * in the returned status' {@code error} field.
 *
 * @param reader     the reader whose postings are checked
 * @param infoStream where progress/errors are printed; may be null
 * @param verbose    passed through to {@code checkFields}
 * @param failFast   if true, rethrow the first throwable instead of recording it
 * @return status of the term index check; {@code status.error} is non-null on failure
 * @lucene.experimental
 */
public static Status.TermIndexStatus testPostings(CodecReader reader, PrintStream infoStream, boolean verbose, boolean failFast) throws IOException {

  // TODO: we should go and verify term vectors match, if
  // crossCheckTermVectors is on...

  Status.TermIndexStatus status;
  final int maxDoc = reader.maxDoc();

  try {
    if (infoStream != null) {
      infoStream.print(" test: terms, freq, prox...");
    }

    final Fields fields = reader.getPostingsReader().getMergeInstance();
    final FieldInfos fieldInfos = reader.getFieldInfos();
    status = checkFields(fields, reader.getLiveDocs(), maxDoc, fieldInfos, true, false, infoStream, verbose);
  } catch (Throwable e) {
    if (failFast) {
      IOUtils.reThrow(e);
    }
    msg(infoStream, "ERROR: " + e);
    // Record the failure instead of propagating it.
    status = new Status.TermIndexStatus();
    status.error = e;
    if (infoStream != null) {
      e.printStackTrace(infoStream);
    }
  }

  return status;
}
/**
 * Records a merge failure on the merge itself and rethrows it, except for
 * {@code MergeAbortedException} on a purely-internal merge, which is
 * deliberately swallowed.
 *
 * @param t     the throwable hit while merging
 * @param merge the merge that failed
 * @throws IOException rethrows {@code t} (or the aborted exception for external merges)
 */
final private void handleMergeException(Throwable t, MergePolicy.OneMerge merge) throws IOException {

  if (infoStream.isEnabled("IW")) {
    infoStream.message("IW", "handleMergeException: merge=" + segString(merge.segments) + " exc=" + t);
  }

  // Set the exception on the merge, so if
  // forceMerge is waiting on us it sees the root
  // cause exception:
  merge.setException(t);
  addMergeException(merge);

  if (t instanceof MergePolicy.MergeAbortedException) {
    // We can ignore this exception (it happens when
    // deleteAll or rollback is called), unless the
    // merge involves segments from external directories,
    // in which case we must throw it so, for example, the
    // rollbackTransaction code in addIndexes* is
    // executed.
    if (merge.isExternal) {
      throw (MergePolicy.MergeAbortedException) t;
    }
  } else {
    IOUtils.reThrow(t);
  }
}
IOUtils.reThrow(firstExc);