/**
 * Close the underlying input stream.
 */
public void close() {
  CarbonUtil.closeStreams(dataInputStream);
}
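Every snippet in this section funnels cleanup through CarbonUtil.closeStreams. As a point of reference, a minimal sketch of such a helper, assuming a null-safe varargs signature over java.io.Closeable (the real CarbonData implementation may differ in logging details), could look like this:

import java.io.Closeable;
import java.io.IOException;

public final class CarbonUtilSketch {
  /**
   * Null-safe close of any number of streams. Failures during close are
   * swallowed so cleanup never masks the exception that triggered it.
   */
  public static void closeStreams(Closeable... streams) {
    if (streams == null) {
      return;
    }
    for (Closeable stream : streams) {
      if (stream == null) {
        continue;
      }
      try {
        stream.close();
      } catch (IOException e) {
        // intentionally ignored: best-effort cleanup
      }
    }
  }
}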
public void close() {
  CarbonUtil.closeStreams(in);
}
/**
 * Below method will be used to close streams
 */
public void closeStream() {
  CarbonUtil.closeStreams(stream);
  if (null != executorService) {
    executorService.shutdownNow();
  }
  this.backupBuffer = null;
  this.currentBuffer = null;
}
protected void releaseResouce() {
  for (int indexColId = 0; indexColId < indexColumns.size(); indexColId++) {
    CarbonUtil.closeStreams(currentDataOutStreams.get(indexColId));
  }
}
/**
 * Below method will be used to close streams
 */
public void close() {
  CarbonUtil.closeStreams(stream);
  if (null != executorService && !executorService.isShutdown()) {
    executorService.shutdownNow();
  }
}
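The two close methods above call shutdownNow() directly, which interrupts in-flight tasks. Where a graceful stop is preferred, a common alternative is the standard shutdown-then-force pattern sketched below; the 30-second timeout is an arbitrary assumption, not a CarbonData setting.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

public final class ExecutorShutdownSketch {
  public static void shutdownGracefully(ExecutorService executorService) {
    if (executorService == null || executorService.isShutdown()) {
      return;
    }
    executorService.shutdown();
    try {
      // give in-flight tasks a bounded chance to finish before forcing
      if (!executorService.awaitTermination(30, TimeUnit.SECONDS)) {
        executorService.shutdownNow();
      }
    } catch (InterruptedException e) {
      executorService.shutdownNow();
      Thread.currentThread().interrupt();
    }
  }
}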
/**
 * Unlock API for unlocking of the acquired lock.
 *
 * @return true if the lock was released successfully, false otherwise
 */
@Override
public boolean unlock() {
  boolean status = false;
  try {
    if (null != fileLock) {
      fileLock.release();
      status = true;
    }
  } catch (IOException e) {
    status = false;
  } finally {
    CarbonUtil.closeStreams(channel);
  }
  return status;
}
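A caller of this unlock() would typically pair acquire and release in try/finally so the channel is closed on every path. A hypothetical caller pattern, assuming the surrounding class implements CarbonData's ICarbonLock with a lockWithRetries() acquire method (an assumption; doProtectedWork is a placeholder):

void runWithLock(ICarbonLock carbonLock, Runnable doProtectedWork) {
  if (carbonLock.lockWithRetries()) { // assumed acquire API
    try {
      doProtectedWork.run();
    } finally {
      // unlock() also closes the underlying channel, so it must run
      // even when the protected work throws
      carbonLock.unlock();
    }
  }
}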
/**
 * Close the bad-record log and CSV writer streams.
 */
public synchronized void closeStreams() {
  // Remove the taskKey entry while closing the stream. This ensures the
  // task status is cleaned up even in case of a failure.
  removeBadRecordKey(taskKey);
  CarbonUtil.closeStreams(bufferedWriter, outStream, bufferedCSVWriter, outCSVStream);
}
/**
 * Below method will be used to convert the serialized encoder metadata to a
 * ValueEncoderMeta object
 *
 * @param encoderMeta serialized metadata
 * @return ValueEncoderMeta object, or null if deserialization fails
 */
public static ValueEncoderMeta deserializeEncoderMetaV2(byte[] encoderMeta) {
  // TODO : should remove the unnecessary fields.
  ByteArrayInputStream aos = null;
  ObjectInputStream objStream = null;
  ValueEncoderMeta meta = null;
  try {
    aos = new ByteArrayInputStream(encoderMeta);
    objStream = new ObjectInputStream(aos);
    meta = (ValueEncoderMeta) objStream.readObject();
  } catch (ClassNotFoundException | IOException e) {
    LOGGER.error(e);
  } finally {
    // close the streams on every path, not only on IOException,
    // to avoid leaking them when deserialization succeeds
    CarbonUtil.closeStreams(objStream, aos);
  }
  return meta;
}
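Since both streams are Closeable, an equivalent sketch using try-with-resources avoids the explicit finally entirely; failure behavior (log and return null) is kept the same, and LOGGER refers to the same logger as the method above.

public static ValueEncoderMeta deserializeEncoderMetaV2(byte[] encoderMeta) {
  // try-with-resources closes the ObjectInputStream (and with it the
  // wrapped ByteArrayInputStream) on both success and failure paths
  try (ObjectInputStream objStream =
      new ObjectInputStream(new ByteArrayInputStream(encoderMeta))) {
    return (ValueEncoderMeta) objStream.readObject();
  } catch (ClassNotFoundException | IOException e) {
    LOGGER.error(e);
    return null;
  }
}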
/**
 * Below method will be used to convert the thrift object to byte array.
 */
public static byte[] getByteArray(TBase t) {
  ByteArrayOutputStream stream = new ByteArrayOutputStream();
  byte[] thriftByteArray = null;
  TProtocol binaryOut = new TCompactProtocol(new TIOStreamTransport(stream));
  try {
    t.write(binaryOut);
    stream.flush();
    thriftByteArray = stream.toByteArray();
  } catch (TException | IOException e) {
    LOGGER.error("Error while converting to byte array from thrift object: " + e.getMessage());
  } finally {
    // close once in finally; it covers both success and failure paths
    closeStreams(stream);
  }
  return thriftByteArray;
}
public static String readHeader(String csvFilePath, Configuration hadoopConf)
    throws IOException {
  DataInputStream fileReader = null;
  BufferedReader bufferedReader = null;
  String readLine = null;
  try {
    fileReader = FileFactory.getDataInputStream(
        csvFilePath, FileFactory.getFileType(csvFilePath), -1, hadoopConf);
    bufferedReader = new BufferedReader(new InputStreamReader(fileReader,
        Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET)));
    readLine = bufferedReader.readLine();
  } finally {
    CarbonUtil.closeStreams(fileReader, bufferedReader);
  }
  return readLine;
}
/**
 * @param csvFilePath path of the CSV file to read
 * @return the first line (header) of the file
 */
public static String readHeader(String csvFilePath) throws IOException {
  DataInputStream fileReader = null;
  BufferedReader bufferedReader = null;
  String readLine = null;
  try {
    fileReader = FileFactory.getDataInputStream(csvFilePath,
        FileFactory.getFileType(csvFilePath));
    bufferedReader = new BufferedReader(new InputStreamReader(fileReader,
        Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET)));
    readLine = bufferedReader.readLine();
  } finally {
    CarbonUtil.closeStreams(fileReader, bufferedReader);
  }
  return readLine;
}
/**
 * Below method will be used to convert the byte array value to thrift object for
 * data chunk
 *
 * @param data thrift byte array
 * @param creator type of thrift
 * @return thrift object
 * @throws IOException any problem while converting the object
 */
private static TBase read(byte[] data, TBaseCreator creator, int offset, int length)
    throws IOException {
  ByteArrayInputStream stream = new ByteArrayInputStream(data, offset, length);
  TProtocol binaryIn = new TCompactProtocol(new TIOStreamTransport(stream));
  TBase t = creator.create();
  try {
    t.read(binaryIn);
  } catch (TException e) {
    throw new IOException(e);
  } finally {
    CarbonUtil.closeStreams(stream);
  }
  return t;
}
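Taken together with getByteArray above, the two helpers form a serialize/deserialize round trip. An illustrative use from within the same class (read is private here): FileFooter3 is one example of a TBase type, and the method reference assumes TBaseCreator is a single-method creator interface; both are assumptions for illustration.

FileFooter3 footer = new FileFooter3();        // any TBase type works here
byte[] bytes = getByteArray(footer);           // thrift object -> bytes
TBase restored = read(bytes, FileFooter3::new, // bytes -> thrift object
    0, bytes.length);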
protected void writeBloomDataMapFile() {
  try {
    for (int indexColId = 0; indexColId < indexColumns.size(); indexColId++) {
      CarbonBloomFilter bloomFilter = indexBloomFilters.get(indexColId);
      bloomFilter.setBlockletNo(currentBlockletId);
      // only in higher version of guava-bloom-filter, it provides readFrom/writeTo interface.
      // In lower version, we use default java serializer to write bloomfilter.
      bloomFilter.write(this.currentDataOutStreams.get(indexColId));
      this.currentDataOutStreams.get(indexColId).flush();
    }
  } catch (Exception e) {
    for (DataOutputStream dataOutputStream : currentDataOutStreams) {
      CarbonUtil.closeStreams(dataOutputStream);
    }
    throw new RuntimeException(e);
  } finally {
    resetBloomFilters();
  }
}
/**
 * Writes the segment file in json format
 *
 * @param segmentFile segment file object to serialize
 * @param path destination path of the segment file
 * @throws IOException if the write fails
 */
public static void writeSegmentFile(SegmentFile segmentFile, String path) throws IOException {
  AtomicFileOperations fileWrite = AtomicFileOperationFactory.getAtomicFileOperations(path);
  BufferedWriter brWriter = null;
  DataOutputStream dataOutputStream = null;
  Gson gsonObjectToWrite = new Gson();
  try {
    dataOutputStream = fileWrite.openForWrite(FileWriteOperation.OVERWRITE);
    brWriter = new BufferedWriter(new OutputStreamWriter(dataOutputStream,
        Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET)));
    String metadataInstance = gsonObjectToWrite.toJson(segmentFile);
    brWriter.write(metadataInstance);
    brWriter.flush();
  } catch (IOException ie) {
    LOGGER.error("Error message: " + ie.getLocalizedMessage());
    fileWrite.setFailed();
    throw ie;
  } finally {
    CarbonUtil.closeStreams(brWriter);
    fileWrite.close();
  }
}
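An illustrative call from a context that already declares throws IOException; the no-arg constructor and the path are assumptions for the example, not a layout the API mandates.

SegmentFile segmentFile = new SegmentFile();              // assumed no-arg constructor
// ... populate the segment file object here ...
writeSegmentFile(segmentFile, "/tmp/segments/0.segment"); // placeholder path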
/**
 * load bloom filter of {@code colName} from {@code shardPath}
 */
public static List<CarbonBloomFilter> loadBloomIndex(String shardPath, String colName) {
  DataInputStream dataInStream = null;
  List<CarbonBloomFilter> bloomFilters = new ArrayList<>();
  try {
    String indexFile = getBloomIndexFile(shardPath, colName);
    dataInStream = FileFactory.getDataInputStream(indexFile,
        FileFactory.getFileType(indexFile));
    while (dataInStream.available() > 0) {
      CarbonBloomFilter bloomFilter = new CarbonBloomFilter();
      bloomFilter.readFields(dataInStream);
      bloomFilter.setShardName(new Path(shardPath).getName());
      bloomFilters.add(bloomFilter);
    }
    LOGGER.info(String.format("Read %d bloom indices from %s", bloomFilters.size(), indexFile));
    return bloomFilters;
  } catch (IOException e) {
    LOGGER.error("Error occurs while reading bloom index", e);
    throw new RuntimeException("Error occurs while reading bloom index", e);
  } finally {
    CarbonUtil.closeStreams(dataInStream);
  }
}
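Reading mirrors the write path in writeBloomDataMapFile above: one filter per blocklet, in append order. An illustrative load, where the shard path, column name, and the getBlockletNo() getter are assumptions for the example:

List<CarbonBloomFilter> filters = loadBloomIndex("/tmp/datamap/shard0", "name");
for (CarbonBloomFilter filter : filters) {
  LOGGER.info("Loaded bloom filter for blocklet " + filter.getBlockletNo()); // assumed getter
}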
/**
 * Below method will be used to write data to sort temp file
 *
 * @throws CarbonSortKeyAndGroupByException problem while writing
 */
private void writeDataToFile(Object[][] recordHolderList, int entryCountLocal, File file)
    throws CarbonSortKeyAndGroupByException {
  DataOutputStream stream = null;
  try {
    // open stream
    stream = FileFactory.getDataOutputStream(file.getPath(), FileFactory.FileType.LOCAL,
        parameters.getFileWriteBufferSize(), parameters.getSortTempCompressorName());
    // write number of entries to the file
    stream.writeInt(entryCountLocal);
    for (int i = 0; i < entryCountLocal; i++) {
      sortStepRowHandler.writeRawRowAsIntermediateSortTempRowToOutputStream(
          recordHolderList[i], stream, reUsableByteArrayDataOutputStream.get());
    }
  } catch (IOException e) {
    throw new CarbonSortKeyAndGroupByException("Problem while writing the file", e);
  } finally {
    // close streams
    CarbonUtil.closeStreams(stream);
  }
}
private void initDataMapFile() throws IOException {
  if (!FileFactory.isFileExist(dataMapPath)) {
    if (!FileFactory.mkdirs(dataMapPath, FileFactory.getFileType(dataMapPath))) {
      throw new IOException("Failed to create directory " + dataMapPath);
    }
  }
  for (int indexColId = 0; indexColId < indexColumns.size(); indexColId++) {
    String dmFile = BloomIndexFileStore.getBloomIndexFile(dataMapPath,
        indexColumns.get(indexColId).getColName());
    DataOutputStream dataOutStream = null;
    try {
      FileFactory.createNewFile(dmFile, FileFactory.getFileType(dmFile));
      dataOutStream = FileFactory.getDataOutputStream(dmFile, FileFactory.getFileType(dmFile));
    } catch (IOException e) {
      CarbonUtil.closeStreams(dataOutStream);
      throw new IOException(e);
    }
    this.currentDMFiles.add(dmFile);
    this.currentDataOutStreams.add(dataOutStream);
  }
}
/**
 * Finish writing the current file. It will flush the stream, then copy and rename the
 * temp file to the final file.
 *
 * @param copyInCurrentThread set to false to do the data copy in a new thread
 */
protected void commitCurrentFile(boolean copyInCurrentThread) {
  notifyDataMapBlockEnd();
  CarbonUtil.closeStreams(this.fileOutputStream, this.fileChannel);
  if (!enableDirectlyWriteDataToStorePath) {
    try {
      if (copyInCurrentThread) {
        CarbonUtil.copyCarbonDataFileToCarbonStorePath(carbonDataFileTempPath,
            model.getCarbonDataDirectoryPath(), fileSizeInBytes);
        FileFactory.deleteFile(carbonDataFileTempPath,
            FileFactory.getFileType(carbonDataFileTempPath));
      } else {
        executorServiceSubmitList.add(
            executorService.submit(new CompleteHdfsBackendThread(carbonDataFileTempPath)));
      }
    } catch (IOException e) {
      LOGGER.error(e);
    }
  }
}
@Override
public void close(TaskAttemptContext context) throws IOException, InterruptedException {
  try {
    isClosed = true;
    // append remaining buffered data
    if (!hasException && !isFirstRow) {
      appendBlockletToDataFile();
      converter.finish();
    }
  } finally {
    // close resources
    CarbonUtil.closeStreams(outputStream);
    if (output != null) {
      output.close();
    }
    if (badRecordLogger != null) {
      badRecordLogger.closeStreams();
    }
  }
}
/**
 * write a page to sort temp file
 *
 * @param rowPage page
 * @param file file
 * @throws CarbonSortKeyAndGroupByException
 */
private void writeDataToFile(UnsafeCarbonRowPage rowPage, File file)
    throws CarbonSortKeyAndGroupByException {
  DataOutputStream stream = null;
  try {
    // open stream
    stream = FileFactory.getDataOutputStream(file.getPath(), FileFactory.FileType.LOCAL,
        parameters.getFileWriteBufferSize(), parameters.getSortTempCompressorName());
    int actualSize = rowPage.getBuffer().getActualSize();
    // write number of entries to the file
    stream.writeInt(actualSize);
    for (int i = 0; i < actualSize; i++) {
      rowPage.writeRow(
          rowPage.getBuffer().get(i) + rowPage.getDataBlock().getBaseOffset(), stream);
    }
  } catch (IOException | MemoryException e) {
    throw new CarbonSortKeyAndGroupByException("Problem while writing the file", e);
  } finally {
    // close streams
    CarbonUtil.closeStreams(stream);
  }
}