@Override
public ITupleReference add(ITupleReference tuple) throws HyracksDataException {
    // Hand the tuple straight to the underlying bulk loader; the caller's
    // tuple reference is returned unchanged.
    bulkLoader.add(tuple);
    return tuple;
}
@Override
public ITupleReference add(ITupleReference tuple) throws HyracksDataException {
    // Pure pass-through: forward the insert to the component bulk loader
    // and echo the same reference back to the caller.
    bulkLoader.add(tuple);
    return tuple;
}
@Override public ITupleReference delete(ITupleReference tuple) throws HyracksDataException { // this ensure deleted keys are also added to the bulkloader bulkLoader.add(tuple); return tuple; }
@Override
public ITupleReference add(ITupleReference tuple) throws HyracksDataException {
    // Delegate to the bulk loader and return the input tuple as-is.
    bulkLoader.add(tuple);
    return tuple;
}
@Override public ITupleReference delete(ITupleReference tuple) throws HyracksDataException { try { buddyBTreeBulkLoader.add(tuple); } catch (HyracksDataException e) { //deleting a key multiple times is OK if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) { cleanupArtifacts(); throw e; } } return tuple; }
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    // Point the accessor at the incoming frame and feed every tuple it
    // contains to the bulk loader, reusing the single tuple reference.
    accessor.reset(buffer);
    final int count = accessor.getTupleCount();
    for (int t = 0; t < count; t++) {
        tuple.reset(accessor, t);
        bulkLoader.add(tuple);
    }
}
@Override
public ITupleReference delete(ITupleReference tuple) throws HyracksDataException {
    // Deletes are loaded as antimatter tuples: switch the shared leaf-frame
    // tuple writer into antimatter mode just for this add, and restore it in
    // a finally block so a failed add cannot leave the flag set.
    ILSMTreeTupleWriter writer =
            (ILSMTreeTupleWriter) ((AbstractTreeIndexBulkLoader) bulkLoader).getLeafFrame().getTupleWriter();
    writer.setAntimatter(true);
    try {
        bulkLoader.add(tuple);
    } finally {
        writer.setAntimatter(false);
    }
    return tuple;
}
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    accessor.reset(buffer);
    // Bulk-load each tuple of the frame, then forward the frame downstream
    // unchanged so the next operator also sees it.
    for (int idx = 0, n = accessor.getTupleCount(); idx < n; idx++) {
        tuple.reset(accessor, idx);
        bulkLoader.add(tuple);
    }
    FrameUtils.flushFrame(buffer, writer);
}
/**
 * Bulk-loads one integer tuple per key in the inclusive range [begin, end].
 *
 * @param begin first key to load (inclusive)
 * @param end   last key to load (inclusive)
 * @throws IllegalArgumentException if end &lt; begin
 * @throws Exception                if tuple creation or the bulk load fails
 */
private void bulkloadIntTupleRange(int begin, int end) throws Exception {
    if (end < begin) {
        throw new IllegalArgumentException("Invalid range: [" + begin + ", " + end + "]");
    }
    // The range is inclusive on both ends, so it contains (end - begin + 1)
    // keys; the previous hint of (end - begin) under-counted by one tuple.
    IIndexBulkLoader bulkloader = index.createBulkLoader(1.0f, false, end - begin + 1, true);
    for (int i = begin; i <= end; i++) {
        TupleUtils.createIntegerTuple(builder, tuple, i);
        bulkloader.add(tuple);
    }
    bulkloader.end();
}
protected void insertBTreeTuple() throws HyracksDataException { // Build tuple. DataOutput output = btreeTupleBuilder.getDataOutput(); // Add inverted-list 'pointer' value fields. try { output.writeInt(currentInvListStartPageId); btreeTupleBuilder.addFieldEndOffset(); output.writeInt(currentPageId); btreeTupleBuilder.addFieldEndOffset(); output.writeInt(currentInvListStartOffset); btreeTupleBuilder.addFieldEndOffset(); output.writeInt(invListBuilder.getListSize()); btreeTupleBuilder.addFieldEndOffset(); } catch (IOException e) { throw HyracksDataException.create(e); } // Reset tuple reference and add it into the BTree load. btreeTupleReference.reset(btreeTupleBuilder.getFieldEndOffsets(), btreeTupleBuilder.getByteArray()); btreeBulkloader.add(btreeTupleReference); btreeTupleBuilder.reset(); }
public static void bulkLoadCheckTuples(IIndexTestContext ctx, Collection<CheckTuple> checkTuples, boolean filtered) throws HyracksDataException { int fieldCount = ctx.getFieldCount(); int numTuples = checkTuples.size(); ArrayTupleBuilder tupleBuilder = filtered ? new ArrayTupleBuilder(fieldCount + 1) : new ArrayTupleBuilder(fieldCount); ArrayTupleReference tuple = new ArrayTupleReference(); // Perform bulk load. IIndexBulkLoader bulkLoader = ctx.getIndex().createBulkLoader(0.7f, false, numTuples, false); int c = 1; for (CheckTuple checkTuple : checkTuples) { if (LOGGER.isInfoEnabled()) { //if (c % (numTuples / 10) == 0) { LOGGER.info("Bulk Loading Tuple " + c + "/" + numTuples); //} } createTupleFromCheckTuple(checkTuple, tupleBuilder, tuple, ctx.getFieldSerdes(), filtered); bulkLoader.add(tuple); c++; } bulkLoader.end(); }
@Override
public long runExperiment(DataGenThread dataGen, int numThreads) throws Exception {
    btree.create();
    // Time the whole load: bulk-loader creation through end().
    final long startMillis = System.currentTimeMillis();
    IIndexBulkLoader loader = btree.createBulkLoader(1.0f, false, 0L, true);
    for (int batchNum = 0; batchNum < numBatches; batchNum++) {
        // Blocks until the data generator hands over the next batch.
        TupleBatch batch = dataGen.tupleBatchQueue.take();
        for (int t = 0; t < batch.size(); t++) {
            loader.add(batch.get(t));
        }
    }
    loader.end();
    return System.currentTimeMillis() - startMillis;
}
}
@Override public void initialize() throws HyracksDataException { IIndexBuilder indexBuilder = indexBuilderFactory.create(ctx, partition); IIndexDataflowHelper indexHelper = dataflowHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition); FileIndexTupleTranslator filesTupleTranslator = new FileIndexTupleTranslator(); // Build the index indexBuilder.build(); // Open the index indexHelper.open(); try { ILSMIndex index = (ILSMIndex) indexHelper.getIndexInstance(); Map<String, Object> parameters = new HashMap<>(); parameters.put(LSMIOOperationCallback.KEY_FLUSHED_COMPONENT_ID, LSMComponentId.DEFAULT_COMPONENT_ID); // Create bulk loader IIndexBulkLoader bulkLoader = index.createBulkLoader(BTree.DEFAULT_FILL_FACTOR, false, files.size(), false, parameters); // Load files for (ExternalFile file : files) { bulkLoader.add(filesTupleTranslator.getTupleFromFile(file)); } bulkLoader.end(); } finally { indexHelper.close(); } }
bulkLoader.add(tuple); } catch (HyracksDataException e) { if (e.getErrorCode() == ErrorCode.UNSORTED_LOAD_INPUT || e.getErrorCode() == ErrorCode.DUPLICATE_KEY
bulkLoader.add(tuple);
for (int i = 0; i < ins; i++) { TupleUtils.createIntegerTuple(tb, tuple, i, i, 5); bulkLoader.add(tuple);