/**
 * Appends the contents of the given {@link BytesRef} to this structure:
 * the entry's size is registered first, then its bytes are transferred
 * into the backing storage {@code data}.
 *
 * @param from source bytes; its length is recorded as a new entry
 */
public void copyFrom(BytesRef from) {
    // Record the entry size before moving the payload into the backing array.
    final int entrySize = from.length();
    addEntry(entrySize);
    from.copyTo(data);
}
/**
 * Renders the tracked bytes as a String by materializing them into a
 * temporary {@link BytesArray} sized to exactly {@code length()} bytes.
 */
@Override  // overrides Object.toString — annotation was missing
public String toString() {
    BytesArray ba = new BytesArray(length());
    copyTo(ba);
    return ba.toString();
}
}
if (payload.length() > ba.available()) { if (autoFlush) { flush();
/**
 * Verifies that a configured serialization error handler can repair a
 * failing bulk entry: the first write attempt (value 1) throws, the
 * {@code CorrectingHandler} substitutes corrected data (value 10), and
 * the retried write returns the mocked response intact.
 */
@Test
public void testWriteBulkEntryWithHandlersThatCorrectsData() {
    BytesRef response = new BytesRef();
    response.add("abcdefg".getBytes());

    BulkCommand command = Mockito.mock(BulkCommand.class);
    // First attempt fails; the handler-corrected retry succeeds with the canned response.
    Mockito.when(command.write(1)).thenThrow(new EsHadoopIllegalStateException("Things broke"));
    Mockito.when(command.write(10)).thenReturn(response);

    Settings settings = new TestSettings();
    settings.setProperty(SerializationHandlerLoader.ES_WRITE_DATA_ERROR_HANDLERS, "fix");
    settings.setProperty(SerializationHandlerLoader.ES_WRITE_DATA_ERROR_HANDLER + ".fix", CorrectingHandler.class.getName());

    BulkEntryWriter bulkEntryWriter = new BulkEntryWriter(settings, command);
    BytesRef value = bulkEntryWriter.writeBulkEntry(1);

    // BUGFIX: the original failure message read "Skipped values should be null"
    // while asserting non-null; a corrected entry is expected to be present.
    Assert.assertNotNull("Corrected values should not be null", value);
    Assert.assertEquals(7, response.length());
    Assert.assertArrayEquals("abcdefg".getBytes(), response.toString().getBytes());
}
data.copyFrom(newEntry); if (ba.available() < newEntry.length()) { trackingArrayExpanded = true;
/**
 * Appends the contents of the given {@link BytesRef} to this structure:
 * the entry's size is registered first, then its bytes are transferred
 * into the backing storage {@code data}.
 *
 * @param from source bytes; its length is recorded as a new entry
 */
public void copyFrom(BytesRef from) {
    // Record the entry size before moving the payload into the backing array.
    final int entrySize = from.length();
    addEntry(entrySize);
    from.copyTo(data);
}
/**
 * Appends the contents of the given {@link BytesRef} to this structure:
 * the entry's size is registered first, then its bytes are transferred
 * into the backing storage {@code data}.
 *
 * @param from source bytes; its length is recorded as a new entry
 */
public void copyFrom(BytesRef from) {
    // Record the entry size before moving the payload into the backing array.
    final int entrySize = from.length();
    addEntry(entrySize);
    from.copyTo(data);
}
/**
 * Appends the contents of the given {@link BytesRef} to this structure:
 * the entry's size is registered first, then its bytes are transferred
 * into the backing storage {@code data}.
 *
 * @param from source bytes; its length is recorded as a new entry
 */
public void copyFrom(BytesRef from) {
    // Record the entry size before moving the payload into the backing array.
    final int entrySize = from.length();
    addEntry(entrySize);
    from.copyTo(data);
}
/**
 * Appends the contents of the given {@link BytesRef} to this structure:
 * the entry's size is registered first, then its bytes are transferred
 * into the backing storage {@code data}.
 *
 * @param from source bytes; its length is recorded as a new entry
 */
public void copyFrom(BytesRef from) {
    // Record the entry size before moving the payload into the backing array.
    final int entrySize = from.length();
    addEntry(entrySize);
    from.copyTo(data);
}
/**
 * Renders the tracked bytes as a String by materializing them into a
 * temporary {@link BytesArray} sized to exactly {@code length()} bytes.
 */
@Override  // overrides Object.toString — annotation was missing
public String toString() {
    BytesArray ba = new BytesArray(length());
    copyTo(ba);
    return ba.toString();
}
}
/**
 * Renders the tracked bytes as a String by materializing them into a
 * temporary {@link BytesArray} sized to exactly {@code length()} bytes.
 */
@Override  // overrides Object.toString — annotation was missing
public String toString() {
    BytesArray ba = new BytesArray(length());
    copyTo(ba);
    return ba.toString();
}
}
/**
 * Renders the tracked bytes as a String by materializing them into a
 * temporary {@link BytesArray} sized to exactly {@code length()} bytes.
 */
@Override  // overrides Object.toString — annotation was missing
public String toString() {
    BytesArray ba = new BytesArray(length());
    copyTo(ba);
    return ba.toString();
}
}
/**
 * Renders the tracked bytes as a String by materializing them into a
 * temporary {@link BytesArray} sized to exactly {@code length()} bytes.
 */
@Override  // overrides Object.toString — annotation was missing
public String toString() {
    BytesArray ba = new BytesArray(length());
    copyTo(ba);
    return ba.toString();
}
}
if (payload.length() > ba.available()) { if (autoFlush) { flush();
if (payload.length() > ba.available()) { if (autoFlush) { flush();
if (payload.length() > ba.available()) { if (autoFlush) { flush();
/**
 * Buffers one serialized bulk entry, flushing (or failing) when capacity limits are hit.
 * <p>
 * Order matters here: capacity is checked (and possibly flushed) BEFORE the copy,
 * and the entry counter/threshold check runs AFTER the copy.
 *
 * @param payload serialized entry to append; it is reset (recycled) after being copied
 * @throws EsHadoopIllegalStateException if auto-flush is disabled and either the byte
 *         buffer or the entry-count threshold is exceeded
 */
private void doWriteToIndex(BytesRef payload) {
    // check space first
    // ba is the backing array for data
    if (payload.length() > ba.available()) {
        if (autoFlush) {
            flush();
        }
        else {
            throw new EsHadoopIllegalStateException(
                    String.format("Auto-flush disabled and bulk buffer full; disable manual flush or increase capacity [current size %s]; bailing out", ba.capacity()));
        }
    }

    // Copy the entry into the tracking buffer, then recycle the payload holder.
    data.copyFrom(payload);
    payload.reset();

    dataEntries++;
    if (bufferEntriesThreshold > 0 && dataEntries >= bufferEntriesThreshold) {
        if (autoFlush) {
            flush();
        }
        else {
            // handle the corner case of manual flush that occurs only after the buffer is completely full (think size of 1)
            if (dataEntries > bufferEntriesThreshold) {
                throw new EsHadoopIllegalStateException(
                        String.format(
                                "Auto-flush disabled and maximum number of entries surpassed; disable manual flush or increase capacity [current size %s]; bailing out",
                                bufferEntriesThreshold));
            }
        }
    }
}
data.copyFrom(newEntry); if (ba.available() < newEntry.length()) { trackingArrayExpanded = true;
data.copyFrom(newEntry); if (ba.available() < newEntry.length()) { trackingArrayExpanded = true;
data.copyFrom(newEntry); if (ba.available() < newEntry.length()) { trackingArrayExpanded = true;