private void checkBlock(byte[] data, boolean newAlgo) throws CHKEncodeException, InvalidCompressionCodecException, CHKVerifyException, CHKDecodeException, IOException { byte cryptoAlgorithm = newAlgo ? Key.ALGO_AES_CTR_256_SHA256 : Key.ALGO_AES_PCFB_256_SHA256; byte[] copyOfData = new byte[data.length]; System.arraycopy(data, 0, copyOfData, 0, data.length); ClientCHKBlock encodedBlock = ClientCHKBlock.encode(new ArrayBucket(data), false, false, (short)-1, data.length, null, false, null, cryptoAlgorithm); // Not modified in-place. assert(Arrays.equals(data, copyOfData)); ClientCHK key = encodedBlock.getClientKey(); if(newAlgo) { // Check with no JCA. ClientCHKBlock otherEncodedBlock = ClientCHKBlock.encode(new ArrayBucket(data), false, false, (short)-1, data.length, null, false, null, cryptoAlgorithm, true); assertTrue(key.equals(otherEncodedBlock.getClientKey())); assertTrue(Arrays.equals(otherEncodedBlock.getBlock().data, encodedBlock.getBlock().data)); assertTrue(Arrays.equals(otherEncodedBlock.getBlock().headers, encodedBlock.getBlock().headers)); } // Verify it. CHKBlock block = CHKBlock.construct(encodedBlock.getBlock().data, encodedBlock.getBlock().headers, cryptoAlgorithm); ClientCHKBlock checkBlock = new ClientCHKBlock(block, key); ArrayBucket checkData = (ArrayBucket) checkBlock.decode(new ArrayBucketFactory(), data.length, false); assert(Arrays.equals(checkData.toByteArray(), data)); if(newAlgo) { checkData = (ArrayBucket) checkBlock.decode(new ArrayBucketFactory(), data.length, false, true); assert(Arrays.equals(checkData.toByteArray(), data)); } }
/**
 * Encode each splitfile block and collect the resulting client keys, in order.
 *
 * @param blocks raw block payloads, one per key
 * @param cryptoKey the splitfile-level crypto key passed through to the encoder
 * @param cryptoAlgorithm the crypto algorithm identifier passed through to the encoder
 * @return one {@link ClientCHK} per input block, in the same order
 * @throws CHKEncodeException if any block fails to encode
 */
public static ClientCHK[] makeKeys(byte[][] blocks, byte[] cryptoKey, byte cryptoAlgorithm) throws CHKEncodeException {
    ClientCHK[] result = new ClientCHK[blocks.length];
    int index = 0;
    for(byte[] blockData : blocks) {
        result[index++] = ClientCHKBlock.encodeSplitfileBlock(blockData, cryptoKey, cryptoAlgorithm).getClientKey();
    }
    return result;
}
/**
 * Decode this block into a Bucket, dispatching on the key's crypto algorithm.
 * For the CTR algorithm the JCA-backed implementation is used when available,
 * unless {@code forceNoJCA} requests the built-in fallback.
 *
 * @param forceNoJCA if true, always use the non-JCA decode path for the CTR algorithm
 * @throws UnsupportedOperationException for any other crypto algorithm
 */
Bucket decode(BucketFactory bf, int maxLength, boolean dontCompress, boolean forceNoJCA) throws CHKDecodeException, IOException {
    byte algo = key.cryptoAlgorithm;
    if(algo == Key.ALGO_AES_PCFB_256_SHA256) {
        return decodeOld(bf, maxLength, dontCompress);
    }
    if(algo == Key.ALGO_AES_CTR_256_SHA256) {
        boolean useFallback = (Rijndael.AesCtrProvider == null) || forceNoJCA;
        return useFallback
                ? decodeNewNoJCA(bf, maxLength, dontCompress)
                : decodeNew(bf, maxLength, dontCompress);
    }
    throw new UnsupportedOperationException();
}
/**
 * Encode check block {@code i} as a CHK block.
 *
 * @param i index into the check-block array
 * @throws CHKEncodeException if encoding fails
 */
public CHKBlock encodeCheckBlock(int i) throws CHKEncodeException {
    ClientCHKBlock encoded = ClientCHKBlock.encodeSplitfileBlock(checkBlocks[i], cryptoKey, cryptoAlgorithm);
    return encoded.getBlock();
}
private void checkRAMStore(boolean newFormat) throws IOException, CHKEncodeException, CHKVerifyException, CHKDecodeException { CHKStore store = new CHKStore(); new RAMFreenetStore<CHKBlock>(store, 10); // Encode a block String test = "test"; ClientCHKBlock block = encodeBlock(test, newFormat); store.put(block.getBlock(), false); ClientCHK key = block.getClientKey(); CHKBlock verify = store.fetch(key.getNodeCHK(), false, false, null); String data = decodeBlock(verify, key); assertEquals(test, data); }
// NOTE(review): this is a fragment from inside a larger method/loop — `i`, `keys` and
// `dataString` are declared outside the visible span, so it is documented in place only.
// Encode dataString as a CHK block, then immediately rebuild and memory-decode it as a
// sanity check, recording the key in keys[i] and logging the CHK URI and headers.
byte[] data = dataString.getBytes("UTF-8"); ClientCHKBlock b; b = ClientCHKBlock.encode(data, false, false, (short)-1, 0, COMPRESSOR_TYPE.DEFAULT_COMPRESSORDESCRIPTOR, false); CHKBlock block = b.getBlock(); ClientCHK chk = b.getClientKey(); byte[] encData = block.getData(); byte[] encHeaders = block.getHeaders(); ClientCHKBlock newBlock = new ClientCHKBlock(encData, encHeaders, chk, true); keys[i] = chk; Logger.minor(RealNodeRequestInsertTest.class, "Decoded: "+new String(newBlock.memoryDecode(), "UTF-8")); Logger.normal(RealNodeRequestInsertTest.class,"CHK: "+chk.getURI()); Logger.minor(RealNodeRequestInsertTest.class,"Headers: "+HexUtil.bytesToHex(block.getHeaders()));
/**
 * Decode a verified CHK block back into its UTF-8 string payload.
 *
 * @param verify the raw block fetched from the store
 * @param key the client key used to decrypt/decode the block
 * @return the decoded payload as a UTF-8 string
 */
private String decodeBlock(CHKBlock verify, ClientCHK key) throws CHKVerifyException, CHKDecodeException, IOException {
    ClientCHKBlock clientBlock = new ClientCHKBlock(verify, key);
    Bucket decoded = clientBlock.decode(new ArrayBucketFactory(), 32768, false);
    byte[] bytes = BucketTools.toByteArray(decoded);
    return new String(bytes, "UTF-8");
}
// NOTE(review): this span appears to fuse the bodies of TWO separate loops from a store
// test (`String test` and `block` are each declared twice); documented in place only.
// First part: encode "test"+i, put it in the store, assert the salted store does not yet
// have it, and queue the test string and block. Second part: pop the oldest queued entry,
// fetch it back by node key, decode, compare, and assert the salted store now has it.
String test = "test" + i; ClientCHKBlock block = encodeBlockCHK(test); store.put(block.getBlock(), false); assertEquals(saltStore.fetch(block.getKey().getRoutingKey(), block.getKey().getFullKey(), false, false, false, false, null), null); tests.add(test); chkBlocks.add(block); String test = tests.remove(0); //get the first element ClientCHKBlock block = chkBlocks.remove(0); //get the first element ClientCHK key = block.getClientKey(); CHKBlock verify = store.fetch(key.getNodeCHK(), false, false, null); String data = decodeBlockCHK(verify, key); assertEquals(test, data); assertNotNull(saltStore.fetch(block.getKey().getRoutingKey(), block.getKey().getFullKey(), false, false, false, false, null));
/**
 * Encode each data block and record its client key at position {@code i + offset}.
 *
 * @param dataBlocks the raw blocks to encode
 * @param offset index of the first block within the overall key table
 * @throws IOException if encoding a block fails with an I/O error
 */
private void generateKeys(byte[][] dataBlocks, int offset) throws IOException {
    int slot = offset;
    for(byte[] blockData : dataBlocks) {
        setKey(slot, encodeBlock(blockData).getClientKey());
        slot++;
    }
}
/**
 * Encode a Bucket of data to a CHKBlock, using the JCA implementation when available.
 *
 * This is a convenience overload that delegates to the full {@code encode} with
 * {@code forceNoJCA = false}.
 *
 * @param sourceData the bucket of data to encode; may be arbitrarily large
 * @param asMetadata whether this is a metadata key
 * @param dontCompress if set, do not attempt compression
 * @param alreadyCompressedCodec if not compressing and this is &gt;= 0, the data is
 *        already compressed with this codec
 * @param sourceLength length of the source data
 * @param compressorDescriptor optional list of compressors to try
 * @param pre1254 legacy-format flag passed through to the encoder
 * @param cryptoKey optional crypto key; null to derive one
 * @param cryptoAlgorithm the crypto algorithm identifier
 * @throws CHKEncodeException if encoding fails
 * @throws IOException if reading from the Bucket fails
 */
static public ClientCHKBlock encode(Bucket sourceData, boolean asMetadata, boolean dontCompress, short alreadyCompressedCodec, long sourceLength, String compressorDescriptor, boolean pre1254, byte[] cryptoKey, byte cryptoAlgorithm) throws CHKEncodeException, IOException {
    return encode(sourceData, asMetadata, dontCompress, alreadyCompressedCodec, sourceLength,
            compressorDescriptor, pre1254, cryptoKey, cryptoAlgorithm, false);
}
public static ClientKeyBlock createKeyBlock(ClientKey key, KeyBlock block) throws KeyVerifyException { if(key instanceof ClientSSK) return ClientSSKBlock.construct((SSKBlock)block, (ClientSSK)key); else //if(key instanceof ClientCHK return new ClientCHKBlock((CHKBlock)block, (ClientCHK)key); }
// NOTE(review): fragment — the catch block is not closed within the visible span and
// `decodedBlock`, `block` and `decodeKey` are declared outside it; documented in place only.
// Wrap the raw block with its client key and decode it in memory; on verification
// failure, log the offending key.
byte[] decodedData; try { decodedBlock = new ClientCHKBlock(block, decodeKey); decodedData = decodedBlock.memoryDecode(); } catch (CHKVerifyException e) { Logger.error(this, "Verify failed on block for "+decodeKey);
/**
 * Decode the CHK and recover the original data.
 *
 * Delegates to the four-argument overload with {@code forceNoJCA = false}, so the
 * JCA implementation is used when available.
 *
 * @return the original data
 * @throws IOException if there is a bucket error
 */
@Override
public Bucket decode(BucketFactory bf, int maxLength, boolean dontCompress) throws CHKDecodeException, IOException {
    boolean forceNoJCA = false;
    return decode(bf, maxLength, dontCompress, forceNoJCA);
}
ClientCHKBlock encodeBlock(byte[] buf) { assert (buf.length == CHKBlock.DATA_LENGTH); ClientCHKBlock block; try { block = ClientCHKBlock.encodeSplitfileBlock(buf, splitfileCryptoKey, splitfileCryptoAlgorithm); } catch (CHKEncodeException e) { throw new Error(e); // Impossible! } return block; }
/**
 * Validate a block produced by cross-segment decoding/encoding: re-encode the data,
 * check its key matches the expected key for slot {@code i}, and either report the
 * block to its segment or fail the fetch off-thread.
 *
 * @param i block index; indices &gt;= dataBlockCount are check ("Encoded") blocks
 * @param data the decoded (or encoded) block payload
 */
private void checkDecodedBlock(int i, byte[] data) {
    ClientCHK key = getKey(i);
    if(key == null) {
        Logger.error(this, "Key not found");
        failOffThread(new FetchException(FetchExceptionMode.INTERNAL_ERROR, "Key not found"));
        return;
    }
    ClientCHKBlock block = encodeBlock(key, data);
    String decoded = i >= dataBlockCount ? "Encoded" : "Decoded";
    if(block == null || !key.getNodeCHK().equals(block.getKey())) {
        Logger.error(this, decoded+" cross-segment block "+i+" failed!");
        failOffThread(new FetchException(FetchExceptionMode.SPLITFILE_DECODE_ERROR, decoded+" cross-segment block does not match expected key"));
        return;
    }
    // Redundant else removed: the failure path above already returns.
    reportBlockToSegmentOffThread(i, key, block, data);
}
/** Add a random block that has not been added already or decoded already.
 *
 * Picks a random fetcher segment that is still decoding, then a random block within it
 * that the segment does not yet have, re-encodes that block from the inserter storage,
 * and feeds it to the segment via onGotKey.
 *
 * NOTE(review): the inner while(true) retries random block numbers until it finds one
 * the segment lacks — this assumes a not-decoding segment always has at least one
 * missing block (presumably guaranteed by isDecodingOrFinished); confirm, otherwise it
 * could spin. Similarly the outer while(true) assumes not every segment is exhausted
 * while i < segCount.
 *
 * @return true if a block was added, false if every segment was already decoding/finished
 * @throws IOException */ private boolean addRandomBlock(SplitFileInserterStorage storage, SplitFileFetcherStorage fetcherStorage, Random random) throws IOException { int segCount = storage.segments.length; boolean[] exhaustedSegments = new boolean[segCount]; for(int i=0;i<segCount;i++) { while(true) { int segNo = random.nextInt(segCount); if(exhaustedSegments[segNo]) continue; SplitFileFetcherSegmentStorage segment = fetcherStorage.segments[segNo]; if(segment.isDecodingOrFinished()) { exhaustedSegments[segNo] = true; break; } while(true) { int blockNo = random.nextInt(segment.totalBlocks()); if(segment.hasBlock(blockNo)) { continue; } ClientCHKBlock block = storage.segments[segNo].encodeBlock(blockNo); boolean success = segment.onGotKey(block.getClientKey().getNodeCHK(), block.getBlock()); assertTrue(success); return true; } } } return false; }
/**
 * Decode a verified CHK block back into its UTF-8 string payload.
 *
 * @param verify the raw block fetched from the store
 * @param key the client key used to decrypt/decode the block
 * @return the decoded payload as a UTF-8 string
 */
private String decodeBlockCHK(CHKBlock verify, ClientCHK key) throws CHKVerifyException, CHKDecodeException, IOException {
    ClientCHKBlock clientBlock = new ClientCHKBlock(verify, key);
    Bucket decoded = clientBlock.decode(new ArrayBucketFactory(), 32768, false);
    byte[] bytes = BucketTools.toByteArray(decoded);
    return new String(bytes, "UTF-8");
}
// NOTE(review): this span appears to fuse the bodies of TWO separate loops from a store
// test (`block` and `test` are each declared twice); documented in place only.
// First part: encode the test string, assert the salted store does not yet have it,
// put it in the store, and queue the string and block. Second part: pop the oldest
// queued entry, fetch it back by node key, and decode it.
ClientCHKBlock block = encodeBlockCHK(test); assertEquals(saltStore.fetch(block.getKey().getRoutingKey(), block.getKey().getFullKey(), false, false, false, false, null), null); store.put(block.getBlock(), false); tests.add(test); chkBlocks.add(block); String test = tests.remove(0); //get the first element ClientCHKBlock block = chkBlocks.remove(0); //get the first element ClientCHK key = block.getClientKey(); CHKBlock verify = store.fetch(key.getNodeCHK(), false, false, null); String data = decodeBlockCHK(verify, key);
// Write a check block to the parent storage; when DEBUG_ENCODE is set, also encode the
// block immediately and record its key for cross-checking.
// NOTE(review): `segments[checkBlockNo + dataBlockCount]` and
// `blockNumbers[checkBlockNo + dataBlockCount]` index by block number within a segment
// layout not visible here — confirm these arrays are indexed by (data+check) block
// offset and not by segment number.
private void writeCheckBlock(int checkBlockNo, byte[] buf) throws IOException { parent.writeCheckBlock(segNo, checkBlockNo, buf); if(DEBUG_ENCODE) { SplitFileInserterSegmentStorage segment = segments[checkBlockNo + dataBlockCount]; ClientCHK key = segment.encodeBlock(buf).getClientKey(); segment.setKey(blockNumbers[checkBlockNo + dataBlockCount], key); } }
/** * Encode a block of data to a CHKBlock. * @param sourceData The data to encode. * @param asMetadata Is this a metadata key? * @param dontCompress If set, don't even try to compress. * @param alreadyCompressedCodec If !dontCompress, and this is >=0, then the * data is already compressed, and this is the algorithm. * @param compressorDescriptor Should be null, or list of compressors to try. * @throws InvalidCompressionCodecException */ static public ClientCHKBlock encode(byte[] sourceData, boolean asMetadata, boolean dontCompress, short alreadyCompressedCodec, int sourceLength, String compressorDescriptor, boolean pre1254) throws CHKEncodeException, InvalidCompressionCodecException { try { return encode(new ArrayBucket(sourceData), asMetadata, dontCompress, alreadyCompressedCodec, sourceLength, compressorDescriptor, pre1254, null, Key.ALGO_AES_CTR_256_SHA256); } catch (IOException e) { // Can't happen throw new Error(e); } }