/**
 * Recreate a {@link HollowWriteStateEngine} which can be used to write a snapshot of, or continue
 * a delta chain from, the supplied {@link HollowReadStateEngine}.
 * <p>
 * The returned state engine is ready to write a snapshot which exactly recreates the data in the
 * supplied {@link HollowReadStateEngine}. A delta chain may be continued from this state by
 * calling {@link HollowWriteStateEngine#prepareForNextCycle()}.
 *
 * @param readEngine the read state engine to recreate
 * @return a write state engine populated with the read engine's data model and records
 */
public static HollowWriteStateEngine recreateAndPopulateUsingReadEngine(final HollowReadStateEngine readEngine) {
    final HollowWriteStateEngine recreated = new HollowWriteStateEngine();
    // First mirror the data model, then copy the records over.
    populateStateEngineWithTypeWriteStates(recreated, readEngine.getSchemas());
    populateUsingReadEngine(recreated, readEngine);
    return recreated;
}
/**
 * @param director a {@link HollowCombinerCopyDirector} which will specify which specific records to copy from the input(s).
 * @param inputs the set of {@link HollowReadStateEngine} to combine data from.
 */
public HollowCombiner(HollowCombinerCopyDirector director, HollowReadStateEngine... inputs) {
    // Delegates to the full constructor, creating an output write engine pre-populated with the
    // schemas of the first input. Assumes all inputs share the same data model and that at least
    // one input is supplied -- an empty varargs array fails here with an
    // ArrayIndexOutOfBoundsException. NOTE(review): consider validating inputs is non-empty.
    this(director, HollowWriteStateCreator.createWithSchemas(inputs[0].getSchemas()), inputs);
}
// Restores the producer to the desired version by fully repopulating the write state from the
// restored read state, rather than only re-mapping ordinals.
// NOTE(review): the lambda's arguments are (restoreFrom, restoreTo) -- the read engine being
// restored from and the write engine being populated; note the swapped order in the call to
// populateUsingReadEngine(writeEngine, readEngine).
HollowProducer.ReadState hardRestore(long versionDesired, HollowConsumer.BlobRetriever blobRetriever) {
    return restore(versionDesired, blobRetriever,
            (restoreFrom, restoreTo) -> HollowWriteStateCreator.populateUsingReadEngine(restoreTo, restoreFrom));
}
/**
 * Creates a new {@link HollowWriteStateEngine} whose type states match the supplied schemas.
 *
 * @param schemas The schemas from the data model
 * @return a write state engine which is pre-populated with the specified data model.
 */
public static HollowWriteStateEngine createWithSchemas(Collection<HollowSchema> schemas) {
    HollowWriteStateEngine engine = new HollowWriteStateEngine();
    populateStateEngineWithTypeWriteStates(engine, schemas);
    return engine;
}
break; case addSchemaFileToDataModel: HollowWriteStateCreator.readSchemaFileIntoWriteState(arg.getValue(), engine); break; case pathToGeneratedFiles:
/**
 * Verifies that a write engine recreated from a read engine round-trips to a byte-identical
 * state: same checksum, same header tags, and same randomized tag.
 */
@Test
public void recreatesUsingReadEngine() throws IOException {
    HollowWriteStateEngine writeEngine = new HollowWriteStateEngine();
    HollowObjectMapper mapper = new HollowObjectMapper(writeEngine);
    // Integer.valueOf instead of the deprecated boxing constructor new Integer(1).
    mapper.add(Integer.valueOf(1));
    writeEngine.addHeaderTag("CopyTag", "copied");

    HollowReadStateEngine readEngine = StateEngineRoundTripper.roundTripSnapshot(writeEngine);
    HollowWriteStateEngine recreatedWriteEngine = HollowWriteStateCreator.recreateAndPopulateUsingReadEngine(readEngine);
    HollowReadStateEngine recreatedReadEngine = StateEngineRoundTripper.roundTripSnapshot(recreatedWriteEngine);

    // The recreated state must be indistinguishable from the original.
    Assert.assertEquals(HollowChecksum.forStateEngine(readEngine), HollowChecksum.forStateEngine(recreatedReadEngine));
    Assert.assertEquals("copied", recreatedReadEngine.getHeaderTag("CopyTag"));
    Assert.assertEquals(readEngine.getCurrentRandomizedTag(), recreatedReadEngine.getCurrentRandomizedTag());
}
/**
 * Verifies that populating an already-populated write state engine fails with
 * an {@link IllegalStateException}.
 */
@Test
public void throwsExceptionIfWriteStateIsPopulated() throws IOException {
    HollowWriteStateEngine writeEngine = new HollowWriteStateEngine();
    HollowObjectMapper mapper = new HollowObjectMapper(writeEngine);
    // Integer.valueOf instead of the deprecated boxing constructor new Integer(1).
    mapper.add(Integer.valueOf(1));

    HollowReadStateEngine readEngine = StateEngineRoundTripper.roundTripSnapshot(writeEngine);

    try {
        // writeEngine already holds records, so repopulating it must be rejected.
        HollowWriteStateCreator.populateUsingReadEngine(writeEngine, readEngine);
        Assert.fail("Expected IllegalStateException for already-populated write state");
    } catch (IllegalStateException expected) {
        // expected: populate requires an empty write state
    }
}
/**
 * Reads a schema file into the provided HollowWriteStateEngine. The schema file must be on the classpath.
 *
 * @param schemaFilePath the classpath location of the schema file
 * @param engine the write state engine to populate with the parsed type states
 * @throws IOException if the schema file could not be found on the classpath or could not be read
 */
public static void readSchemaFileIntoWriteState(String schemaFilePath, HollowWriteStateEngine engine)
        throws IOException {
    // try-with-resources guarantees the stream is closed even if parsing fails
    // (replaces the manual try/finally close).
    try (InputStream input = HollowWriteStateCreator.class.getClassLoader().getResourceAsStream(schemaFilePath)) {
        // getResourceAsStream returns null (rather than throwing) when the resource is absent;
        // fail fast with a descriptive message instead of an opaque NullPointerException.
        if (input == null) {
            throw new IOException("Could not find schema file on the classpath: " + schemaFilePath);
        }
        Collection<HollowSchema> schemas =
                HollowSchemaParser.parseCollectionOfSchemas(new BufferedReader(new InputStreamReader(input)));
        populateStateEngineWithTypeWriteStates(engine, schemas);
    }
}
/** Verifies that reading a schema file registers its types with the write state engine. */
@Test
public void testReadSchemaFileIntoWriteState() throws Exception {
    // A freshly constructed write engine carries no type states.
    HollowWriteStateEngine writeState = new HollowWriteStateEngine();
    Assert.assertEquals("Should have no type states", 0, writeState.getOrderedTypeStates().size());

    HollowWriteStateCreator.readSchemaFileIntoWriteState("schema1.txt", writeState);

    // schema1.txt declares two types, both of which should now be present.
    Assert.assertEquals("Should now have types", 2, writeState.getOrderedTypeStates().size());
}
/**
 * Verifies that repopulating a write state engine fails when the target type state was
 * pre-initialized with an incompatible shard configuration.
 */
@Test
public void repopulationFailsIfShardsAreIncorrectlyPreconfigured() throws IOException {
    HollowWriteStateEngine writeEngine = new HollowWriteStateEngine();
    HollowObjectMapper mapper = new HollowObjectMapper(writeEngine);
    // Integer.valueOf instead of the deprecated boxing constructor new Integer(...).
    mapper.add(Integer.valueOf(1));
    mapper.add(Integer.valueOf(2));

    HollowReadStateEngine readEngine = StateEngineRoundTripper.roundTripSnapshot(writeEngine);

    // Pre-configure the target type state with a deliberately wrong shard count.
    HollowWriteStateEngine repopulatedWriteStateEngine = new HollowWriteStateEngine();
    new HollowObjectMapper(repopulatedWriteStateEngine).initializeTypeState(IntegerWithWrongShardConfiguration.class);

    try {
        HollowWriteStateCreator.populateUsingReadEngine(repopulatedWriteStateEngine, readEngine);
        Assert.fail("Expected populate to fail for mismatched shard configuration");
    } catch (Exception expected) {
        // expected: shard configuration mismatch
    }
}
/**
 * Initializes the producer data model for the given schemas.
 * <p>
 * Data model initialization is required prior to {@link #restore(long, HollowConsumer.BlobRetriever) restoring}
 * the producer.
 * This ensures that restoration can correctly compare the producer's current data model
 * with the data model of the restored data state and manage any differences in those models
 * (such as not restoring state for any types in the restoring data model not present in the
 * producer's current data model).
 * <p>
 * After initialization a data model initialization event will be emitted
 * to all registered data model initialization
 * {@link com.netflix.hollow.api.producer.listener.DataModelInitializationListener listeners}.
 *
 * @param schemas the data model schemas
 * @throws IllegalArgumentException if {@code schemas} is empty
 * @see #restore(long, HollowConsumer.BlobRetriever)
 */
public void initializeDataModel(HollowSchema... schemas) {
    Objects.requireNonNull(schemas, "schemas");
    if (schemas.length == 0) {
        // Fixed message: this overload takes schemas, not classes (the message had been
        // copied from the Class-based overload).
        throw new IllegalArgumentException("schemas is empty");
    }

    long start = currentTimeMillis();
    HollowWriteStateCreator.populateStateEngineWithTypeWriteStates(getWriteEngine(), Arrays.asList(schemas));
    // Notify listeners with the elapsed initialization time.
    listeners.listeners().fireProducerInit(currentTimeMillis() - start);

    isInitialized = true;
}
break; case addSchemaFileToDataModel: HollowWriteStateCreator.readSchemaFileIntoWriteState(arg.getValue(), engine); break; case apiClassName:
/**
 * @param inputs the set of {@link HollowReadStateEngine} to combine data from.
 */
public HollowCombiner(HollowReadStateEngine... inputs) {
    // Delegates to the constructor taking an explicit output write engine, creating one
    // pre-populated with the schemas of the first input. Assumes all inputs share the same
    // data model and that at least one input is supplied -- an empty varargs array fails
    // here with an ArrayIndexOutOfBoundsException.
    this(HollowWriteStateCreator.createWithSchemas(inputs[0].getSchemas()), inputs);
}
new HollowObjectMapper(repopulatedWriteStateEngine).initializeTypeState(IntegerWithMoreThanOneField.class); HollowWriteStateCreator.populateUsingReadEngine(repopulatedWriteStateEngine, readEngine);
/**
 * Verifies transitive dependency detection over a linear schema chain
 * TypeA -> TypeB -> TypeC: dependency holds in chain order only.
 */
@Test
public void determinesIfSchemasAreTransitivelyDependent() throws IOException {
    String schemaDefinitions = "TypeA { TypeB b; }" + "TypeB { TypeC c; }" + "TypeC { TypeD d; }";
    List<HollowSchema> parsedSchemas = HollowSchemaParser.parseCollectionOfSchemas(schemaDefinitions);

    HollowWriteStateEngine engine = new HollowWriteStateEngine();
    HollowWriteStateCreator.populateStateEngineWithTypeWriteStates(engine, parsedSchemas);

    // Dependencies follow the reference chain...
    Assert.assertTrue(HollowSchemaSorter.typeIsTransitivelyDependent(engine, "TypeA", "TypeB"));
    Assert.assertTrue(HollowSchemaSorter.typeIsTransitivelyDependent(engine, "TypeA", "TypeC"));
    Assert.assertTrue(HollowSchemaSorter.typeIsTransitivelyDependent(engine, "TypeB", "TypeC"));

    // ...but never in the reverse direction.
    Assert.assertFalse(HollowSchemaSorter.typeIsTransitivelyDependent(engine, "TypeC", "TypeB"));
    Assert.assertFalse(HollowSchemaSorter.typeIsTransitivelyDependent(engine, "TypeB", "TypeA"));
    Assert.assertFalse(HollowSchemaSorter.typeIsTransitivelyDependent(engine, "TypeC", "TypeA"));
}
/**
 * Builds a fresh {@link HollowObjectMapper} backed by a new write engine that is
 * pre-populated with the same data model as the supplied mapper's state engine.
 */
private static HollowObjectMapper createNewHollowObjectMapperFromExisting(HollowObjectMapper objectMapper) {
    return new HollowObjectMapper(
            HollowWriteStateCreator.createWithSchemas(objectMapper.getStateEngine().getSchemas()));
}
new HollowObjectMapper(repopulatedWriteStateEngine).initializeTypeState(Integer.class); HollowWriteStateCreator.populateUsingReadEngine(repopulatedWriteStateEngine, readEngine);
/**
 * Creates a splitter which will divide the records of the input state engine across
 * one output write engine per shard, each sharing the input's data model.
 */
public HollowSplitter(HollowSplitterCopyDirector director, HollowReadStateEngine inputStateEngine) {
    this.director = director;
    this.inputStateEngine = inputStateEngine;

    int numShards = director.getNumShards();
    this.outputStateEngines = new HollowWriteStateEngine[numShards];

    // Every shard output starts from the same schemas as the input.
    List<HollowSchema> schemas = inputStateEngine.getSchemas();
    for (int shard = 0; shard < numShards; shard++) {
        outputStateEngines[shard] = HollowWriteStateCreator.createWithSchemas(schemas);
    }
}
/**
 * Create a delta patcher which will patch between the states contained in the two state engines.
 *
 * @param from The earlier state
 * @param to The later state.
 */
public HollowStateDeltaPatcher(HollowReadStateEngine from, HollowReadStateEngine to) {
    this.from = from;
    this.to = to;
    // Only schemas present in BOTH states participate in the patch; order them by
    // schema dependency (via HollowSchemaSorter) before building the write engine.
    this.schemas = HollowSchemaSorter.dependencyOrderedSchemaList(getCommonSchemas(from, to));
    this.writeEngine = HollowWriteStateCreator.createWithSchemas(schemas);
    // Must run last: presumably reads the fields initialized above (from/to/schemas) --
    // NOTE(review): confirm against discoverChangedOrdinalsBetweenStates().
    this.changedOrdinalsBetweenStates = discoverChangedOrdinalsBetweenStates();
}
/**
 * Produces a copy of the supplied historical state data access in which record ordinals
 * have been rewritten via the supplied {@link OrdinalRemapper}.
 * <p>
 * For each type: the removed records are copied (with remapped ordinals) into a new write
 * engine, and the type's previous ordinal mapping is translated through the remapper.
 *
 * @param previous the historical state to copy
 * @param ordinalRemapper the remapping to apply to all record ordinals
 * @return a new historical state data access over the copied, remapped records
 */
public HollowHistoricalStateDataAccess copyButRemapOrdinals(HollowHistoricalStateDataAccess previous, OrdinalRemapper ordinalRemapper) {
    // NOTE(review): schemasWithoutKeys presumably strips primary-key metadata from the
    // copied schemas -- confirm.
    HollowWriteStateEngine writeEngine = HollowWriteStateCreator.createWithSchemas(schemasWithoutKeys(previous.getSchemas()));
    IntMapOrdinalRemapper typeRemovedOrdinalRemapping = new IntMapOrdinalRemapper();

    for(String typeName : previous.getAllTypes()) {
        HollowHistoricalTypeDataAccess typeDataAccess = (HollowHistoricalTypeDataAccess) previous.getTypeDataAccess(typeName);
        // Copy this type's removed records into the new write engine under remapped ordinals.
        copyRemappedRecords(typeDataAccess.getRemovedRecords(), ordinalRemapper, writeEngine);
        // Translate the previous ordinal mapping for this type through the supplied remapper,
        // and record it so lookups against the copy resolve correctly.
        IntMap ordinalLookupMap = remapPreviousOrdinalMapping(typeDataAccess.getOrdinalRemap(), typeName, ordinalRemapper);
        typeRemovedOrdinalRemapping.addOrdinalRemapping(typeName, ordinalLookupMap);
    }

    // Round-trip the write engine to a readable form; version and schema changes are
    // carried over unchanged from the previous state.
    return new HollowHistoricalStateDataAccess(totalHistory, previous.getVersion(), roundTripStateEngine(writeEngine), typeRemovedOrdinalRemapping, previous.getSchemaChanges());
}