/**
 * Creates a {@link NodeRecordCheck} for sparse nodes, i.e. nodes without relationship groups.
 * The check always covers {@link LabelsField#LABELS}, plus any caller-supplied fields.
 *
 * @param additional extra record fields to check on top of the basic label check.
 * @return a node record check covering labels and the additional fields.
 */
@SafeVarargs
static NodeRecordCheck forSparseNodes( RecordField<NodeRecord,ConsistencyReport.NodeConsistencyReport>... additional )
{
    return new NodeRecordCheck( union(
            ArrayUtil.<RecordField<NodeRecord,ConsistencyReport.NodeConsistencyReport>>array( LabelsField.LABELS ),
            additional ) );
}
/**
 * Creates a {@link NodeRecordCheck} for dense nodes, i.e. nodes with relationship groups.
 * The check always covers {@link RelationshipGroupField#NEXT_GROUP} and {@link LabelsField#LABELS},
 * plus any caller-supplied fields.
 *
 * @param additional extra record fields to check on top of the basic group/label checks.
 * @return a node record check covering the basic dense-node fields and the additional fields.
 */
@SafeVarargs
static NodeRecordCheck forDenseNodes( RecordField<NodeRecord,ConsistencyReport.NodeConsistencyReport>... additional )
{
    return new NodeRecordCheck( union(
            ArrayUtil.<RecordField<NodeRecord,ConsistencyReport.NodeConsistencyReport>>array(
                    RelationshipGroupField.NEXT_GROUP, LabelsField.LABELS ),
            additional ) );
}
/**
 * Creates a {@link RelationshipRecordCheck} for the backward consistency-check pass.
 * The check always validates both relationship endpoints ({@link NodeField#SOURCE} and
 * {@link NodeField#TARGET}), plus any caller-supplied fields.
 *
 * @param additional extra record fields to check on top of the endpoint checks.
 * @return a relationship record check covering both endpoints and the additional fields.
 */
@SafeVarargs
public static RelationshipRecordCheck relationshipRecordCheckBackwardPass(
        RecordField<RelationshipRecord,ConsistencyReport.RelationshipConsistencyReport>... additional )
{
    RecordField<RelationshipRecord,ConsistencyReport.RelationshipConsistencyReport>[] endpoints =
            ArrayUtil.<RecordField<RelationshipRecord,ConsistencyReport.RelationshipConsistencyReport>>array(
                    NodeField.SOURCE, NodeField.TARGET );
    return new RelationshipRecordCheck( union( endpoints, additional ) );
}
@Test public void shouldPickUpOverriddenBooleanConfigurationParameters() { // GIVEN String[] args = array( "-c", "myoptionenabled" ); // WHEN ServerCommandLineArgs parsed = ServerCommandLineArgs.parse( args ); // THEN assertEquals( stringMap( "myoptionenabled", Boolean.TRUE.toString() ), parsed.configOverrides() ); }
@Test public void shouldPickUpMultipleOverriddenConfigurationParameters() { // GIVEN String[] args = array( "-c", "my_first_option=first", "-c", "myoptionenabled", "-c", "my_second_option=second" ); // WHEN ServerCommandLineArgs parsed = ServerCommandLineArgs.parse( args ); // THEN assertEquals( stringMap( "my_first_option", "first" , "myoptionenabled", Boolean.TRUE.toString(), "my_second_option", "second" ), parsed.configOverrides() ); }
@Test public void shouldPickUpOverriddenConfigurationParameters() { // GIVEN String[] args = array( "-c", "myoption=myvalue" ); // WHEN ServerCommandLineArgs parsed = ServerCommandLineArgs.parse( args ); // THEN assertEquals( stringMap( "myoption", "myvalue" ), parsed.configOverrides() ); }
@Test public void shouldReportPopulatingIfAnyIsPopulating() { // given for ( InternalIndexState state : array( InternalIndexState.ONLINE, InternalIndexState.POPULATING ) ) { for ( IndexProvider populatingProvider : aliveProviders ) { // when for ( IndexProvider aliveProvider : aliveProviders ) { setInitialState( aliveProvider, populatingProvider == aliveProvider ? InternalIndexState.POPULATING : state ); } InternalIndexState initialState = fusionIndexProvider.getInitialState( AN_INDEX ); // then assertEquals( InternalIndexState.POPULATING, initialState ); } } }
/**
 * Verifies slot selection: every single value is routed to the provider of its value group,
 * while every two-value (composite) combination is routed to the lucene provider.
 */
@Test
public void mustSelectCorrectTargetForAllGivenValueCombinations()
{
    // given sample values bucketed by the slot they should select
    EnumMap<IndexSlot,Value[]> values = FusionIndexTestHelp.valuesByGroup();
    Value[] allValues = FusionIndexTestHelp.allValues();
    for ( IndexSlot slot : IndexSlot.values() )
    {
        Value[] group = values.get( slot );
        for ( Value value : group )
        {
            // when selecting a provider for a single value
            IndexProvider selected = instanceSelector.select( slotSelector.selectSlot( array( value ), GROUP_OF ) );

            // then the slot's own provider is chosen (falling back to lucene via orLucene)
            assertSame( orLucene( providers.get( slot ) ), selected );
        }
    }

    // All composite values should go to lucene
    for ( Value firstValue : allValues )
    {
        for ( Value secondValue : allValues )
        {
            // when selecting a provider for a two-value composite
            IndexProvider selected = instanceSelector.select( slotSelector.selectSlot( array( firstValue, secondValue ), GROUP_OF ) );

            // then the lucene provider handles it regardless of the value types
            assertSame( providers.get( LUCENE ), selected );
        }
    }
}
@Test public void shouldCreateUnionOfIndexLimitations() { UnionIndexCapability union; // given union = unionOfIndexLimitations( IndexCapability.LIMITIATION_NONE, IndexCapability.LIMITIATION_NONE ); // then assertEquals( Collections.emptySet(), asSet( union.limitations() ) ); // given union = unionOfIndexLimitations( IndexCapability.LIMITIATION_NONE, array( IndexLimitation.SLOW_CONTAINS ) ); // then assertEquals( asSet( IndexLimitation.SLOW_CONTAINS ), asSet( union.limitations() ) ); // given union = unionOfIndexLimitations( array( IndexLimitation.SLOW_CONTAINS ), array( IndexLimitation.SLOW_CONTAINS ) ); // then assertEquals( asSet( IndexLimitation.SLOW_CONTAINS ), asSet( union.limitations() ) ); }
// Execute the command against the named database with an explicit "8g" memory argument.
// NOTE(review): fragment — `command` and `databaseName` are set up outside this view; confirm against the enclosing method.
command.execute( array( "--database", databaseName, "--memory", "8g" ) );
@Test public void shouldNotReportConflictOnSameValueSameEntityId() throws IndexEntryConflictException { // given Value value = Values.of( 123 ); long entityId = 10; // when NativeIndexValue merged = detector.merge( key( entityId, value ), key( entityId, value ), NativeIndexValue.INSTANCE, NativeIndexValue.INSTANCE ); // then assertNull( merged ); detector.checkConflict( array() ); // <-- should not throw conflict exception }
@Test public void shouldAllowMissingIdHeaderEntry() throws Exception { // GIVEN CharSeeker seeker = seeker( "one\ttwo" ); Extractors extractors = new Extractors( ';' ); // WHEN Header header = DataFactories.defaultFormatNodeFileHeader().create( seeker, TABS, IdType.ACTUAL, groups ); // THEN assertArrayEquals( array( entry( "one", Type.PROPERTY, extractors.string() ), entry( "two", Type.PROPERTY, extractors.string() ) ), header.entries() ); seeker.close(); }
// Randomly create the node either with the single label or with no labels at all.
// NOTE(review): fragment — the body of the for-loop below lies outside this view; presumably it sets a property per key.
Node node = db.createNode( random.nextBoolean() ? array( label ) : new Label[0] );
for ( String key : keys )
@Test public void shouldHaveEmptyHeadersBeInterpretedAsIgnored() throws Exception { // GIVEN CharSeeker seeker = seeker( "one:id\ttwo\t\tdate:long" ); IdType idType = IdType.ACTUAL; Extractors extractors = new Extractors( '\t' ); // WHEN Header header = DataFactories.defaultFormatNodeFileHeader().create( seeker, TABS, idType, groups ); // THEN assertArrayEquals( array( entry( "one", Type.ID, extractors.long_() ), entry( "two", Type.PROPERTY, extractors.string() ), entry( null, Type.IGNORE, null ), entry( "date", Type.PROPERTY, extractors.long_() ) ), header.entries() ); seeker.close(); }
@Test public void shouldReportConflictOnSameValueAndDifferentEntityIds() { // given Value value = Values.of( 123 ); long entityId1 = 10; long entityId2 = 20; // when NativeIndexValue merged = detector.merge( key( entityId1, value ), key( entityId2, value ), NativeIndexValue.INSTANCE, NativeIndexValue.INSTANCE ); // then assertNull( merged ); try { detector.checkConflict( array( value ) ); fail( "Should've detected conflict" ); } catch ( IndexEntryConflictException e ) { assertEquals( entityId1, e.getExistingNodeId() ); assertEquals( entityId2, e.getAddedNodeId() ); assertEquals( value, e.getSinglePropertyValue() ); } }
/**
 * Verifies that a comma-separated node file header maps each column to the right entry type:
 * :ID uses the id-type extractor, :label/:LABEL (case-insensitive, per this fixture) become
 * LABEL entries, and untyped or typed columns become PROPERTY entries with matching extractors.
 */
@Test
public void shouldParseDefaultNodeFileHeaderCorrectly() throws Exception
{
    // GIVEN
    CharSeeker seeker = seeker( "ID:ID,label-one:label,also-labels:LABEL,name,age:long" );
    IdType idType = IdType.STRING;
    Extractors extractors = new Extractors( ',' );

    // WHEN
    Header header = DataFactories.defaultFormatNodeFileHeader().create( seeker, COMMAS, idType, groups );

    // THEN
    assertArrayEquals( array(
            entry( "ID", Type.ID, idType.extractor( extractors ) ),
            entry( "label-one", Type.LABEL, extractors.stringArray() ),
            entry( "also-labels", Type.LABEL, extractors.stringArray() ),
            // a column with no type suffix defaults to a string property
            entry( "name", Type.PROPERTY, extractors.string() ),
            entry( "age", Type.PROPERTY, extractors.long_() ) ), header.entries() );
    seeker.close();
}
/**
 * Verifies that a tab-separated relationship file header maps :START_ID/:END_ID to unnamed
 * id entries, :TYPE to a type entry, and typed columns (including array types) to PROPERTY
 * entries with the corresponding extractors.
 */
@Test
public void shouldParseDefaultRelationshipFileHeaderCorrectly() throws Exception
{
    // GIVEN
    CharSeeker seeker = seeker( ":START_ID\t:END_ID\ttype:TYPE\tdate:long\tmore:long[]" );
    IdType idType = IdType.ACTUAL;
    Extractors extractors = new Extractors( '\t' );

    // WHEN
    Header header = DataFactories.defaultFormatRelationshipFileHeader().create( seeker, TABS, idType, groups );

    // THEN
    assertArrayEquals( array(
            // START_ID/END_ID columns carry no name, only the id extractor
            entry( null, Type.START_ID, idType.extractor( extractors ) ),
            entry( null, Type.END_ID, idType.extractor( extractors ) ),
            entry( "type", Type.TYPE, extractors.string() ),
            entry( "date", Type.PROPERTY, extractors.long_() ),
            entry( "more", Type.PROPERTY, extractors.longArray() ) ), header.entries() );
    seeker.close();
}
/**
 * Verifies that group names in parentheses after :START_ID/:END_ID — e.g. ":START_ID(GroupOne)" —
 * are parsed out and attached to the corresponding header entries.
 */
@Test
public void shouldParseGroupName() throws Exception
{
    // GIVEN
    String groupOneName = "GroupOne";
    String groupTwoName = "GroupTwo";
    CharSeeker seeker = seeker( ":START_ID(" + groupOneName + ")\t:END_ID(" + groupTwoName + ")\ttype:TYPE\tdate:long\tmore:long[]" );
    IdType idType = IdType.ACTUAL;
    Extractors extractors = new Extractors( '\t' );
    // the groups must be registered before the header referencing them is parsed
    groups.getOrCreate( groupOneName );
    groups.getOrCreate( groupTwoName );

    // WHEN
    Header header = DataFactories.defaultFormatRelationshipFileHeader().create( seeker, TABS, idType, groups );

    // THEN
    assertArrayEquals( array(
            entry( null, Type.START_ID, "GroupOne", idType.extractor( extractors ) ),
            entry( null, Type.END_ID, "GroupTwo", idType.extractor( extractors ) ),
            entry( "type", Type.TYPE, extractors.string() ),
            entry( "date", Type.PROPERTY, extractors.long_() ),
            entry( "more", Type.PROPERTY, extractors.longArray() ) ), header.entries() );
    seeker.close();
}
/**
 * Verifies that when input comes from multiple readables, the header is taken from the first
 * line of the first source only; subsequent sources contribute data, not header entries.
 */
@Test
public void shouldParseHeaderFromFirstLineOfFirstInputFile() throws Exception
{
    // GIVEN a header-bearing first source and a data-only second source
    final CharReadable firstSource = wrap( "id:ID\tname:String\tbirth_date:long" );
    final CharReadable secondSource = wrap( "0\tThe node\t123456789" );
    DataFactory dataFactory = DataFactories.data( value -> value,
            () -> new MultiReadable( Readables.iterator( IOFunctions.identity(), firstSource, secondSource ) ) );
    Header.Factory headerFactory = defaultFormatNodeFileHeader();
    Extractors extractors = new Extractors( ';' );

    // WHEN parsing the combined stream
    CharSeeker seeker = CharSeekers.charSeeker( new MultiReadable( dataFactory.create( TABS ).stream() ), TABS, false );
    Header header = headerFactory.create( seeker, TABS, IdType.ACTUAL, groups );

    // THEN only the first source's first line produced header entries
    // NOTE(review): "name:String" maps to the plain string extractor — presumably the type suffix is case-insensitive; confirm in the header factory.
    assertArrayEquals( array(
            entry( "id", Type.ID, extractors.long_() ),
            entry( "name", Type.PROPERTY, extractors.string() ),
            entry( "birth_date", Type.PROPERTY, extractors.long_() ) ), header.entries() );
    seeker.close();
}