/** Build a new {@link IntsRef} that has the same content as this builder. */
public IntsRef toIntsRef() {
  return IntsRef.deepCopyOf(get());
}
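// Usage sketch (an assumption: this toIntsRef() lives on Lucene's IntsRefBuilder in
// org.apache.lucene.util; the appended values are illustrative). Because the method
// returns a deep copy, later changes to the builder do not affect refs handed out earlier.
IntsRefBuilder builder = new IntsRefBuilder();
builder.append(7);
builder.append(42);
IntsRef snapshot = builder.toIntsRef(); // independent copy of [7, 42]
builder.clear();                        // snapshot still holds [7, 42]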
public Set<IntsRef> toFiniteStrings(TokenStream stream) throws IOException {
  final TokenStreamToAutomaton ts2a = getTokenStreamToAutomaton();
  Automaton automaton;
  try (TokenStream ts = stream) {
    automaton = toAutomaton(ts, ts2a);
  }
  LimitedFiniteStringsIterator finiteStrings =
      new LimitedFiniteStringsIterator(automaton, maxGraphExpansions);
  Set<IntsRef> set = new HashSet<>();
  for (IntsRef string = finiteStrings.next(); string != null; string = finiteStrings.next()) {
    set.add(IntsRef.deepCopyOf(string));
  }
  return Collections.unmodifiableSet(set);
}
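// Usage sketch (hypothetical caller; the enclosing suggester-style class and its
// Analyzer are assumptions, not shown in the snippet above). The method takes
// ownership of the TokenStream and closes it via try-with-resources:
//
//   Set<IntsRef> paths = toFiniteStrings(analyzer.tokenStream("suggest", "new york"));
//
// Each IntsRef is one path through the token graph (synonyms, shingles, ...),
// bounded by maxGraphExpansions, and deep-copied because the iterator recycles
// its internal IntsRef between calls to next().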
/**
 * Adds a context with boost; set <code>exact</code> to false
 * if the context is a prefix of any indexed contexts.
 */
public void addContext(CharSequence context, float boost, boolean exact) {
  if (boost < 0f) {
    throw new IllegalArgumentException("'boost' must be >= 0");
  }
  for (int i = 0; i < context.length(); i++) {
    if (ContextSuggestField.CONTEXT_SEPARATOR == context.charAt(i)) {
      throw new IllegalArgumentException("Illegal value [" + context + "] UTF-16 codepoint [0x"
          + Integer.toHexString((int) context.charAt(i)) + "] at position " + i
          + " is a reserved character");
    }
  }
  contexts.put(IntsRef.deepCopyOf(Util.toIntsRef(new BytesRef(context), scratch)),
      new ContextMetaData(boost, exact));
}
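// Usage sketch, assuming this addContext belongs to ContextQuery in Lucene's suggest
// module (org.apache.lucene.search.suggest.document); field name, terms and contexts
// are illustrative:
CompletionAnalyzer analyzer = new CompletionAnalyzer(new StandardAnalyzer());
ContextQuery query =
    new ContextQuery(new PrefixCompletionQuery(analyzer, new Term("suggest", "tran")));
query.addContext("movies", 2f, true);  // only the exact context "movies", boosted
query.addContext("mov", 1f, false);    // any indexed context starting with "mov"
// A context containing ContextSuggestField.CONTEXT_SEPARATOR would be rejected with
// the IllegalArgumentException shown above.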
FiniteStringsIterator finiteStrings = new FiniteStringsIterator(automaton);
for (IntsRef string; (string = finiteStrings.next()) != null;) {
  refs.add(IntsRef.deepCopyOf(string));
}
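// A minimal, self-contained variant of the same pattern (assumes Lucene's
// org.apache.lucene.util.automaton package): enumerate the accepted strings of a
// small automaton, deep-copying each one because the iterator recycles its IntsRef.
Automaton automaton = Automata.makeString("abc");
List<IntsRef> refs = new ArrayList<>();
FiniteStringsIterator it = new FiniteStringsIterator(automaton);
for (IntsRef string; (string = it.next()) != null;) {
  refs.add(IntsRef.deepCopyOf(string)); // [0x61, 0x62, 0x63] as code points
}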
/**
 * Add a string value to an object entry.
 */
private String doValString(final String val) {
  images.add(val);
  types.add(JsonTokenizer.getTokenTypes()[LITERAL]);
  incr.add(1);
  this.incrNodeObjectPath();
  this.setLastNode(0);
  nodes.add(IntsRef.deepCopyOf(curNodePath));
  this.decrNodeObjectPath();
  datatypes.add(XSDDatatype.XSD_STRING);
  states.pop(); // remove OBJECT_VAL state
  return "\"" + val + "\"" + this.getWhitespace() + ",";
}
/**
 * Add a string value to an object entry.
 */
private String doValString(final String val) {
  images.add(val);
  types.add(ExtendedJsonTokenizer.getTokenTypes()[LITERAL]);
  incr.add(1);
  this.incrNodeObjectPath();
  this.setLastNode(0);
  nodes.add(IntsRef.deepCopyOf(curNodePath));
  this.decrNodeObjectPath();
  datatypes.add(XSDDatatype.XSD_STRING);
  states.pop(); // remove OBJECT_VAL state
  return "\"" + val + "\"" + this.getWhitespace() + ",";
}
  // STRING case
  final String val = "stepha" + this.getWhitespace() + "n" + this.getWhitespace() + "e";
  this.addToLastNode(1);
  nodes.add(IntsRef.deepCopyOf(curNodePath));
  images.add(val);
  types.add(JsonTokenizer.getTokenTypes()[LITERAL]);
  break;

case 1: // DOUBLE case
  this.addToLastNode(1);
  nodes.add(IntsRef.deepCopyOf(curNodePath));
  images.add("34.560e-9");
  types.add(JsonTokenizer.getTokenTypes()[NUMBER]);
  break;

case 2: // LONG case
  this.addToLastNode(1);
  nodes.add(IntsRef.deepCopyOf(curNodePath));
  images.add("34560e-9");
  types.add(JsonTokenizer.getTokenTypes()[NUMBER]);
  break;

case 3: // true case
  this.addToLastNode(1);
  nodes.add(IntsRef.deepCopyOf(curNodePath));
  images.add("true");
  types.add(JsonTokenizer.getTokenTypes()[TRUE]);
  break;

case 4: // false case
  this.addToLastNode(1);
  nodes.add(IntsRef.deepCopyOf(curNodePath));
  images.add("false");
  types.add(JsonTokenizer.getTokenTypes()[FALSE]);
  break;
  // STRING case
  final String val = "stepha" + this.getWhitespace() + "n" + this.getWhitespace() + "e";
  this.addToLastNode(1);
  nodes.add(IntsRef.deepCopyOf(curNodePath));
  images.add(val);
  types.add(ExtendedJsonTokenizer.getTokenTypes()[LITERAL]);
  break;

case 1: // DOUBLE case
  this.addToLastNode(1);
  nodes.add(IntsRef.deepCopyOf(curNodePath));
  images.add("34.560e-9");
  types.add(ExtendedJsonTokenizer.getTokenTypes()[NUMBER]);
  break;

case 2: // LONG case
  this.addToLastNode(1);
  nodes.add(IntsRef.deepCopyOf(curNodePath));
  images.add("34560e-9");
  types.add(ExtendedJsonTokenizer.getTokenTypes()[NUMBER]);
  break;

case 3: // true case
  this.addToLastNode(1);
  nodes.add(IntsRef.deepCopyOf(curNodePath));
  images.add("true");
  types.add(ExtendedJsonTokenizer.getTokenTypes()[TRUE]);
  break;

case 4: // false case
  this.addToLastNode(1);
  nodes.add(IntsRef.deepCopyOf(curNodePath));
  images.add("false");
  types.add(ExtendedJsonTokenizer.getTokenTypes()[FALSE]);
  break;
/**
 * Copy the inner stream's attribute values to the main stream's. This filter
 * uses an inner stream, which needs to be cleared so that other filters
 * get clean attribute data. Because of that, the datatypeURI and node
 * attributes have to be saved in order to be restored afterwards.
 */
private void copyInnerStreamAttributes() {
  // backup datatype and node path
  final IntsRef nodePath = IntsRef.deepCopyOf(nodeAtt.node());
  final char[] dt = dtypeAtt.datatypeURI();
  // clear attributes
  input.clearAttributes();
  // copy inner attributes
  final int len = tokenTermAtt.length();
  termAtt.copyBuffer(tokenTermAtt.buffer(), 0, len);
  offsetAtt.setOffset(tokenOffsetAtt.startOffset(), tokenOffsetAtt.endOffset());
  posIncrAtt.setPositionIncrement(tokenPosIncrAtt.getPositionIncrement());
  typeAtt.setType(tokenTypeAtt.type());
  // TupleTokenizer handles the setting of tuple/cell values and the datatype URI
  // restore datatype and node
  nodeAtt.copyNode(nodePath);
  dtypeAtt.setDatatypeURI(dt);
}
/**
 * Copy the inner stream's attribute values to the main stream's. This filter
 * uses an inner stream, which needs to be cleared so that other filters
 * get clean attribute data. Because of that, the datatypeURI, node and path
 * attributes have to be saved in order to be restored afterwards.
 */
private void copyInnerStreamAttributes() {
  // backup datatype, node identifier and path
  final IntsRef nodeId = IntsRef.deepCopyOf(nodeAtt.node());
  final char[] dt = dtypeAtt.datatypeURI();
  final String[] path = pathAtt.path();
  // clear attributes
  input.clearAttributes();
  // copy inner attributes
  final int len = tokenTermAtt.length();
  termAtt.copyBuffer(tokenTermAtt.buffer(), 0, len);
  offsetAtt.setOffset(tokenOffsetAtt.startOffset(), tokenOffsetAtt.endOffset());
  posIncrAtt.setPositionIncrement(tokenPosIncrAtt.getPositionIncrement());
  typeAtt.setType(tokenTypeAtt.type());
  // TupleTokenizer handles the setting of tuple/cell values and the datatype URI
  // restore datatype, node and path attributes
  nodeAtt.copyNode(nodeId);
  dtypeAtt.setDatatypeURI(dt);
  pathAtt.setPath(path);
}