// Analyzer that ignores its input and emits a fixed, scripted token stream:
// terms "1".."5" with position increments {1, 2, 1, 0, 1}. The resulting
// positions are 1@0, 2@2 (hole at 1), 3@3, 4@3 (stacked on "3"), 5@4.
Analyzer analyzer = new Analyzer() {
    public TokenStream tokenStream(String fieldName, Reader reader) {
        return new TokenStream() {
            private final String[] TERMS = {"1", "2", "3", "4", "5"};
            private final int[] POSITION_INCREMENTS = {1, 2, 1, 0, 1};
            private int cursor = 0;

            public Token next() {
                if (cursor >= TERMS.length) {
                    return null; // stream exhausted
                }
                Token token = new Token(TERMS[cursor], cursor, cursor);
                token.setPositionIncrement(POSITION_INCREMENTS[cursor]);
                cursor++;
                return token;
            }
        };
    }
};
// Build a one-document index in RAM. The field text ("bogus") is irrelevant:
// the custom analyzer always produces its scripted token stream regardless
// of input. optimize() collapses to a single segment before searching.
RAMDirectory store = new RAMDirectory();
IndexWriter writer = new IndexWriter(store, analyzer, true);
Document doc = new Document();
doc.add(new Field("field", "bogus", Field.Store.YES, Field.Index.TOKENIZED));
writer.addDocument(doc);
writer.optimize();
writer.close();
IndexSearcher searcher = new IndexSearcher(store);
PhraseQuery q;
Hits hits;
// Table-driven exact-phrase checks against the scripted positions
// (1@0, 2@2, 3@3, 4@3, 5@4): a two-term phrase matches only when the
// second term sits at exactly the first term's position + 1.
String[][] phrases = {
    {"1", "2"}, // 0 -> 2: position hole, no match
    {"2", "3"}, // 2 -> 3: adjacent, match
    {"3", "4"}, // 3 -> 3: "4" stacked on "3", not following it, no match
    {"2", "4"}, // 2 -> 3: stacked "4" follows "2", match
    {"3", "5"}, // 3 -> 4: adjacent, match
    {"4", "5"}, // 3 -> 4: adjacent, match
    {"2", "5"}, // 2 -> 4: gap, no match
};
int[] expectedHits = {0, 1, 0, 1, 1, 1, 0};
for (int k = 0; k < phrases.length; k++) {
    q = new PhraseQuery();
    q.add(new Term("field", phrases[k][0]));
    q.add(new Term("field", phrases[k][1]));
    hits = searcher.search(q);
    assertEquals(expectedHits[k], hits.length());
}