DocHelper
---

public class DocHelper extends Object

Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Fields Summary
---

| Type | Field |
|---|---|
| public static final String | FIELD_1_TEXT |
| public static final String | TEXT_FIELD_1_KEY |
| public static Field | textField1 |
| public static final String | FIELD_2_TEXT |
| public static final int[] | FIELD_2_FREQS |
| public static final String | TEXT_FIELD_2_KEY |
| public static Field | textField2 |
| public static final String | FIELD_2_COMPRESSED_TEXT |
| public static final int[] | COMPRESSED_FIELD_2_FREQS |
| public static final String | COMPRESSED_TEXT_FIELD_2_KEY |
| public static Field | compressedTextField2 |
| public static final String | FIELD_3_TEXT |
| public static final String | TEXT_FIELD_3_KEY |
| public static Field | textField3 |
| public static final String | KEYWORD_TEXT |
| public static final String | KEYWORD_FIELD_KEY |
| public static Field | keyField |
| public static final String | NO_NORMS_TEXT |
| public static final String | NO_NORMS_KEY |
| public static Field | noNormsField |
| public static final String | UNINDEXED_FIELD_TEXT |
| public static final String | UNINDEXED_FIELD_KEY |
| public static Field | unIndField |
| public static final String | UNSTORED_1_FIELD_TEXT |
| public static final String | UNSTORED_FIELD_1_KEY |
| public static Field | unStoredField1 |
| public static final String | UNSTORED_2_FIELD_TEXT |
| public static final String | UNSTORED_FIELD_2_KEY |
| public static Field | unStoredField2 |
| public static final String | LAZY_FIELD_BINARY_KEY |
| public static byte[] | LAZY_FIELD_BINARY_BYTES |
| public static Field | lazyFieldBinary |
| public static final String | LAZY_FIELD_KEY |
| public static final String | LAZY_FIELD_TEXT |
| public static Field | lazyField |
| public static final String | LARGE_LAZY_FIELD_KEY |
| public static String | LARGE_LAZY_FIELD_TEXT |
| public static Field | largeLazyField |
| public static final String | FIELD_UTF1_TEXT |
| public static final String | TEXT_FIELD_UTF1_KEY |
| public static Field | textUtfField1 |
| public static final String | FIELD_UTF2_TEXT |
| public static final int[] | FIELD_UTF2_FREQS |
| public static final String | TEXT_FIELD_UTF2_KEY |
| public static Field | textUtfField2 |
| public static Map | nameValues |
| public static Field[] | fields |
| public static Map | all |
| public static Map | indexed |
| public static Map | stored |
| public static Map | unstored |
| public static Map | unindexed |
| public static Map | termvector |
| public static Map | notermvector |
| public static Map | lazy |
| public static Map | noNorms |
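The constants come in pairs: each *_KEY constant names a field and the matching *_TEXT constant (or byte array) holds its value. nameValues maps every key to its expected value, fields holds the ready-made Field instances, and the remaining maps group those instances by property. A minimal sketch of that naming convention, assuming DocHelper lives in org.apache.lucene.index as in the Lucene test tree (the class name and printout below are illustrative only):

    import org.apache.lucene.index.DocHelper;   // assumed package

    public class FieldConstantsSketch {
      public static void main(String[] args) {
        String key = DocHelper.TEXT_FIELD_1_KEY;          // field name
        String text = DocHelper.FIELD_1_TEXT;             // field value
        Object looked = DocHelper.nameValues.get(key);    // same value via the lookup map
        System.out.println(key + " -> " + text + " (map agrees: " + text.equals(looked) + ")");
      }
    }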
Methods Summary
---

| static | (class initializer)
Builds the binary and large lazy fields, then files every entry of fields into the category maps (all, indexed, unindexed, stored, unstored, termvector, notermvector, noNorms, lazy).
//Initialize the large Lazy Field
StringBuffer buffer = new StringBuffer();
for (int i = 0; i < 10000; i++)
{
buffer.append("Lazily loading lengths of language in lieu of laughing ");
}
try {
LAZY_FIELD_BINARY_BYTES = "These are some binary field bytes".getBytes("UTF8");
} catch (UnsupportedEncodingException e) {
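// UTF-8 is always supported by the JVM, so this exception cannot occur in practice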
}
lazyFieldBinary = new Field(LAZY_FIELD_BINARY_KEY, LAZY_FIELD_BINARY_BYTES, Field.Store.YES);
fields[fields.length - 2] = lazyFieldBinary;
LARGE_LAZY_FIELD_TEXT = buffer.toString();
largeLazyField = new Field(LARGE_LAZY_FIELD_KEY, LARGE_LAZY_FIELD_TEXT, Field.Store.YES, Field.Index.TOKENIZED);
fields[fields.length - 1] = largeLazyField;
for (int i=0; i<fields.length; i++) {
Fieldable f = fields[i];
add(all,f);
if (f.isIndexed()) add(indexed,f);
else add(unindexed,f);
if (f.isTermVectorStored()) add(termvector,f);
if (f.isIndexed() && !f.isTermVectorStored()) add(notermvector,f);
if (f.isStored()) add(stored,f);
else add(unstored,f);
if (f.getOmitNorms()) add(noNorms,f);
if (f.isLazy()) add(lazy, f);
}
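Once this initializer has run, every prepared Field can be looked up by name in whichever maps match its properties. A short sketch of how a test might consult those category maps (the package and the printout are assumptions, not part of DocHelper):

    import java.util.Iterator;
    import org.apache.lucene.document.Fieldable;
    import org.apache.lucene.index.DocHelper;   // assumed package

    public class CategoryMapsSketch {
      public static void main(String[] args) {
        // 'all' holds every field; the other maps are property-based subsets keyed by field name.
        for (Iterator it = DocHelper.all.values().iterator(); it.hasNext();) {
          Fieldable f = (Fieldable) it.next();
          System.out.println(f.name()
              + " indexed=" + DocHelper.indexed.containsKey(f.name())
              + " stored=" + DocHelper.stored.containsKey(f.name())
              + " lazy=" + DocHelper.lazy.containsKey(f.name()));
        }
      }
    }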
| private static void | add(java.util.Map map, org.apache.lucene.document.Fieldable field)
Stores the field in the given map, keyed by the field's name.

map.put(field.name(), field);

| public static int | numFields(org.apache.lucene.document.Document doc)
Returns the number of fields in the document.

return doc.getFields().size();

| static | (class initializer)
Populates nameValues with each field key and its expected value.
nameValues = new HashMap();
nameValues.put(TEXT_FIELD_1_KEY, FIELD_1_TEXT);
nameValues.put(TEXT_FIELD_2_KEY, FIELD_2_TEXT);
nameValues.put(COMPRESSED_TEXT_FIELD_2_KEY, FIELD_2_COMPRESSED_TEXT);
nameValues.put(TEXT_FIELD_3_KEY, FIELD_3_TEXT);
nameValues.put(KEYWORD_FIELD_KEY, KEYWORD_TEXT);
nameValues.put(NO_NORMS_KEY, NO_NORMS_TEXT);
nameValues.put(UNINDEXED_FIELD_KEY, UNINDEXED_FIELD_TEXT);
nameValues.put(UNSTORED_FIELD_1_KEY, UNSTORED_1_FIELD_TEXT);
nameValues.put(UNSTORED_FIELD_2_KEY, UNSTORED_2_FIELD_TEXT);
nameValues.put(LAZY_FIELD_KEY, LAZY_FIELD_TEXT);
nameValues.put(LAZY_FIELD_BINARY_KEY, LAZY_FIELD_BINARY_BYTES);
nameValues.put(LARGE_LAZY_FIELD_KEY, LARGE_LAZY_FIELD_TEXT);
nameValues.put(TEXT_FIELD_UTF1_KEY, FIELD_UTF1_TEXT);
nameValues.put(TEXT_FIELD_UTF2_KEY, FIELD_UTF2_TEXT);

| public static void | setupDoc(org.apache.lucene.document.Document doc)
Adds the fields above to a document.

for (int i=0; i<fields.length; i++) {
doc.add(fields[i]);
}
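setupDoc is the usual entry point in tests: it copies every prepared Field into the supplied Document, after which numFields reports the full count. A sketch of that round trip (package assumed as above, printout illustrative):

    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.DocHelper;   // assumed package

    public class SetupDocSketch {
      public static void main(String[] args) {
        Document doc = new Document();
        DocHelper.setupDoc(doc);   // adds every field in DocHelper.fields
        System.out.println("fields in doc: " + DocHelper.numFields(doc)
            + " (expected " + DocHelper.fields.length + ")");
      }
    }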
| public static void | writeDoc(org.apache.lucene.store.Directory dir, org.apache.lucene.document.Document doc)
Writes the document to the directory using a segment named "test".
writeDoc(dir, "test", doc);
| public static void | writeDoc(org.apache.lucene.store.Directory dir, java.lang.String segment, org.apache.lucene.document.Document doc)
Writes the document to the directory in the given segment.
Similarity similarity = Similarity.getDefault();
writeDoc(dir, new WhitespaceAnalyzer(), similarity, segment, doc);
| public static void | writeDoc(org.apache.lucene.store.Directory dir, org.apache.lucene.analysis.Analyzer analyzer, org.apache.lucene.search.Similarity similarity, org.apache.lucene.document.Document doc)
Writes the document to the directory segment named "test" using the specified analyzer and similarity.
writeDoc(dir, analyzer, similarity, "test", doc);
| public static void | writeDoc(org.apache.lucene.store.Directory dir, org.apache.lucene.analysis.Analyzer analyzer, org.apache.lucene.search.Similarity similarity, java.lang.String segment, org.apache.lucene.document.Document doc)
Writes the document to the given directory segment using the specified analyzer and similarity.
DocumentWriter writer = new DocumentWriter(dir, analyzer, similarity, 50);
writer.addDocument(segment, doc);
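All four overloads funnel into this last one: pick an analyzer, a Similarity, and a segment name, and the document goes straight to the directory through a DocumentWriter. A sketch of the simplest path, using an in-memory RAMDirectory (the directory choice and the printout are illustrative, not prescribed by DocHelper):

    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.DocHelper;   // assumed package
    import org.apache.lucene.store.RAMDirectory;

    public class WriteDocSketch {
      public static void main(String[] args) throws Exception {
        RAMDirectory dir = new RAMDirectory();
        Document doc = new Document();
        DocHelper.setupDoc(doc);
        DocHelper.writeDoc(dir, doc);   // segment "test", WhitespaceAnalyzer, default Similarity
        System.out.println("files written: " + dir.list().length);
      }
    }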