/**
 * Converts the given value into Lucene documents by delegating to the
 * serializer selected for the value's type.
 *
 * @param index the Lucene index whose field names drive the mapping
 * @param value the entry value to serialize; {@code null} yields an empty collection
 * @return the documents produced by the chosen serializer, never {@code null}
 */
@Override
public Collection<Document> toDocuments(LuceneIndex index, Object value) {
  if (value == null) {
    return Collections.emptyList();
  }
  // Pick the serializer registered for this value's type (helper defined elsewhere in this class).
  final LuceneSerializer fieldMapper = getFieldMapper(value, index.getFieldNames());
  final Collection<Document> documents = fieldMapper.toDocuments(index, value);
  if (logger.isDebugEnabled()) {
    logger.debug("HeterogeneousLuceneSerializer.toDocuments:" + documents);
  }
  return documents;
}
@Override public void create(Object key, Object value) throws IOException { long start = stats.startUpdate(); Collection<Document> docs = Collections.emptyList(); boolean exceptionHappened = false; try { try { docs = serializer.toDocuments(index, value); } catch (Exception e) { exceptionHappened = true; stats.incFailedEntries(); logger.info("Failed to add index for " + value + " due to " + e.getMessage()); } if (!exceptionHappened) { docs.forEach(doc -> SerializerUtil.addKey(key, doc)); writer.addDocuments(docs); } } finally { stats.endUpdate(start); } }
/**
 * Runs the given serializer against a mocked index that reports the supplied
 * field names, asserting exactly one document is produced.
 *
 * @param mapper the serializer under test
 * @param object the value to serialize
 * @param fields the field names the mocked index advertises
 * @return the single document the serializer produced
 */
public static Document invokeSerializer(LuceneSerializer mapper, Object object, String[] fields) {
  final LuceneIndex mockIndex = Mockito.mock(LuceneIndex.class);
  Mockito.when(mockIndex.getFieldNames()).thenReturn(fields);
  final Collection<Document> documents = mapper.toDocuments(mockIndex, object);
  assertEquals(1, documents.size());
  return documents.iterator().next();
}
}
/**
 * Replaces the indexed documents for {@code key} with those produced from the new {@code value}.
 *
 * <p>Serialization failures are counted in the stats and logged (entry is skipped) rather than
 * propagated, matching {@code create}; I/O failures from the writer still propagate.
 *
 * @param key the region entry key identifying the documents to replace
 * @param value the new region entry value to index
 * @throws IOException if the underlying Lucene writer fails to update the documents
 */
@Override
public void update(Object key, Object value) throws IOException {
  long start = stats.startUpdate();
  Collection<Document> docs = Collections.emptyList();
  boolean exceptionHappened = false;
  try {
    try {
      docs = serializer.toDocuments(index, value);
    } catch (Exception e) {
      exceptionHappened = true;
      stats.incFailedEntries();
      // Pass the exception itself so the stack trace is preserved, not just the message.
      logger.info("Failed to update index for " + value + " due to " + e.getMessage(), e);
    }
    if (!exceptionHappened) {
      docs.forEach(doc -> SerializerUtil.addKey(key, doc));
      // Match on the key term so all existing documents for this key are replaced atomically.
      Term keyTerm = SerializerUtil.toKeyTerm(key);
      writer.updateDocuments(keyTerm, docs);
    }
  } finally {
    stats.endUpdate(start);
  }
}