Hibernate Search Integration with Apache Solr

Posted 2019-07-16 13:05

Question:

In my current application I use Hibernate Search to index and search data, and it works fine. But when building a cluster of server instances I do not want to use master/slave clustering over JMS or JGroups.

So I am trying to integrate Hibernate Search with Apache Solr. I followed this example.

And made some minor changes to be compatible with the newer apache.lucene.core version.

// NOTE: imports assume the Hibernate Search 5.x and SolrJ 5.x package layout.
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexableField;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.SolrInputDocument;
import org.hibernate.search.backend.AddLuceneWork;
import org.hibernate.search.backend.DeleteLuceneWork;
import org.hibernate.search.backend.IndexingMonitor;
import org.hibernate.search.backend.LuceneWork;
import org.hibernate.search.backend.UpdateLuceneWork;
import org.hibernate.search.backend.spi.BackendQueueProcessor;
import org.hibernate.search.indexes.spi.DirectoryBasedIndexManager;
import org.hibernate.search.spi.WorkerBuildContext;

public class HibernateSearchSolrWorkerBackend implements BackendQueueProcessor {
private static final String ID_FIELD_NAME = "id";

private static final ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock();
private static final ReentrantReadWriteLock.WriteLock writeLock = readWriteLock.writeLock();

private ConcurrentUpdateSolrClient solrServer;

@Override
public void initialize(Properties properties, WorkerBuildContext workerBuildContext, DirectoryBasedIndexManager directoryBasedIndexManager) {
    // queue size 20, with 4 worker threads pushing buffered updates to the core
    solrServer = new ConcurrentUpdateSolrClient("http://localhost:8983/solr/test", 20, 4);
}

@Override
public void close() {
    try {
        // release the client's internal queue and background threads
        solrServer.close();
    } catch (IOException e) {
        throw new RuntimeException("Failed to close Solr client", e);
    }
}

@Override
public void applyWork(List<LuceneWork> luceneWorks, IndexingMonitor indexingMonitor) {
    List<SolrInputDocument> solrWorks = new ArrayList<>(luceneWorks.size());
    List<String> documentsForDeletion = new ArrayList<>();

    for (LuceneWork work : luceneWorks) {
        if (work instanceof AddLuceneWork) {
            SolrInputDocument solrWork = new SolrInputDocument();
            handleAddLuceneWork((AddLuceneWork) work, solrWork);
            solrWorks.add(solrWork);
        } else if (work instanceof UpdateLuceneWork) {
            SolrInputDocument solrWork = new SolrInputDocument();
            handleUpdateLuceneWork((UpdateLuceneWork) work, solrWork);
            solrWorks.add(solrWork);
        } else if (work instanceof DeleteLuceneWork) {
            // deletions are handled via deleteByQuery below, so no input
            // document is built for them
            documentsForDeletion.add(((DeleteLuceneWork) work).getIdInString());
        } else {
            throw new RuntimeException("Encountered unsupported lucene work " + work);
        }
    }
    try {
        deleteDocs(documentsForDeletion);
        if (!solrWorks.isEmpty()) {
            solrServer.add(solrWorks);
        }
        softCommit();
    } catch (SolrServerException | IOException e) {
        throw new RuntimeException("Failed to update solr", e);
    }
}

@Override
public void applyStreamWork(LuceneWork luceneWork, IndexingMonitor indexingMonitor) {
    throw new RuntimeException("HibernateSearchSolrWorkerBackend.applyStreamWork isn't implemented");
}

@Override
public Lock getExclusiveWriteLock() {
    return writeLock;
}

@Override
public void indexMappingChanged() {
}

private void deleteDocs(Collection<String> collection) throws IOException, SolrServerException {
    if (!collection.isEmpty()) {
        StringBuilder stringBuilder = new StringBuilder(collection.size() * 10);
        stringBuilder.append(ID_FIELD_NAME).append(":(");
        boolean first = true;
        for (String id : collection) {
            if (!first) {
                // terms in the standard query parser are separated by
                // whitespace, not commas
                stringBuilder.append(' ');
            } else {
                first = false;
            }
            stringBuilder.append(id);
        }
        stringBuilder.append(')');
        solrServer.deleteByQuery(stringBuilder.toString());
    }
}

private void copyFields(Document document, SolrInputDocument solrInputDocument) {
    // copy each field's string value, taking the id field only once even
    // if it occurs more than once in the Lucene document
    boolean addedId = false;
    for (IndexableField fieldable : document.getFields()) {
        if (fieldable.name().equals(ID_FIELD_NAME)) {
            if (addedId)
                continue;
            else
                addedId = true;
        }
        solrInputDocument.addField(fieldable.name(), fieldable.stringValue());
    }
}

private void handleAddLuceneWork(AddLuceneWork luceneWork, SolrInputDocument solrWork) {
    copyFields(luceneWork.getDocument(), solrWork);
}

private void handleUpdateLuceneWork(UpdateLuceneWork luceneWork, SolrInputDocument solrWork) {
    copyFields(luceneWork.getDocument(), solrWork);
}

private void softCommit() throws IOException, SolrServerException {
    UpdateRequest updateRequest = new UpdateRequest();
    // the Solr request parameter is "softCommit"; "soft-commit" is ignored
    updateRequest.setParam("softCommit", "true");
    updateRequest.setAction(UpdateRequest.ACTION.COMMIT, false, false);
    updateRequest.process(solrServer);
}
}
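Note that ConcurrentUpdateSolrClient buffers documents and flushes them from background threads, so a failing add does not necessarily throw inside applyWork; failures are reported through the client's handleError hook and, by default, only logged. To rule this out while debugging, the hook can be overridden when creating the client (a sketch of mine, not part of the example I followed):

solrServer = new ConcurrentUpdateSolrClient("http://localhost:8983/solr/test", 20, 4) {
    @Override
    public void handleError(Throwable ex) {
        // the default implementation only logs; print loudly so silently
        // dropped updates become visible during debugging
        ex.printStackTrace();
    }
};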

And set the Hibernate properties as follows:

<persistence-unit name="JPAUnit">
    <provider>org.hibernate.ejb.HibernatePersistence</provider>
    <class>search.domain.Book</class>
    <properties>
        <property name="hibernate.search.default.directory_provider" value="filesystem"/>
        <property name="hibernate.search.default.worker.backend" value="search.adapter.HibernateSearchSolrWorkerBackend"/>
    </properties>
</persistence-unit>
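As far as I understand, Hibernate Search instantiates the class named in hibernate.search.default.worker.backend via reflection, so search.adapter.HibernateSearchSolrWorkerBackend has to be on the classpath and provide a public no-arg constructor.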

And tried to index documents by using the following test method:

@Test
@Transactional(propagation = Propagation.REQUIRES_NEW)
@Rollback(false)
public void saveBooks() {
    Book bk1 = new Book(1L, "book1", "book1 description", 100.0);   
    Book bk2 = new Book(2L, "book2", "book2 description", 100.0);
    bookRepository.save(bk1);
    bookRepository.save(bk2);
}
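For completeness, the Book entity is assumed to look roughly like this (field names are guessed from the constructor arguments; the part that matters is the @Indexed/@DocumentId/@Field mapping):

package search.domain;

import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;

@Entity
@Indexed
public class Book {

    @Id
    @DocumentId // becomes the "id" field the backend copies to Solr
    private Long id;

    @Field
    private String title;

    @Field
    private String description;

    private Double price;

    protected Book() {
    }

    public Book(Long id, String title, String description, Double price) {
        this.id = id;
        this.title = title;
        this.description = description;
        this.price = price;
    }
}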

This saves the records to the DB. If I remove

<property name="hibernate.search.default.worker.backend" value="search.adapter.HibernateSearchSolrWorkerBackend"/>

and instead give Hibernate Search an index location in the configuration file, it creates the index properly and searches run successfully. But when I add the custom Apache Solr worker backend, it does not create any index inside the Apache Solr core's data folder.
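For reference, the working variant of the configuration is just the plain filesystem setup, roughly (the indexBase path is only an example):

<property name="hibernate.search.default.directory_provider" value="filesystem"/>
<property name="hibernate.search.default.indexBase" value="/path/to/lucene/indexes"/>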