Removed contact application support prints
diff --git a/bundles/org.simantics.db.indexing/src/org/simantics/db/indexing/IndexedRelationsSearcherBase.java b/bundles/org.simantics.db.indexing/src/org/simantics/db/indexing/IndexedRelationsSearcherBase.java
index 4afbb174e0eb565958f51a94c0fdfc836d2219bb..82f4abdd905d570600a9a10871e4d7f7e43cb414 100644
--- a/bundles/org.simantics.db.indexing/src/org/simantics/db/indexing/IndexedRelationsSearcherBase.java
+++ b/bundles/org.simantics.db.indexing/src/org/simantics/db/indexing/IndexedRelationsSearcherBase.java
-/*******************************************************************************\r
- * Copyright (c) 2007, 2015 Association for Decentralized Information Management\r
- * in Industry THTH ry.\r
- * All rights reserved. This program and the accompanying materials\r
- * are made available under the terms of the Eclipse Public License v1.0\r
- * which accompanies this distribution, and is available at\r
- * http://www.eclipse.org/legal/epl-v10.html\r
- *\r
- * Contributors:\r
- *     VTT Technical Research Centre of Finland - initial API and implementation\r
- *     Semantum Oy - Fix for simantics issue #6053\r
- *******************************************************************************/\r
-package org.simantics.db.indexing;\r
-\r
-import java.io.File;\r
-import java.io.IOException;\r
-import java.util.ArrayList;\r
-import java.util.Collection;\r
-import java.util.Collections;\r
-import java.util.Iterator;\r
-import java.util.List;\r
-import java.util.Map;\r
-import java.util.concurrent.ExecutorService;\r
-import java.util.concurrent.Executors;\r
-import java.util.concurrent.Semaphore;\r
-import java.util.concurrent.ThreadFactory;\r
-import java.util.concurrent.atomic.AtomicReference;\r
-\r
-import org.apache.lucene.document.Document;\r
-import org.apache.lucene.document.DocumentStoredFieldVisitor;\r
-import org.apache.lucene.document.Field;\r
-import org.apache.lucene.document.FieldType;\r
-import org.apache.lucene.document.LongField;\r
-import org.apache.lucene.document.TextField;\r
-import org.apache.lucene.index.CorruptIndexException;\r
-import org.apache.lucene.index.DirectoryReader;\r
-import org.apache.lucene.index.FieldInfo;\r
-import org.apache.lucene.index.IndexNotFoundException;\r
-import org.apache.lucene.index.IndexReader;\r
-import org.apache.lucene.index.IndexWriter;\r
-import org.apache.lucene.index.IndexWriterConfig;\r
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;\r
-import org.apache.lucene.index.IndexableField;\r
-import org.apache.lucene.index.StoredFieldVisitor;\r
-import org.apache.lucene.index.Term;\r
-import org.apache.lucene.queryparser.classic.ParseException;\r
-import org.apache.lucene.search.IndexSearcher;\r
-import org.apache.lucene.search.MatchAllDocsQuery;\r
-import org.apache.lucene.search.Query;\r
-import org.apache.lucene.search.ScoreDoc;\r
-import org.apache.lucene.search.TermQuery;\r
-import org.apache.lucene.search.TopDocs;\r
-import org.apache.lucene.store.Directory;\r
-import org.apache.lucene.store.FSDirectory;\r
-import org.apache.lucene.util.Version;\r
-import org.eclipse.core.runtime.IProgressMonitor;\r
-import org.eclipse.core.runtime.SubMonitor;\r
-import org.simantics.databoard.util.ObjectUtils;\r
-import org.simantics.db.ReadGraph;\r
-import org.simantics.db.RequestProcessor;\r
-import org.simantics.db.Resource;\r
-import org.simantics.db.Session;\r
-import org.simantics.db.common.request.ReadRequest;\r
-import org.simantics.db.common.request.SafeName;\r
-import org.simantics.db.common.utils.Logger;\r
-import org.simantics.db.exception.DatabaseException;\r
-import org.simantics.db.layer0.adapter.GenericRelation;\r
-import org.simantics.db.request.Read;\r
-import org.simantics.db.service.CollectionSupport;\r
-import org.simantics.db.service.SerialisationSupport;\r
-import org.simantics.utils.DataContainer;\r
-import org.simantics.utils.FileUtils;\r
-import org.simantics.utils.datastructures.Pair;\r
-\r
-import gnu.trove.map.hash.THashMap;\r
-\r
-/**\r
- * @author Tuukka Lehtonen\r
- * @author Antti Villberg\r
- */\r
-abstract public class IndexedRelationsSearcherBase {\r
-\r
-    protected enum State {\r
-        // No index is available\r
-        NONE, \r
-        // An index is available, but there is a problem with it\r
-        PROBLEM, \r
-        // An index is available but no reader or writer is ready\r
-        READY,\r
-        // A reader is ready\r
-        READ, \r
-        // A writer (and a reader) is ready\r
-        WRITE\r
-    }\r
-    \r
-    private State state = State.READY;\r
-    private Throwable exception;\r
-    \r
-    public Throwable getException() {\r
-       return exception;\r
-    }\r
-\r
-    public void setProblem(Throwable t) {\r
-       this.state = State.PROBLEM;\r
-       this.exception = t;\r
-    }\r
-    \r
-    public void setNone() {\r
-       this.state = State.NONE;\r
-    }\r
-    \r
-    public void setReady() {\r
-       this.state = State.READY;\r
-    }\r
-    \r
-    protected boolean checkState(State state) {\r
-       return this.state == state;\r
-    }\r
-    \r
-    protected void assertState(State state) throws AssertionError {\r
-\r
-       if(this.state != state) throw new AssertionError("Illegal state, expected " + state.name() + " but was in " + this.state.name());\r
-       \r
-    }\r
-    \r
-    public void changeState(IProgressMonitor monitor, Session session, State state) {\r
-       changeState(monitor, session, state, 0);\r
-    }\r
-\r
-    protected void changeState(IProgressMonitor monitor, Session session, State state, int depth) {\r
-\r
-       if(this.state == state) return;\r
-\r
-       if (IndexPolicy.TRACE_INDEX_MANAGEMENT)\r
-               System.err.println("Index state " + this.state.name() + " => " + state.name() + " " + this);\r
-\r
-       // Check transitions\r
-       \r
-       // Try to exit problem state\r
-       if (State.PROBLEM == this.state && depth > 0) {\r
-               Throwable t = bestEffortClear(monitor, session);\r
-               if(t != null) {\r
-                               exception = t;\r
-                               return;\r
-               }\r
-               // Managed to get into initial state\r
-               this.state = State.NONE;\r
-               return;\r
-       }\r
-\r
-       // Cannot move into read from no index\r
-       if (State.NONE ==  this.state && State.READ == state) return;\r
-       // Cannot move into write from no index\r
-       if (State.NONE ==  this.state && State.WRITE == state) return;\r
-       \r
-               boolean success = false;\r
-\r
-       try {\r
-\r
-               if (searcher != null) {\r
-                       searcher = null;\r
-               }\r
-               if (reader != null) {\r
-                       reader.close();\r
-                       reader = null;\r
-               }\r
-                       closeWriter(writer);\r
-                       directory = null;\r
-                       \r
-                       success = true;\r
-\r
-               // Enter new state\r
-               if (State.READ == state || State.WRITE == state) {\r
-                       \r
-                       success = false;\r
-                       \r
-                       boolean forWriting = State.WRITE == state;\r
-\r
-                       if (directory != null)\r
-                               throw new IllegalStateException(getDescriptor() + "Index already loaded");\r
-\r
-                       SubMonitor mon = SubMonitor.convert(monitor, 100);\r
-\r
-                       mon.beginTask("Loading index", 100);\r
-\r
-                       if (IndexPolicy.TRACE_INDEX_LOAD)\r
-                    System.out.println(getDescriptor() + "Loading Lucene index from " + indexPath + " for " + (forWriting ? "writing" : "reading"));\r
-\r
-                long start = System.nanoTime();\r
-\r
-                directory = getDirectory(session);\r
-\r
-                if (forWriting) {\r
-                    // Never overwrite an index that is about to be loaded.\r
-                    // TODO: could use OpenMode.CREATE_OR_APPEND but must test first\r
-                    IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_4_9, Queries.getAnalyzer()).setOpenMode(OpenMode.APPEND);\r
-                    try {\r
-                        // FIXME: platform #4676\r
-                        writer = new IndexWriter(directory, config);\r
-                    } catch (IndexNotFoundException e) {\r
-                        // There was no pre-existing index on disk. Create it now.\r
-                        writer = new IndexWriter(directory, config.setOpenMode(OpenMode.CREATE));\r
-                        writer.commit();\r
-                    }\r
-                    reader = DirectoryReader.open(directory);\r
-                    searcher = new IndexSearcher(reader);\r
-                } else {\r
-                    reader = DirectoryReader.open(directory);\r
-                    searcher = new IndexSearcher(reader);\r
-                }\r
-\r
-                long end = System.nanoTime();\r
-\r
-                mon.worked(100);\r
-\r
-                if (IndexPolicy.PERF_INDEX_LOAD) {\r
-                    double time = (end - start) * 1e-6;\r
-                    System.out.println(getDescriptor() + "Loaded Lucene index from " + indexPath + " for " + (forWriting ? "writing" : "reading") + " in " + time + " ms");\r
-                }\r
-\r
-                success = true;\r
-                \r
-               }\r
-                \r
-       } catch (Throwable t) {\r
-\r
-               setProblem(t);\r
-\r
-       } finally {\r
-\r
-               if(!success) {\r
-                       this.state = State.PROBLEM;\r
-                       changeState(monitor, session, State.NONE, depth+1);\r
-                       return;\r
-               }\r
-\r
-       }\r
-\r
-       this.state = state;\r
-       \r
-    }\r
-\r
-    public static final FieldType STRING_TYPE = new FieldType();\r
-\r
-    static {\r
-      STRING_TYPE.setIndexed(true);\r
-      STRING_TYPE.setStored(true);\r
-      STRING_TYPE.setTokenized(true);\r
-      STRING_TYPE.freeze();\r
-    }\r
-\r
-    protected static Field makeField(String fieldName, String fieldClass) throws DatabaseException {\r
-        switch (fieldClass) {\r
-        case "Long":   return new LongField(fieldName, 0L, Field.Store.YES);\r
-        case "String": return new Field    (fieldName, "", STRING_TYPE);\r
-        case "Text":   return new TextField(fieldName, "", Field.Store.YES);\r
-        default:\r
-            throw new DatabaseException("Can only index Long, String and Text fields, encountered field type " + fieldClass);\r
-        }\r
-    }\r
-\r
-    protected static Field[] makeFieldsForRelation(GenericRelation r, int boundLength, Document document) throws DatabaseException {\r
-        Pair<String, String>[] fields = r.getFields();\r
-        Field[] fs = new Field[Math.max(0, fields.length - boundLength)];\r
-        for (int i = boundLength; i < fields.length; i++) {\r
-            Field f = makeField(fields[i].first, fields[i].second);\r
-            fs[i - boundLength] = f;\r
-            if (document != null)\r
-                document.add(f);\r
-        }\r
-        return fs;\r
-    }\r
-\r
-    void insertIndex(IProgressMonitor monitor, GenericRelation r, int boundLength, Collection<Object[]> documentsData)\r
-    throws CorruptIndexException, IOException, DatabaseException {\r
-        assertAccessOpen(true);\r
-\r
-        if (IndexPolicy.TRACE_INDEX_UPDATE)\r
-            System.out.println(getDescriptor() + "Inserting " + documentsData.size() + " documents into index at " + indexPath);\r
-\r
-        long start = 0, end = 0;\r
-        if (IndexPolicy.PERF_INDEX_UPDATE)\r
-            start = System.nanoTime();\r
-\r
-        try {\r
-            Document document = new Document();\r
-            Field[] fs = makeFieldsForRelation(r, boundLength, document);\r
-\r
-            for (Object[] documentData : documentsData) {\r
-                for (int i = 0; i < documentData.length; i++) {\r
-                    Object value = documentData[i];\r
-                    if (value instanceof String) {\r
-                        fs[i].setStringValue((String) value);\r
-                    } else if (value instanceof Long) {\r
-                        fs[i].setLongValue((Long) value);\r
-                    } else {\r
-                        System.err.println("Can only index Long and String fields, encountered " + value);\r
-                        // FIXME: should throw an exception for illegal input data but this would leave the index in an incoherent state\r
-                        continue;\r
-                    }\r
-//                    System.out.println("index " + fs[i].name() + " = " + result[i]);\r
-                }\r
-\r
-                if (IndexPolicy.TRACE_INDEX_UPDATE)\r
-                    System.out.println(getDescriptor() + "Inserting document " + document);\r
-\r
-                writer.addDocument(document);\r
-            }\r
-\r
-            if (IndexPolicy.PERF_INDEX_UPDATE) {\r
-                end = System.nanoTime();\r
-                double ms = (end - start) * 1e-6;\r
-                System.out.println(getDescriptor() + "Inserted " + documentsData.size() + " documents into index at " + indexPath + " in " + ms + " ms");\r
-            }\r
-\r
-        } finally {\r
-        }\r
-    }\r
-\r
-    void removeIndex(IProgressMonitor monitor, GenericRelation r, RequestProcessor processor, String key, Collection<Object> keyValues) throws DatabaseException, CorruptIndexException, IOException {\r
-        assertAccessOpen(true);\r
-\r
-        if (IndexPolicy.TRACE_INDEX_UPDATE)\r
-            System.out.println(getDescriptor() + "Removing " + keyValues.size() + " documents from index at " + indexPath);\r
-\r
-        long start = 0, end = 0;\r
-        if (IndexPolicy.PERF_INDEX_UPDATE)\r
-            start = System.nanoTime();\r
-\r
-        try {\r
-            for (Object keyValue : keyValues) {\r
-                Term removedTerm = null;\r
-                if (keyValue instanceof Long) {\r
-                    removedTerm = IndexUtils.longTerm(key, (Long) keyValue);\r
-                } else if (keyValue instanceof String) {\r
-                    removedTerm = new Term(key, (String) keyValue);\r
-                } else {\r
-                    // FIXME: should throw an exception for illegal input data but this would leave the index in an incoherent state\r
-                    continue;\r
-                }\r
-\r
-                if (IndexPolicy.TRACE_INDEX_UPDATE)\r
-                    System.out.println(getDescriptor() + "Removing document with key " + removedTerm);\r
-                writer.deleteDocuments(removedTerm);\r
-            }\r
-\r
-            if (IndexPolicy.PERF_INDEX_UPDATE) {\r
-                end = System.nanoTime();\r
-                double ms = (end - start) * 1e-6;\r
-                System.out.println(getDescriptor() + "Removed " + keyValues.size() + " documents from index at " + indexPath + " in " + ms + " ms");\r
-            }\r
-\r
-        } finally {\r
-        }\r
-    }\r
-\r
-    void removeIndex(IProgressMonitor monitor) throws DatabaseException, CorruptIndexException, IOException {\r
-        assertAccessOpen(true);\r
-\r
-        long start = 0, end = 0;\r
-        if (IndexPolicy.PERF_INDEX_UPDATE)\r
-            start = System.nanoTime();\r
-\r
-        try {\r
-\r
-            writer.deleteAll();\r
-\r
-            if (IndexPolicy.PERF_INDEX_UPDATE) {\r
-                end = System.nanoTime();\r
-                double ms = (end - start) * 1e-6;\r
-                System.out.println(getDescriptor() + "Removed all documents from index at " + indexPath + " in " + ms + " ms");\r
-            }\r
-\r
-        } finally {\r
-        }\r
-    }\r
-    \r
-    boolean replaceIndex(IProgressMonitor monitor, String key, Collection<Object> keyValues, GenericRelation r, int boundLength, Collection<Object[]> documentsData) throws CorruptIndexException, IOException, DatabaseException {\r
-\r
-       boolean didReplace = false;\r
-       \r
-       assertAccessOpen(true);\r
-        if (keyValues.size() != documentsData.size())\r
-            throw new IllegalArgumentException("keyValues size does not match documents data size, " + keyValues.size() + " <> " + documentsData.size());\r
-\r
-        if (IndexPolicy.TRACE_INDEX_UPDATE)\r
-            System.out.println(getDescriptor() + "Replacing " + keyValues.size() + " documents from index at " + indexPath);\r
-\r
-        long start = 0, end = 0;\r
-        if (IndexPolicy.PERF_INDEX_UPDATE)\r
-            start = System.nanoTime();\r
-\r
-        try {\r
-            Iterator<Object> keyIt = keyValues.iterator();\r
-            Iterator<Object[]> documentDataIt = documentsData.iterator();\r
-\r
-            Document document = new Document();\r
-            Field[] fs = makeFieldsForRelation(r, boundLength, document);\r
-\r
-            nextDocument:\r
-                while (keyIt.hasNext()) {\r
-                    Object keyValue = keyIt.next();\r
-                    Object[] documentData = documentDataIt.next();\r
-\r
-                    Term removedTerm = null;\r
-                    if (keyValue instanceof Long) {\r
-                        removedTerm = IndexUtils.longTerm(key, (Long) keyValue);\r
-                    } else if (keyValue instanceof String) {\r
-                        removedTerm = new Term(key, (String) keyValue);\r
-                    } else {\r
-                        // FIXME: should throw an exception for illegal input data but this would leave the index in an incoherent state\r
-                        System.err.println("[" + getClass().getSimpleName() + "] Unrecognized document key to remove '" + keyValue + "', only " + String.class + " and " + Resource.class + " are supported.");\r
-                        continue nextDocument;\r
-                    }\r
-\r
-                    for (int i = 0; i < documentData.length; i++) {\r
-                        Object value = documentData[i];\r
-                        if (value instanceof String) {\r
-                            fs[i].setStringValue((String) value);\r
-                        } else if (value instanceof Long) {\r
-                            fs[i].setLongValue((Long)value);\r
-                        } else {\r
-                            // FIXME: should throw an exception for illegal input data but this would leave the index in an incoherent state\r
-                            System.err.println("[" + getClass().getSimpleName() + "] Unrecognized document value '" + value + "' for field '" + fs[i].toString() + "', only " + String.class + " and " + Resource.class + " are supported.");\r
-                            continue nextDocument;\r
-                        }\r
-                    }\r
-\r
-                    if (IndexPolicy.TRACE_INDEX_UPDATE)\r
-                        System.out.println(getDescriptor() + "Replacing document with key " + removedTerm + " with " + document);\r
-\r
-                    boolean done = false;\r
-                    if(requireChangeInfoOnReplace()) {\r
-                           TopDocs exist = searcher.search(new TermQuery(removedTerm), null, 2);\r
-                           if(exist.scoreDocs.length == 1 && requireChangeInfoOnReplace()) {\r
-                               Document doc = reader.document(exist.scoreDocs[0].doc);\r
-                               if(!areSame(doc, document)) {\r
-                                   writer.deleteDocuments(removedTerm);\r
-                                   writer.addDocument(document);\r
-                                   didReplace |= true;\r
-                                   if (IndexPolicy.TRACE_INDEX_UPDATE)\r
-                                       System.out.println("-replaced single existing");\r
-                               } else {\r
-                                   if (IndexPolicy.TRACE_INDEX_UPDATE)\r
-                                       System.out.println("-was actually the same as the single existing");\r
-                               }\r
-                               done = true;\r
-                           } \r
-                    }\r
-                    if(!done) {\r
-                       writer.deleteDocuments(removedTerm);\r
-                       writer.addDocument(document);\r
-                       didReplace |= true;\r
-                       if (IndexPolicy.TRACE_INDEX_UPDATE)\r
-                               System.out.println("-had many or none - removed all existing");\r
-                    }\r
-                    \r
-                }\r
-\r
-            if (IndexPolicy.PERF_INDEX_UPDATE) {\r
-                end = System.nanoTime();\r
-                double ms = (end - start) * 1e-6;\r
-                System.out.println(getDescriptor() + "Replaced " + keyValues.size() + " documents from index at " + indexPath + " in " + ms + " ms");\r
-            }\r
-\r
-        } finally {\r
-        }\r
-        \r
-        return didReplace;\r
-        \r
-    }\r
-    \r
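The replaceIndex method above performs an explicit delete-then-add per key, optionally comparing the already-stored document first (see areSame below) so unchanged documents are not rewritten. When that change detection is not needed, Lucene's IndexWriter can do the same replacement atomically; a minimal sketch, reusing the removedTerm/document pair built above:

```java
// Alternative sketch only, not what replaceIndex does when change detection
// is required: updateDocument deletes all documents matching the term and
// adds the new document in a single call.
writer.updateDocument(removedTerm, document);
```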
-    protected boolean requireChangeInfoOnReplace() {\r
-       return true;\r
-    }\r
-    \r
-    private boolean areSame(Document d1, Document d2) {\r
-       List<IndexableField> fs1 = d1.getFields();\r
-       List<IndexableField> fs2 = d2.getFields();\r
-       if(fs1.size() != fs2.size()) return false;\r
-       for(int i=0;i<fs1.size();i++) {\r
-               IndexableField f1 = fs1.get(i);\r
-               IndexableField f2 = fs2.get(i);\r
-               String s1 = f1.stringValue();\r
-               String s2 = f2.stringValue();\r
-            if (IndexPolicy.TRACE_INDEX_UPDATE)\r
-               System.err.println("areSame " + s1 + " vs. " + s2 );\r
-               if(!ObjectUtils.objectEquals(s1,s2)) return false;\r
-       }\r
-       return true;\r
-    }\r
-\r
-    final RequestProcessor session;\r
-\r
-    final Resource         relation;\r
-\r
-    /**\r
-     * The schema of the index, i.e. the fields that will be indexed per\r
-     * document for the specified relation. Since the relation stays the same\r
-     * throughout the lifetime of this class, the index schema is also assumed\r
-     * to the same. This means that {@link GenericRelation#getFields()} is\r
-     * to stay the same. This means that {@link GenericRelation#getFields()} is\r
-     */\r
-    final IndexSchema      schema;\r
-\r
-    Resource         input;\r
-\r
-    File             indexPath;\r
-\r
-    Directory        directory;\r
-\r
-    IndexReader      reader;\r
-\r
-    IndexWriter      writer;\r
-\r
-    IndexSearcher    searcher;\r
-\r
-    IndexedRelationsSearcherBase(RequestProcessor session, Resource relation, Resource input) {\r
-        this.session = session;\r
-        this.relation = relation;\r
-        this.input = input;\r
-        this.indexPath = getIndexDirectory(session.getSession(), relation, input);\r
-        if(isIndexAvailable()) {\r
-               state = State.READY;\r
-        } else {\r
-               state = State.NONE;\r
-        }\r
-        this.schema = IndexSchema.readFromRelation(session, relation);\r
-    }\r
-\r
-    Directory getDirectory(Session session) throws IOException {\r
-        return FSDirectory.open(indexPath);\r
-    }\r
-\r
-    abstract String getDescriptor();\r
-    \r
-    /**\r
-     * Ensures that searcher is in read or write state.\r
-     * \r
-     * @param forWriting <code>true</code> to open index for writing,\r
-     *        <code>false</code> for reading\r
-     * @return true if the required state was reached\r
-     *        \r
-     */\r
-    boolean startAccess(IProgressMonitor monitor, Session session, boolean forWriting) {\r
-       if(forWriting) {\r
-               changeState(monitor, session, State.WRITE);\r
-               return checkState(State.WRITE);\r
-       } else {\r
-               changeState(monitor, session, State.READ);\r
-               return checkState(State.READ);\r
-       }\r
-    }\r
-\r
-    boolean hasAccess(boolean forWriting) {\r
-       \r
-        if (forWriting)\r
-               return checkState(State.WRITE); \r
-        else\r
-               return checkState(State.WRITE) || checkState(State.READ);\r
-       \r
-    }\r
-    \r
-    void assertAccessOpen(boolean forWriting) {\r
-        if (forWriting) {\r
-            if (!checkState(State.WRITE))\r
-                throw new IllegalStateException("index not opened for writing (directory=" + directory + ", reader=" + reader + ")");\r
-        } else {\r
-            if (!(checkState(State.WRITE) || checkState(State.READ)))\r
-                throw new IllegalStateException("index not opened for reading (directory=" + directory + ", writer=" + writer + ")");\r
-        }\r
-    }\r
-    \r
-    void closeWriter(IndexWriter writer) throws CorruptIndexException, IOException {\r
-        if (writer == null)\r
-            return;\r
-\r
-        try {\r
-            // May throw OOME, see IndexWriter javadoc for the correct actions.\r
-            writer.close(false);\r
-        } catch (OutOfMemoryError e) {\r
-            writer.close();\r
-            throw e;\r
-        }\r
-    }\r
-\r
-    private static String getPattern(GenericRelation relation, int boundCount) {\r
-        String result = "";\r
-        for (int i = 0; i < boundCount; i++)\r
-            result += "b";\r
-        for (int i = 0; i < relation.getFields().length - boundCount; i++)\r
-            result += "f";\r
-        return result;\r
-    }\r
-    \r
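getPattern marks each bound argument with 'b' and each remaining free field with 'f'; the resulting string is the selection pattern consumed by GenericRelation.select. For example, with a relation of five fields and two bound arguments (hypothetical values):

```java
// Hypothetical example: r.getFields().length == 5, bound = { modelResource, typeResource }
String pattern = getPattern(r, bound.length);          // "bbfff"
GenericRelation selection = r.select(pattern, bound);
```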
-    private static final int INDEXING_THREAD_COUNT = 2;\r
-    \r
-    private static final ExecutorService executor = Executors.newFixedThreadPool(INDEXING_THREAD_COUNT, new ThreadFactory() {\r
-        @Override\r
-        public Thread newThread(Runnable r) {\r
-            Thread t = new Thread(r, "Lucene Index Creator");\r
-            if (t.isDaemon())\r
-                t.setDaemon(true);\r
-            if (t.getPriority() != Thread.NORM_PRIORITY)\r
-                t.setPriority(Thread.NORM_PRIORITY);\r
-            return t;\r
-        }\r
-    });\r
-\r
-    void initializeIndex(IProgressMonitor monitor, ReadGraph graph, final Object[] bound, boolean overwrite) throws IOException,\r
-    DatabaseException {\r
-\r
-        final SubMonitor mon = SubMonitor.convert(monitor, 100);\r
-\r
-        if (IndexPolicy.TRACE_INDEX_INIT)\r
-            System.out.println(getDescriptor() + "Initializing index at " + indexPath + " (overwrite = " + overwrite + ")");\r
-        mon.beginTask("Initializing Index", 100);\r
-\r
-        if (overwrite) {\r
-            mon.subTask("Erasing previous index");\r
-            FileUtils.deleteAll(indexPath);\r
-        }\r
-\r
-        final AtomicReference<FSDirectory> directory = new AtomicReference<FSDirectory>();\r
-        final AtomicReference<IndexWriter> writer = new AtomicReference<IndexWriter>();\r
-\r
-        try {\r
-            mon.subTask("Create index at " + indexPath.toString());\r
-            createDirectory(indexPath);\r
-\r
-            directory.set(FSDirectory.open(indexPath));\r
-            IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_4_9, Queries.getAnalyzer()).setOpenMode(OpenMode.CREATE);\r
-            writer.set(new IndexWriter(directory.get(), conf));\r
-\r
-            mon.worked(5);\r
-\r
-            final DataContainer<Long> start = new DataContainer<Long>();\r
-\r
-            graph.syncRequest(new ReadRequest() {\r
-\r
-                @Override\r
-                public void run(ReadGraph graph) throws DatabaseException {\r
-\r
-                    final GenericRelation r = graph.adapt(relation, GenericRelation.class);\r
-                    if (r == null)\r
-                        throw new DatabaseException("Given resource " + graph.syncRequest(new SafeName(relation))\r
-                                + "could not be adapted to GenericRelation.");\r
-\r
-                    mon.worked(45);\r
-\r
-                    GenericRelation selection = r.select(getPattern(r, bound.length), bound);\r
-                    \r
-                    long perfStart = 0;\r
-                    if (IndexPolicy.PERF_INDEX_INIT)\r
-                        perfStart = System.nanoTime();\r
-\r
-                    final List<Object[]> results = selection.realize(graph);\r
-\r
-                    if (IndexPolicy.PERF_INDEX_INIT)\r
-                        System.out.println(getDescriptor() + "Realized index at " + indexPath + " in " + (1e-9 * (System.nanoTime()-perfStart)) + " seconds.");\r
-                    \r
-                    if (IndexPolicy.TRACE_INDEX_INIT)\r
-                        System.out.println(getDescriptor() + "Indexed relation " + r + " produced " + results.size() + " results");\r
-\r
-                    if (IndexPolicy.PERF_INDEX_INIT)\r
-                        start.set(System.nanoTime());\r
-\r
-                    final Semaphore s = new Semaphore(0);\r
-\r
-                    for(int i=0;i<INDEXING_THREAD_COUNT;i++) {\r
-                       \r
-                       final int startIndex = i;\r
-                       \r
-                       executor.submit(new Runnable() {\r
-\r
-                                                       @Override\r
-                                                       public void run() {\r
-                                                               \r
-                                                               try {\r
-                                                                       \r
-                                                   final Document document = new Document();\r
-\r
-                                                                       Field[] fs = makeFieldsForRelation(r, bound.length, document);\r
-\r
-                                                                       for (int index = startIndex; index < results.size(); index+=INDEXING_THREAD_COUNT) {\r
-                                                                               Object[] result = results.get(index);\r
-                                                                               for (int i = 0; i < result.length; i++) {\r
-                                                                                       Object value = result[i];\r
-                                                                                       if (value instanceof String) {\r
-                                                                                               if (IndexPolicy.DEBUG_INDEX_INIT)\r
-                                                                                                       System.out.println(getDescriptor() + "index " + fs[i].name() + " = " + value + " : String");\r
-                                                                                               fs[i].setStringValue((String) value);\r
-                                                                                       } else if (value instanceof Long) {\r
-                                                                                               if (IndexPolicy.DEBUG_INDEX_INIT)\r
-                                                                                                       System.out.println(getDescriptor() + "index " + fs[i].name() + " = " + value + " : Long");\r
-                                                                                               fs[i].setLongValue((Long) value);\r
-                                                                                       }\r
-                                                                               }\r
-                                                                               try {\r
-                                                                                       writer.get().addDocument(document);\r
-                                                                               } catch (CorruptIndexException e) {\r
-                                                                                       throw new IllegalStateException(e);\r
-                                                                               } catch (IOException e) {\r
-                                                                                       throw new IllegalStateException(e);\r
-                                                                               } finally {\r
-                                                                                       mon.worked(1);\r
-                                                                               }\r
-                                                                       }\r
-                                                               } catch (DatabaseException e) {\r
-                                                                   Logger.defaultLogError("DatabaseException occurred while initializing index", e);\r
-                                                               } catch (Throwable t) {\r
-                                                                   Logger.defaultLogError("Fatal error occurred while initializing index", t);\r
-                                                               } finally {\r
-                                                                   s.release();\r
-                                                               }\r
-                                                       }\r
-                       });\r
-                    }\r
-                    try {\r
-                                               s.acquire(INDEXING_THREAD_COUNT);\r
-                                       } catch (InterruptedException e) {\r
-                                               e.printStackTrace();\r
-                                       }\r
-                }\r
-            });\r
-\r
-            // http://www.gossamer-threads.com/lists/lucene/java-dev/47895\r
-            // and http://lucene.apache.org/java/docs/index.html#27+November+2011+-+Lucene+Core+3.5.0\r
-            // advise against calling optimize at all. So let's not do it anymore.\r
-            //writer.get().optimize();\r
-            //writer.get().commit();\r
-\r
-            if (IndexPolicy.PERF_INDEX_INIT) {\r
-                long end = System.nanoTime();\r
-                System.out.println(getDescriptor() + "Wrote index at " + indexPath + " in " + (1e-9 * (end-start.get())) + " seconds.");\r
-            }\r
-\r
-        } catch (DatabaseException e) {\r
-            \r
-            Logger.defaultLogError(e);\r
-            \r
-        } finally {\r
-            try {\r
-                closeWriter(writer.getAndSet(null));\r
-            } finally {\r
-                directory.getAndSet(null).close();\r
-            }\r
-        }\r
-    }\r
-\r
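initializeIndex above splits the realized rows across INDEXING_THREAD_COUNT worker threads in a strided fashion and joins them with a semaphore; the shared IndexWriter is thread-safe, so all workers add documents to the same writer. A condensed sketch of that scheme, assuming results, executor and writer as above:

```java
// Striding sketch: worker k indexes rows k, k + N, k + 2N, ... and releases
// the semaphore when finished, so the caller can wait for all N workers.
final Semaphore done = new Semaphore(0);
final int N = INDEXING_THREAD_COUNT;
for (int k = 0; k < N; k++) {
    final int offset = k;
    executor.submit(() -> {
        try {
            for (int row = offset; row < results.size(); row += N) {
                // build a Document from results.get(row) and add it via the shared writer
            }
        } finally {
            done.release(); // always release so the join below cannot hang
        }
    });
}
try {
    done.acquire(N); // wait until all N workers have finished
} catch (InterruptedException e) {
    Thread.currentThread().interrupt(); // restore interrupt status
}
```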
-    \r
-    public List<Object[]> debugDocs(IProgressMonitor monitor) throws ParseException, IOException, DatabaseException {\r
-    \r
-           Query query = new MatchAllDocsQuery(); \r
-       \r
-           TopDocs td = searcher.search(query, Integer.MAX_VALUE);\r
-    \r
-           ScoreDoc[ ] scoreDocs = td.scoreDocs; \r
-           List<Object[]> result = new ArrayList<Object[]>(scoreDocs.length);\r
-       \r
-           for(ScoreDoc scoreDoc:scoreDocs) {\r
-       \r
-               try {\r
-       \r
-                   Document doc = reader.document(scoreDoc.doc);\r
-                   List<IndexableField> fs = doc.getFields();\r
-                   Object[] o = new Object[fs.size()];\r
-                   int index = 0; \r
-                   for (IndexableField f : fs) {\r
-                    o[index++] = f.stringValue();\r
-                   }\r
-                   result.add(o);\r
-       \r
-               } catch (CorruptIndexException e) {\r
-                   throw new DatabaseException(e);\r
-               } catch (IOException e) {\r
-                   throw new DatabaseException(e);\r
-               }\r
-\r
-           }\r
-           \r
-           return result;\r
-           \r
-    }\r
-\r
-    \r
-    List<Map<String, Object>> doSearch(IProgressMonitor monitor, RequestProcessor processor, String search, int maxResultCount) throws ParseException, IOException,\r
-    DatabaseException {\r
-\r
-        // An empty search string will crash QueryParser\r
-        // Just return no results for empty queries.\r
-        //System.out.println("search: '" + search + "'");\r
-        if (search.isEmpty())\r
-            return Collections.emptyList();\r
-\r
-        assertAccessOpen(false);\r
-\r
-        Query query = Queries.parse(search, schema);\r
-\r
-        long start = System.nanoTime();\r
-\r
-        maxResultCount = Math.min(maxResultCount, searcher.getIndexReader().numDocs());\r
-        if (maxResultCount == 0)\r
-            return Collections.emptyList();\r
-        \r
-        final TopDocs docs = searcher.search(query, null, maxResultCount);\r
-        \r
-//        for(Object[] o : debugDocs(monitor)) {\r
-//            System.err.println("-" + Arrays.toString(o));\r
-//        }\r
-        \r
-        if (IndexPolicy.PERF_INDEX_QUERY) {\r
-            long end = System.nanoTime();\r
-            System.out.println(getDescriptor() + "search(" + search + ", " + maxResultCount + ") into index at " + indexPath + " took " + (1e-9 * (end-start)) + " seconds.");\r
-        }\r
-\r
-        if (docs.totalHits == 0) {\r
-            return Collections.emptyList();\r
-        }\r
-\r
-        return processor.syncRequest(new Read<List<Map<String, Object>>>() {\r
-\r
-            @Override\r
-            public List<Map<String, Object>> perform(ReadGraph graph) throws DatabaseException {\r
-\r
-                GenericRelation r = graph.adapt(relation, GenericRelation.class);\r
-                if (r == null)\r
-                    throw new DatabaseException("Given resource " + graph.syncRequest(new SafeName(relation))\r
-                            + "could not be adapted to GenericRelation.");\r
-\r
-                SerialisationSupport support = graph.getService(SerialisationSupport.class);\r
-\r
-                List<Map<String, Object>> result = new ArrayList<Map<String, Object>>(docs.scoreDocs.length);\r
-                \r
-                final DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor();\r
-                \r
-                for (ScoreDoc scoreDoc : docs.scoreDocs) {\r
-\r
-                    try {\r
-\r
-                        reader.document(scoreDoc.doc, visitor);\r
-                        \r
-                        Document doc = visitor.getDocument();\r
-\r
-                        List<IndexableField> fs = doc.getFields();\r
-                        Map<String, Object> entry = new THashMap<String, Object>(fs.size());\r
-                        for (IndexableField f : fs) {\r
-                            IndexSchema.Type type = schema.typeMap.get(f.name());\r
-                            if (type == IndexSchema.Type.LONG) {\r
-                                entry.put(f.name(), support.getResource(f.numericValue().longValue()));\r
-                            } else {\r
-                                entry.put(f.name(), f.stringValue());\r
-                            }\r
-                        }\r
-                        \r
-                        result.add(entry);\r
-\r
-                    } catch (CorruptIndexException e) {\r
-                        throw new DatabaseException(e);\r
-                    } catch (IOException e) {\r
-                        throw new DatabaseException(e);\r
-                    }\r
-\r
-                }\r
-\r
-                return result;\r
-\r
-            }\r
-        });\r
-    }\r
-\r
-    static class ResourceVisitor extends StoredFieldVisitor {\r
-       \r
-       public long id;\r
-\r
-               @Override\r
-               public Status needsField(FieldInfo fieldInfo) throws IOException {\r
-                       if("Resource".equals(fieldInfo.name)) return Status.YES;\r
-                       return Status.NO;\r
-               }\r
-               \r
-               @Override\r
-               public void longField(FieldInfo fieldInfo, long value) throws IOException {\r
-                       id = value;\r
-               }\r
-       \r
-    };\r
-    \r
-    static class DumpVisitor extends StoredFieldVisitor {\r
-\r
-       public List<Object> values;\r
-       \r
-       DumpVisitor(List<Object> values) {\r
-               this.values = values;\r
-       }\r
-\r
-               @Override\r
-               public Status needsField(FieldInfo fieldInfo) throws IOException {\r
-                       return Status.YES;\r
-               }\r
-               \r
-               @Override\r
-               public void longField(FieldInfo fieldInfo, long value) throws IOException {\r
-                       values.add(value);\r
-               }\r
-               \r
-               @Override\r
-               public void stringField(FieldInfo fieldInfo, String value) throws IOException {\r
-                       values.add(value);\r
-               }\r
-\r
-    }\r
-\r
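ResourceVisitor and DumpVisitor exist so that hits can be decoded without materializing every stored field: IndexReader.document(docID, visitor) pushes only the fields accepted by needsField through the typed callbacks. In isolation, the pattern used by doSearchResources below looks like this:

```java
// Usage sketch: read only the "Resource" field of a hit through the visitor
// instead of loading the whole stored Document.
ResourceVisitor visitor = new ResourceVisitor();
reader.document(scoreDoc.doc, visitor); // invokes needsField/longField
long resourceId = visitor.id;
```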
-    List<Resource> doSearchResources(IProgressMonitor monitor, RequestProcessor processor, String search, int maxResultCount) throws ParseException, IOException,\r
-    DatabaseException {\r
-\r
-        // An empty search string will crash QueryParser\r
-        // Just return no results for empty queries.\r
-        //System.out.println("search: '" + search + "'");\r
-        if (search.isEmpty())\r
-            return Collections.emptyList();\r
-\r
-        assertAccessOpen(false);\r
-\r
-        Query query = Queries.parse(search, schema);\r
-\r
-        long start = System.nanoTime();\r
-\r
-        maxResultCount = Math.min(maxResultCount, searcher.getIndexReader().numDocs());\r
-        if (maxResultCount == 0)\r
-            return Collections.emptyList();\r
-        \r
-        final TopDocs docs = searcher.search(query, null, maxResultCount);\r
-        \r
-//        for(Object[] o : debugDocs(monitor)) {\r
-//            System.err.println("-" + Arrays.toString(o));\r
-//        }\r
-        \r
-        if (IndexPolicy.PERF_INDEX_QUERY) {\r
-            long end = System.nanoTime();\r
-            System.out.println(getDescriptor() + "search(" + search + ", " + maxResultCount + ") into index at " + indexPath + " took " + (1e-9 * (end-start)) + " seconds.");\r
-        }\r
-\r
-        if (docs.totalHits == 0) {\r
-            return Collections.emptyList();\r
-        }\r
-        \r
-        return processor.syncRequest(new Read<List<Resource>>() {\r
-\r
-            @Override\r
-            public List<Resource> perform(ReadGraph graph) throws DatabaseException {\r
-\r
-               CollectionSupport cs = graph.getService(CollectionSupport.class);\r
-                SerialisationSupport support = graph.getService(SerialisationSupport.class);\r
-                \r
-               List<Resource> result = cs.createList();\r
-                \r
-               ResourceVisitor visitor = new ResourceVisitor();\r
-                \r
-                for (ScoreDoc scoreDoc : docs.scoreDocs) {\r
-\r
-                    try {\r
-                       \r
-                       reader.document(scoreDoc.doc, visitor);\r
-                       result.add(support.getResource(visitor.id));\r
-\r
-                    } catch (CorruptIndexException e) {\r
-                        throw new DatabaseException(e);\r
-                    } catch (IOException e) {\r
-                        throw new DatabaseException(e);\r
-                    }\r
-\r
-                }\r
-\r
-                return result;\r
-\r
-            }\r
-        });\r
-    }\r
-\r
-    List<Object> doList(IProgressMonitor monitor, RequestProcessor processor) throws ParseException, IOException,\r
-    DatabaseException {\r
-\r
-        assertAccessOpen(false);\r
-\r
-        Query query = new MatchAllDocsQuery(); \r
-\r
-        final TopDocs docs = searcher.search(query, Integer.MAX_VALUE);\r
-        \r
-        ArrayList<Object> result = new ArrayList<Object>();\r
-        \r
-        DumpVisitor visitor = new DumpVisitor(result);\r
-                \r
-        for (ScoreDoc scoreDoc : docs.scoreDocs) {\r
-\r
-               try {\r
-\r
-                       reader.document(scoreDoc.doc, visitor);\r
-\r
-               } catch (CorruptIndexException e) {\r
-                       throw new DatabaseException(e);\r
-               } catch (IOException e) {\r
-                       throw new DatabaseException(e);\r
-               }\r
-\r
-        }\r
-\r
-        return result;\r
-\r
-    }\r
-    \r
-    protected static File getIndexDirectory(Session session, Resource relation, Resource input) {\r
-        File path = DatabaseIndexing.getIndexLocation(session, relation, input);\r
-//        System.out.println("getIndexDirectory = " + path);\r
-        return path;\r
-    }\r
-\r
-    private static void createDirectory(File path) throws IOException {\r
-        if (path.exists() && !path.isDirectory())\r
-            throw new IOException("Could not create index directory " + path + ", a file by that name already exists");\r
-        path.mkdirs();\r
-        if (!path.exists())\r
-            throw new IOException("Could not create index directory " + path + " for an unknown reason");\r
-        if (!path.isDirectory())\r
-            throw new IOException("Could not create index directory " + path + ", a file by that name already exists");\r
-    }\r
-\r
-    File getIndexPath() {\r
-        return indexPath;\r
-    }\r
-\r
-    boolean isIndexAvailable() {\r
-        return (indexPath.exists() && indexPath.isDirectory());\r
-    }\r
-    \r
-    Throwable bestEffortClear(IProgressMonitor monitor, Session session) {\r
-       return null;\r
-    }\r
-\r
-    /*\r
-     * Start from scratch. Clear all caches and rebuild the index. \r
-     */\r
-    Throwable clearDirectory(IProgressMonitor monitor, Session session) {\r
-        \r
-               File file = getIndexPath();\r
-\r
-       try {\r
-\r
-               for(int i=0;i<15;i++) {\r
-                       FileUtils.deleteDir(file);\r
-                       if(!file.exists()) {\r
-                               return null;\r
-                       }\r
-                       try {\r
-                               Thread.sleep(i*100);\r
-                       } catch (InterruptedException e) {\r
-                       }\r
-               }\r
-\r
-       } catch (Throwable t) {\r
-\r
-               return t;\r
-               \r
-       }\r
-\r
-       return new IllegalStateException("Failed to delete directory " + file.getAbsolutePath());\r
-\r
-    }\r
-\r
-}\r
+/*******************************************************************************
+ * Copyright (c) 2007, 2015 Association for Decentralized Information Management
+ * in Industry THTH ry.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ *     VTT Technical Research Centre of Finland - initial API and implementation
+ *     Semantum Oy - Fix for simantics issue #6053
+ *******************************************************************************/
+package org.simantics.db.indexing;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.atomic.AtomicReference;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DocumentStoredFieldVisitor;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.LongField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.IndexNotFoundException;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.index.StoredFieldVisitor;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queryparser.classic.ParseException;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.Version;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.SubMonitor;
+import org.simantics.databoard.util.ObjectUtils;
+import org.simantics.db.ReadGraph;
+import org.simantics.db.RequestProcessor;
+import org.simantics.db.Resource;
+import org.simantics.db.Session;
+import org.simantics.db.common.request.SafeName;
+import org.simantics.db.common.utils.NameUtils;
+import org.simantics.db.exception.DatabaseException;
+import org.simantics.db.indexing.internal.IndexingJob;
+import org.simantics.db.layer0.adapter.GenericRelation;
+import org.simantics.db.layer0.genericrelation.IndexException;
+import org.simantics.db.request.Read;
+import org.simantics.db.service.CollectionSupport;
+import org.simantics.db.service.SerialisationSupport;
+import org.simantics.utils.FileUtils;
+import org.simantics.utils.datastructures.Pair;
+import org.simantics.utils.threads.ThreadUtils;
+import org.slf4j.Logger;
+
+import gnu.trove.map.hash.THashMap;
+
+/**
+ * @author Tuukka Lehtonen
+ * @author Antti Villberg
+ */
+abstract public class IndexedRelationsSearcherBase {
+
+    protected enum State {
+        // No index is available
+        NONE, 
+        // An index is available, but there is a problem with it
+        PROBLEM, 
+        // An index is available but no reader or writer is ready
+        READY,
+        // A reader is ready
+        READ, 
+        // A writer (and a reader) is ready
+        WRITE
+    }
+    
+    private State state = State.READY;
+    private Throwable exception;
+    
+    public Throwable getException() {
+       return exception;
+    }
+
+    public void setProblem(Throwable t) {
+        if (t != null)
+            getLogger().error("Setting problem for {} and previous state {}", this, this.state, t);
+       this.state = State.PROBLEM;
+       this.exception = t;
+    }
+    
+    public void setNone() {
+       this.state = State.NONE;
+    }
+    
+    public void setReady() {
+       this.state = State.READY;
+    }
+    
+    public State state() {
+       return state;
+    }
+    
+    protected boolean checkState(State state) {
+       return this.state == state;
+    }
+    
+    protected void assertState(State state) throws IndexException {
+        State s = this.state;
+        if (s != state)
+            throw new IndexException("Illegal index searcher state, expected " + state.name() + " but state was " + s.name());
+    }
+    
+    public void changeState(IProgressMonitor monitor, Session session, State state) {
+       changeState(monitor, session, state, 0);
+    }
+
+    protected void changeState(IProgressMonitor monitor, Session session, State state, int depth) {
+
+       if (this.state == state) {
+           if (getLogger().isDebugEnabled())
+               getLogger().debug("Trying to change state {} to the same as previous state {} in depth {} with {}", state, this.state, depth, this);
+           return;
+       }
+
+       if (IndexPolicy.TRACE_INDEX_MANAGEMENT)
+               System.err.println("Index state " + this.state.name() + " => " + state.name() + " " + this);
+
+       // Check transitions
+       
+       // Try to exit problem state
+       if (State.PROBLEM == this.state && depth > 0) {
+           getLogger().info("Try to exit problem state for {} and state {}", this, state);
+               Throwable t = bestEffortClear(monitor, session);
+               if(t != null) {
+                   getLogger().error("Best effort clear has failed for state {} and this {}", state, this, t);
+                               exception = t;
+                               return;
+               }
+               // Managed to get into initial state
+               this.state = State.NONE;
+               getLogger().info("Managed to get into initial state {}", this.state);
+               return;
+       }
+
+       // Cannot move into read from no index
+       if (State.NONE ==  this.state && State.READ == state) {
+           if (getLogger().isDebugEnabled())
+               getLogger().debug("Cannot move into read from no index in {} with state {}", this, state);
+           return;
+       }
+       // Cannot move into write from no index
+       if (State.NONE ==  this.state && State.WRITE == state) {
+           if (getLogger().isDebugEnabled())
+               getLogger().debug("Cannot move into write from no index in {} with state {}", this, state);
+           return;
+       }
+       
+               boolean success = false;
+
+       try {
+
+               if (searcher != null) {
+                       searcher = null;
+               }
+               if (reader != null) {
+                       reader.close();
+                       reader = null;
+               }
+                       closeWriter(writer);
+                       directory = null;
+                       
+                       success = true;
+
+               // Enter new state
+               if (State.READ == state || State.WRITE == state) {
+                       
+                       success = false;
+                       
+                       boolean forWriting = State.WRITE == state;
+
+                       if (directory != null)
+                               throw new IllegalStateException(getDescriptor() + "Index already loaded");
+
+                       SubMonitor mon = SubMonitor.convert(monitor, 100);
+
+                       mon.beginTask("Loading index", 100);
+
+                       if (IndexPolicy.TRACE_INDEX_LOAD)
+                    System.out.println(getDescriptor() + "Loading Lucene index from " + indexPath + " for " + (forWriting ? "writing" : "reading"));
+
+                long start = System.nanoTime();
+
+                directory = getDirectory(session);
+
+                if (forWriting) {
+                    // Never overwrite an index that is about to be loaded.
+                    // TODO: could use OpenMode.CREATE_OR_APPEND but must test first
+                    IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_4_9, Queries.getAnalyzer()).setOpenMode(OpenMode.APPEND);
+                    try {
+                        // FIXME: platform #4676
+                        writer = new IndexWriter(directory, config);
+                    } catch (IndexNotFoundException e) {
+                        // There was no pre-existing index on disk. Create it now.
+                        writer = new IndexWriter(directory, config.setOpenMode(OpenMode.CREATE));
+                        writer.commit();
+                    }
+                    reader = DirectoryReader.open(directory);
+                    searcher = new IndexSearcher(reader);
+                } else {
+                    reader = DirectoryReader.open(directory);
+                    searcher = new IndexSearcher(reader);
+                }
+
+                long end = System.nanoTime();
+
+                mon.worked(100);
+
+                if (IndexPolicy.PERF_INDEX_LOAD) {
+                    double time = (end - start) * 1e-6;
+                    System.out.println(getDescriptor() + "Loaded Lucene index from " + indexPath + " for " + (forWriting ? "writing" : "reading") + " in " + time + " ms");
+                }
+
+                success = true;
+                
+               }
+            }
+
+        } catch (Throwable t) {
+            setProblem(t);
+        } finally {
+            if (!success) {
+                this.state = State.PROBLEM;
+                changeState(monitor, session, State.NONE, depth+1);
+                return;
+            }
+        }
+
+        this.state = state;
+    }
+
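+    /**
+     * Field type used for "String" index fields: indexed, stored and tokenized.
+     */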
+    public static final FieldType STRING_TYPE = new FieldType();
+
+    static {
+      STRING_TYPE.setIndexed(true);
+      STRING_TYPE.setStored(true);
+      STRING_TYPE.setTokenized(true);
+      STRING_TYPE.freeze();
+    }
+
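+    /**
+     * Creates an empty, reusable Lucene field for the given index field class
+     * ("Long", "String" or "Text"); the actual value is filled in later via
+     * {@link #setFields(Field[], Object[])}.
+     */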
+    protected static Field makeField(String fieldName, String fieldClass) throws DatabaseException {
+        switch (fieldClass) {
+        case "Long":   return new LongField(fieldName, 0L, Field.Store.YES);
+        case "String": return new Field    (fieldName, "", STRING_TYPE);
+        case "Text":   return new TextField(fieldName, "", Field.Store.YES);
+        default:
+            throw new DatabaseException("Can only index Long, String and Text fields, encountered field type " + fieldClass);
+        }
+    }
+
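+    /**
+     * Creates one reusable Lucene field per free (non-bound) field of the given
+     * relation and, if a document is given, attaches the fields to it.
+     */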
+    protected static Field[] makeFieldsForRelation(GenericRelation r, int boundLength, Document document) throws DatabaseException {
+        Pair<String, String>[] fields = r.getFields();
+        Field[] fs = new Field[Math.max(0, fields.length - boundLength)];
+        for (int i = boundLength; i < fields.length; i++) {
+            Field f = makeField(fields[i].first, fields[i].second);
+            fs[i - boundLength] = f;
+            if (document != null)
+                document.add(f);
+        }
+        return fs;
+    }
+
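+    /**
+     * Adds the given pre-realized document rows to the index. The index must
+     * already be open for writing; rows whose values cannot be converted into
+     * index fields are skipped.
+     */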
+    void insertIndex(IProgressMonitor monitor, GenericRelation r, int boundLength, Collection<Object[]> documentsData)
+    throws CorruptIndexException, IOException, DatabaseException {
+        assertAccessOpen(true);
+
+        if (IndexPolicy.TRACE_INDEX_UPDATE)
+            System.out.println(getDescriptor() + "Inserting " + documentsData.size() + " documents into index at " + indexPath);
+
+        long start = 0, end = 0;
+        if (IndexPolicy.PERF_INDEX_UPDATE)
+            start = System.nanoTime();
+
+        try {
+            Document document = new Document();
+            Field[] fs = makeFieldsForRelation(r, boundLength, document);
+
+            for (Object[] documentData : documentsData) {
+                if (setFields(fs, documentData) == null)
+                    continue;
+
+                if (IndexPolicy.TRACE_INDEX_UPDATE)
+                    System.out.println(getDescriptor() + "Inserting document " + document);
+
+                writer.addDocument(document);
+            }
+
+            if (IndexPolicy.PERF_INDEX_UPDATE) {
+                end = System.nanoTime();
+                double ms = (end - start) * 1e-6;
+                System.out.println(getDescriptor() + "Inserted " + documentsData.size() + " documents into index at " + indexPath + " in " + ms + " ms");
+            }
+
+        } finally {
+        }
+    }
+
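+    /**
+     * Removes all documents whose <code>key</code> field matches one of the given
+     * key values. Only Long and String key values are supported; other values are
+     * silently skipped.
+     */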
+    void removeIndex(IProgressMonitor monitor, GenericRelation r, RequestProcessor processor, String key, Collection<Object> keyValues) throws DatabaseException, CorruptIndexException, IOException {
+        assertAccessOpen(true);
+
+        if (IndexPolicy.TRACE_INDEX_UPDATE)
+            System.out.println(getDescriptor() + "Removing " + keyValues.size() + " documents from index at " + indexPath);
+
+        long start = 0, end = 0;
+        if (IndexPolicy.PERF_INDEX_UPDATE)
+            start = System.nanoTime();
+
+        try {
+            for (Object keyValue : keyValues) {
+                Term removedTerm = null;
+                if (keyValue instanceof Long) {
+                    removedTerm = IndexUtils.longTerm(key, (Long) keyValue);
+                } else if (keyValue instanceof String) {
+                    removedTerm = new Term(key, (String) keyValue);
+                } else {
+                    // FIXME: should throw an exception for illegal input data but this would leave the index in an incoherent state
+                    continue;
+                }
+
+                if (IndexPolicy.TRACE_INDEX_UPDATE)
+                    System.out.println(getDescriptor() + "Removing document with key " + removedTerm);
+                writer.deleteDocuments(removedTerm);
+            }
+
+            if (IndexPolicy.PERF_INDEX_UPDATE) {
+                end = System.nanoTime();
+                double ms = (end - start) * 1e-6;
+                System.out.println(getDescriptor() + "Removed " + keyValues.size() + " documents from index at " + indexPath + " in " + ms + " ms");
+            }
+
+        } finally {
+        }
+    }
+
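+    /**
+     * Deletes every document from the index. The index must be open for writing.
+     */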
+    void removeIndex(IProgressMonitor monitor) throws DatabaseException, CorruptIndexException, IOException {
+        assertAccessOpen(true);
+
+        long start = 0, end = 0;
+        if (IndexPolicy.PERF_INDEX_UPDATE)
+            start = System.nanoTime();
+
+        try {
+
+            writer.deleteAll();
+
+            if (IndexPolicy.PERF_INDEX_UPDATE) {
+                end = System.nanoTime();
+                double ms = (end - start) * 1e-6;
+                System.out.println(getDescriptor() + "Removed all documents from index at " + indexPath + " in " + ms + " ms");
+            }
+
+        } finally {
+        }
+    }
+    
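+    /**
+     * Replaces the documents matching each key value with the corresponding row of
+     * document data. When {@link #requireChangeInfoOnReplace()} returns
+     * <code>true</code> and exactly one matching document already exists, an
+     * unchanged document is left untouched.
+     *
+     * @return <code>true</code> if at least one document was actually rewritten
+     */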
+    boolean replaceIndex(IProgressMonitor monitor, String key, Collection<Object> keyValues, GenericRelation r, int boundLength, Collection<Object[]> documentsData) throws CorruptIndexException, IOException, DatabaseException {
+
+       boolean didReplace = false;
+       
+       assertAccessOpen(true);
+        if (keyValues.size() != documentsData.size())
+            throw new IllegalArgumentException("keyValues size does not match documents data size, " + keyValues.size() + " <> " + documentsData.size());
+
+        if (IndexPolicy.TRACE_INDEX_UPDATE)
+            System.out.println(getDescriptor() + "Replacing " + keyValues.size() + " documents from index at " + indexPath);
+
+        long start = 0, end = 0;
+        if (IndexPolicy.PERF_INDEX_UPDATE)
+            start = System.nanoTime();
+
+        try {
+            Iterator<Object> keyIt = keyValues.iterator();
+            Iterator<Object[]> documentDataIt = documentsData.iterator();
+
+            Document document = new Document();
+            Field[] fs = makeFieldsForRelation(r, boundLength, document);
+
+            nextDocument:
+                while (keyIt.hasNext()) {
+                    Object keyValue = keyIt.next();
+                    Object[] documentData = documentDataIt.next();
+
+                    Term removedTerm = null;
+                    if (keyValue instanceof Long) {
+                        removedTerm = IndexUtils.longTerm(key, (Long) keyValue);
+                    } else if (keyValue instanceof String) {
+                        removedTerm = new Term(key, (String) keyValue);
+                    } else {
+                        // FIXME: should throw an exception for illegal input data but this would leave the index in an incoherent state
+                        System.err.println("[" + getClass().getSimpleName() + "] Unrecognized document key to remove '" + keyValue + "', only " + String.class + " and " + Long.class + " are supported.");
+                        continue nextDocument;
+                    }
+
+                    if (setFields(fs, documentData) == null)
+                        continue nextDocument;
+
+                    if (IndexPolicy.TRACE_INDEX_UPDATE)
+                        System.out.println(getDescriptor() + "Replacing document with key " + removedTerm + " with " + document);
+
+                    boolean done = false;
+                    if (requireChangeInfoOnReplace()) {
+                        TopDocs exist = searcher.search(new TermQuery(removedTerm), null, 2);
+                        if (exist.scoreDocs.length == 1) {
+                            Document doc = reader.document(exist.scoreDocs[0].doc);
+                            if (!areSame(doc, document)) {
+                                writer.deleteDocuments(removedTerm);
+                                writer.addDocument(document);
+                                didReplace = true;
+                                if (IndexPolicy.TRACE_INDEX_UPDATE)
+                                    System.out.println("-replaced single existing");
+                            } else {
+                                if (IndexPolicy.TRACE_INDEX_UPDATE)
+                                    System.out.println("-was actually the same as the single existing document");
+                            }
+                            done = true;
+                        }
+                    }
+                    if (!done) {
+                        writer.deleteDocuments(removedTerm);
+                        writer.addDocument(document);
+                        didReplace = true;
+                        if (IndexPolicy.TRACE_INDEX_UPDATE)
+                            System.out.println("-had many or none - removed all existing");
+                    }
+                    
+                }
+
+            if (IndexPolicy.PERF_INDEX_UPDATE) {
+                end = System.nanoTime();
+                double ms = (end - start) * 1e-6;
+                System.out.println(getDescriptor() + "Replaced " + keyValues.size() + " documents from index at " + indexPath + " in " + ms + " ms");
+            }
+
+        } finally {
+        }
+        
+        return didReplace;
+        
+    }
+    
+    protected boolean requireChangeInfoOnReplace() {
+       return true;
+    }
+    
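+    /**
+     * Compares two documents field by field using their string values; used to
+     * avoid rewriting a document whose content has not changed.
+     */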
+    private boolean areSame(Document d1, Document d2) {
+        List<IndexableField> fs1 = d1.getFields();
+        List<IndexableField> fs2 = d2.getFields();
+        if (fs1.size() != fs2.size())
+            return false;
+        for (int i = 0; i < fs1.size(); i++) {
+            IndexableField f1 = fs1.get(i);
+            IndexableField f2 = fs2.get(i);
+            String s1 = f1.stringValue();
+            String s2 = f2.stringValue();
+            if (IndexPolicy.TRACE_INDEX_UPDATE)
+                System.err.println("areSame " + s1 + " vs. " + s2);
+            if (!ObjectUtils.objectEquals(s1, s2))
+                return false;
+        }
+        return true;
+    }
+
+    final RequestProcessor session;
+
+    final Resource         relation;
+
+    /**
+     * The schema of the index, i.e. the fields that will be indexed per
+     * document for the specified relation. Since the relation stays the same
+     * throughout the lifetime of this class, the index schema is also assumed
+     * to stay the same. This means that {@link GenericRelation#getFields()} is
+     * assumed to stay the same.
+     */
+    final IndexSchema      schema;
+
+    Resource         input;
+
+    Path             indexPath;
+
+    Directory        directory;
+
+    IndexReader      reader;
+
+    IndexWriter      writer;
+
+    IndexSearcher    searcher;
+
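+    /**
+     * Creates a searcher base for the given relation and input and resolves the
+     * on-disk index location; the initial state is READY if an index directory
+     * already exists on disk and NONE otherwise.
+     */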
+    IndexedRelationsSearcherBase(RequestProcessor session, Resource relation, Resource input) {
+        this.session = session;
+        this.relation = relation;
+        this.input = input;
+        this.indexPath = getIndexDirectory(session.getSession(), relation, input);
+        if (isIndexAvailable()) {
+            state = State.READY;
+        } else {
+            state = State.NONE;
+        }
+        this.schema = IndexSchema.readFromRelation(session, relation);
+    }
+
+    Directory getDirectory(Session session) throws IOException {
+        return FSDirectory.open(indexPath.toFile());
+    }
+
+    abstract String getDescriptor();
+    
+    /**
+     * Ensures that searcher is in read or write state.
+     * 
+     * @param forWriting <code>true</code> to open index for writing,
+     *        <code>false</code> for reading
+     * @return <code>true</code> if the required state was reached
+     */
+    boolean startAccess(IProgressMonitor monitor, Session session, boolean forWriting) {
+        if (forWriting) {
+            changeState(monitor, session, State.WRITE);
+            return checkState(State.WRITE);
+        } else {
+            changeState(monitor, session, State.READ);
+            return checkState(State.READ);
+        }
+    }
+
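+    /**
+     * Tells whether the index is already open for the requested kind of access
+     * without changing its state; write access also satisfies read access.
+     */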
+    boolean hasAccess(boolean forWriting) {
+        if (forWriting)
+            return checkState(State.WRITE);
+        else
+            return checkState(State.WRITE) || checkState(State.READ);
+    }
+    
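+    /**
+     * Throws {@link IllegalStateException} if the index is not open for the
+     * requested kind of access.
+     */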
+    void assertAccessOpen(boolean forWriting) {
+        if (forWriting) {
+            if (!checkState(State.WRITE))
+                throw new IllegalStateException("index not opened for writing (directory=" + directory + ", reader=" + reader + ")");
+        } else {
+            if (!(checkState(State.WRITE) || checkState(State.READ)))
+                throw new IllegalStateException("index not opened for reading (directory=" + directory + ", writer=" + writer + ")");
+        }
+    }
+    
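+    /**
+     * Closes the given writer if it is non-null. If the close runs out of memory,
+     * it is retried once before the error is rethrown, per the advice in the
+     * IndexWriter javadoc referenced in the code below.
+     */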
+    void closeWriter(IndexWriter writer) throws CorruptIndexException, IOException {
+        if (writer == null)
+            return;
+
+        try {
+            // May throw OOME, see IndexWriter javadoc for the correct actions.
+            writer.close(false);
+        } catch (OutOfMemoryError e) {
+            writer.close();
+            throw e;
+        }
+    }
+
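+    /**
+     * Builds the selection pattern passed to GenericRelation.select: one 'b' per
+     * bound field followed by one 'f' per free field.
+     */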
+    public static String getPattern(GenericRelation relation, int boundCount) {
+        StringBuilder result = new StringBuilder();
+        for (int i = 0; i < boundCount; i++)
+            result.append('b');
+        for (int i = 0; i < relation.getFields().length - boundCount; i++)
+            result.append('f');
+        return result.toString();
+    }
+
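+    /**
+     * Rebuilds the index for this relation and input by realizing the relation
+     * with the given bound values in the given read transaction and writing the
+     * results to disk, wrapped into an indexing job when possible.
+     */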
+    void initializeIndex(IProgressMonitor monitor, ReadGraph graph, Object[] bound, boolean overwrite)
+            throws IOException, DatabaseException
+    {
+        IndexingJob.jobifyIfPossible(
+                monitor,
+                "Reindexing " + NameUtils.getSafeLabel(graph, input),
+                mon -> {
+                    try {
+                        GenericRelation r = graph.adapt(relation, GenericRelation.class);
+                        if (r == null)
+                            throw new DatabaseException("Given resource " + relation + " could not be adapted to GenericRelation.");
+
+                        GenericRelation selection = r.select(getPattern(r, bound.length), bound);
+
+                        List<Object[]> results = selection.realize(graph);
+                        initializeIndexImpl(new CompletableFuture<>(), mon, r, results, bound, overwrite);
+                    } catch (IOException e) {
+                        getLogger().error("Index is in problematic state! {}", this, e);
+                        throw new DatabaseException(e);
+                    }
+                });
+    }
+
+    private static final int INDEXING_THREAD_COUNT = 2; // a reasonably good level of parallelism for Lucene indexing
+
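+    /**
+     * Writes the realized relation rows into a fresh index, splitting the document
+     * writes across {@link #INDEXING_THREAD_COUNT} worker tasks. Completion or
+     * failure is signalled through the given future.
+     */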
+    void initializeIndexImpl(CompletableFuture<?> result, IProgressMonitor monitor, GenericRelation r, List<Object[]> results, final Object[] bound, boolean overwrite) throws IOException {
+        try {
+            final SubMonitor mon = SubMonitor.convert(monitor, 100);
+    
+            if (IndexPolicy.TRACE_INDEX_INIT)
+                System.out.println(getDescriptor() + "Initializing index at " + indexPath + " (overwrite = " + overwrite + ")");
+            mon.beginTask("Initializing Index", 100);
+    
+            if (overwrite) {
+                if (Files.exists(indexPath)) {
+                    mon.subTask("Erasing previous index");
+                    if (getLogger().isDebugEnabled())
+                        getLogger().debug("Erasing previous index {}", indexPath.toAbsolutePath());
+                    FileUtils.emptyDirectory(indexPath);
+                }
+            }
+    
+            final AtomicReference<FSDirectory> directory = new AtomicReference<FSDirectory>();
+            final AtomicReference<IndexWriter> writer = new AtomicReference<IndexWriter>();
+    
+            try {
+                mon.subTask("Start index write");
+                Files.createDirectories(indexPath);
+    
+                directory.set(FSDirectory.open(indexPath.toFile()));
+                IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_4_9, Queries.getAnalyzer()).setOpenMode(OpenMode.CREATE);
+                writer.set(new IndexWriter(directory.get(), conf));
+    
+                mon.worked(5);
+    
+                long realizeStart = 0;
+                if (IndexPolicy.PERF_INDEX_INIT)
+                    realizeStart = System.nanoTime();
+    
+                mon.subTask("Calculating indexed content");
+                mon.worked(5);
+                
+                mon.worked(40);
+    
+                if (IndexPolicy.PERF_INDEX_INIT)
+                    System.out.println(getDescriptor() + "Realized index with " + results.size() + " entries at " + indexPath + " in " + (1e-9 * (System.nanoTime()-realizeStart)) + " seconds.");
+    
+                if (IndexPolicy.TRACE_INDEX_INIT)
+                    System.out.println(getDescriptor() + "Indexed relation " + r + " produced " + results.size() + " results");
+                
+                long start = IndexPolicy.PERF_INDEX_INIT ? System.nanoTime() : 0;
+    
+                mon.subTask("Indexing content");
+                final Semaphore s = new Semaphore(0);
+                mon.setWorkRemaining(results.size());
+                for (int i = 0; i < INDEXING_THREAD_COUNT; i++) {
+                    final int startIndex = i;
+                    ThreadUtils.getBlockingWorkExecutor().submit(() -> {
+                        try {
+                            Document document = new Document();
+                            Field[] fs = makeFieldsForRelation(r, bound.length, document);
+    
+                            for (int index = startIndex; index < results.size(); index += INDEXING_THREAD_COUNT) {
+                                if (setFields(fs, results.get(index)) == null)
+                                    continue;
+                                try {
+                                    writer.get().addDocument(document);
+                                } catch (CorruptIndexException e) {
+                                    getLogger().error("Index is corrupted! {}", this, e);
+                                    throw new IllegalStateException(e);
+                                } catch (IOException e) {
+                                    getLogger().error("Index is in problematic state! {}", this, e);
+                                    throw new IllegalStateException(e);
+                                } finally {
+                                    synchronized (mon) {
+                                        mon.worked(1);
+                                    }
+                                }
+                            }
+                        } catch (DatabaseException e) {
+                            throw new IllegalStateException(e);
+                        } finally {
+                            s.release();
+                        }
+                    });
+                }
+    
+                try {
+                    s.acquire(INDEXING_THREAD_COUNT);
+                } catch (InterruptedException e) {
+                    getLogger().error("Could not initialize index {}", this, e);
+                }
+    
+                // http://www.gossamer-threads.com/lists/lucene/java-dev/47895
+                // and http://lucene.apache.org/java/docs/index.html#27+November+2011+-+Lucene+Core+3.5.0
+                // advise against calling optimize at all. So let's not do it anymore.
+                //writer.get().optimize();
+                //writer.get().commit();
+    
+                mon.subTask("Flushing");
+    
+                if (IndexPolicy.PERF_INDEX_INIT)
+                    System.out.println(getDescriptor() + "Wrote index at " + indexPath + " in " + (1e-9 * (System.nanoTime()-start)) + " seconds.");
+                
+                result.complete(null);
+    //        } catch (DatabaseException e) {
+    //            getLogger().error("Could not initialize index due to db {}", this, e);
+            } finally {
+                try {
+                    closeWriter(writer.getAndSet(null));
+                } finally {
+                    FileUtils.uncheckedClose(directory.getAndSet(null));
+                }
+            }
+        } catch (Throwable t) {
+            getLogger().error("Could not initialize index", t);
+            result.completeExceptionally(t);
+        }
+    }
+
+    
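+    /**
+     * Returns the stored fields of every document in the index as string values;
+     * intended for debugging.
+     */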
+    public List<Object[]> debugDocs(IProgressMonitor monitor) throws ParseException, IOException, DatabaseException {
+    
+        Query query = new MatchAllDocsQuery();
+
+        TopDocs td = searcher.search(query, Integer.MAX_VALUE);
+
+        ScoreDoc[] scoreDocs = td.scoreDocs;
+        List<Object[]> result = new ArrayList<Object[]>(scoreDocs.length);
+
+        for (ScoreDoc scoreDoc : scoreDocs) {
+            try {
+                Document doc = reader.document(scoreDoc.doc);
+                List<IndexableField> fs = doc.getFields();
+                Object[] o = new Object[fs.size()];
+                int index = 0;
+                for (IndexableField f : fs) {
+                    o[index++] = f.stringValue();
+                }
+                result.add(o);
+            } catch (CorruptIndexException e) {
+                getLogger().error("Index is corrupted! {}", this, e);
+                throw new DatabaseException(e);
+            } catch (IOException e) {
+                getLogger().error("Index is in problematic state! {}", this, e);
+                throw new DatabaseException(e);
+            }
+        }
+
+        return result;
+    }
+
+    
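+    /**
+     * Runs the given query string against the index and returns the matching
+     * documents as maps from field name to value, converting Long-typed fields
+     * back into {@link Resource} instances.
+     */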
+    List<Map<String, Object>> doSearch(IProgressMonitor monitor, RequestProcessor processor, String search, int maxResultCount) throws ParseException, IOException,
+    DatabaseException {
+
+        // An empty search string will crash QueryParser
+        // Just return no results for empty queries.
+        //System.out.println("search: '" + search + "'");
+        if (search.isEmpty())
+            return Collections.emptyList();
+
+        assertAccessOpen(false);
+
+        Query query = Queries.parse(search, schema);
+
+        long start = System.nanoTime();
+
+        maxResultCount = Math.min(maxResultCount, searcher.getIndexReader().numDocs());
+        if (maxResultCount == 0)
+            return Collections.emptyList();
+        
+        final TopDocs docs = searcher.search(query, null, maxResultCount);
+        
+//        for(Object[] o : debugDocs(monitor)) {
+//            System.err.println("-" + Arrays.toString(o));
+//        }
+        
+        if (IndexPolicy.PERF_INDEX_QUERY) {
+            long end = System.nanoTime();
+            System.out.println(getDescriptor() + "search(" + search + ", " + maxResultCount + ") into index at " + indexPath + " took " + (1e-9 * (end-start)) + " seconds.");
+        }
+
+        if (docs.totalHits == 0) {
+            return Collections.emptyList();
+        }
+
+        return processor.syncRequest(new Read<List<Map<String, Object>>>() {
+
+            @Override
+            public List<Map<String, Object>> perform(ReadGraph graph) throws DatabaseException {
+
+                GenericRelation r = graph.adapt(relation, GenericRelation.class);
+                if (r == null)
+                    throw new DatabaseException("Given resource " + graph.syncRequest(new SafeName(relation))
+                            + " could not be adapted to GenericRelation.");
+
+                SerialisationSupport support = graph.getService(SerialisationSupport.class);
+
+                List<Map<String, Object>> result = new ArrayList<Map<String, Object>>(docs.scoreDocs.length);
+                
+                final DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor();
+                
+                for (ScoreDoc scoreDoc : docs.scoreDocs) {
+
+                    try {
+
+                        reader.document(scoreDoc.doc, visitor);
+                        
+                        Document doc = visitor.getDocument();
+
+                        List<IndexableField> fs = doc.getFields();
+                        Map<String, Object> entry = new THashMap<String, Object>(fs.size());
+                        for (IndexableField f : fs) {
+                            IndexSchema.Type type = schema.typeMap.get(f.name());
+                            if (type == IndexSchema.Type.LONG) {
+                                entry.put(f.name(), support.getResource(f.numericValue().longValue()));
+                            } else {
+                                entry.put(f.name(), f.stringValue());
+                            }
+                        }
+                        
+                        result.add(entry);
+
+                    } catch (CorruptIndexException e) {
+                        getLogger().error("Index is corrupted! {}", this, e);
+                        throw new DatabaseException(e);
+                    } catch (IOException e) {
+                        getLogger().error("Index is in problematic state! {}", this, e);
+                        throw new DatabaseException(e);
+                    }
+                }
+                return result;
+            }
+        });
+    }
+
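+    /**
+     * Stored field visitor that only reads the long-valued "Resource" field of a document.
+     */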
+    static class ResourceVisitor extends StoredFieldVisitor {
+
+        public long id;
+
+        @Override
+        public Status needsField(FieldInfo fieldInfo) throws IOException {
+            if ("Resource".equals(fieldInfo.name))
+                return Status.YES;
+            return Status.NO;
+        }
+
+        @Override
+        public void longField(FieldInfo fieldInfo, long value) throws IOException {
+            id = value;
+        }
+    }
+    
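+    /**
+     * Stored field visitor that collects every stored long and string field value into a list.
+     */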
+    static class DumpVisitor extends StoredFieldVisitor {
+
+        public List<Object> values;
+
+        DumpVisitor(List<Object> values) {
+            this.values = values;
+        }
+
+        @Override
+        public Status needsField(FieldInfo fieldInfo) throws IOException {
+            return Status.YES;
+        }
+
+        @Override
+        public void longField(FieldInfo fieldInfo, long value) throws IOException {
+            values.add(value);
+        }
+
+        @Override
+        public void stringField(FieldInfo fieldInfo, String value) throws IOException {
+            values.add(value);
+        }
+    }
+
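+    /**
+     * Like {@link #doSearch}, but loads only the "Resource" field of each hit and
+     * returns the matches as resources.
+     */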
+    List<Resource> doSearchResources(IProgressMonitor monitor, RequestProcessor processor, String search, int maxResultCount) throws ParseException, IOException,
+    DatabaseException {
+
+        // An empty search string will crash QueryParser
+        // Just return no results for empty queries.
+        //System.out.println("search: '" + search + "'");
+        if (search.isEmpty())
+            return Collections.emptyList();
+
+        assertAccessOpen(false);
+
+        Query query = Queries.parse(search, schema);
+
+        long start = System.nanoTime();
+
+        maxResultCount = Math.min(maxResultCount, searcher.getIndexReader().numDocs());
+        if (maxResultCount == 0)
+            return Collections.emptyList();
+        
+        final TopDocs docs = searcher.search(query, null, maxResultCount);
+        
+//        for(Object[] o : debugDocs(monitor)) {
+//            System.err.println("-" + Arrays.toString(o));
+//        }
+        
+        if (IndexPolicy.PERF_INDEX_QUERY) {
+            long end = System.nanoTime();
+            System.out.println(getDescriptor() + "search(" + search + ", " + maxResultCount + ") into index at " + indexPath + " took " + (1e-9 * (end-start)) + " seconds.");
+        }
+
+        if (docs.totalHits == 0) {
+            return Collections.emptyList();
+        }
+        
+        return processor.syncRequest(new Read<List<Resource>>() {
+
+            @Override
+            public List<Resource> perform(ReadGraph graph) throws DatabaseException {
+
+                CollectionSupport cs = graph.getService(CollectionSupport.class);
+                SerialisationSupport support = graph.getService(SerialisationSupport.class);
+
+                List<Resource> result = cs.createList();
+
+                ResourceVisitor visitor = new ResourceVisitor();
+
+                for (ScoreDoc scoreDoc : docs.scoreDocs) {
+                    try {
+                        reader.document(scoreDoc.doc, visitor);
+                        result.add(support.getResource(visitor.id));
+                    } catch (CorruptIndexException e) {
+                        getLogger().error("Index is corrupted! {}", this, e);
+                        throw new DatabaseException(e);
+                    } catch (IOException e) {
+                        getLogger().error("Index is in problematic state! {}", this, e);
+                        throw new DatabaseException(e);
+                    }
+                }
+                return result;
+            }
+        });
+    }
+
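+    /**
+     * Returns the stored field values of every document in the index; mainly
+     * useful for debugging and dumps.
+     */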
+    List<Object> doList(IProgressMonitor monitor, RequestProcessor processor) throws ParseException, IOException,
+    DatabaseException {
+
+        assertAccessOpen(false);
+
+        Query query = new MatchAllDocsQuery(); 
+
+        final TopDocs docs = searcher.search(query, Integer.MAX_VALUE);
+        
+        ArrayList<Object> result = new ArrayList<Object>();
+        
+        DumpVisitor visitor = new DumpVisitor(result);
+                
+        for (ScoreDoc scoreDoc : docs.scoreDocs) {
+            try {
+                reader.document(scoreDoc.doc, visitor);
+            } catch (CorruptIndexException e) {
+                getLogger().error("Index is corrupted! {}", this, e);
+                throw new DatabaseException(e);
+            } catch (IOException e) {
+                getLogger().error("Index is in problematic state! {}", this, e);
+                throw new DatabaseException(e);
+            }
+        }
+
+        return result;
+
+    }
+    
+    protected static Path getIndexDirectory(Session session, Resource relation, Resource input) {
+        Path path = DatabaseIndexing.getIndexLocation(session, relation, input);
+//        System.out.println("getIndexDirectory = " + path);
+        return path;
+    }
+
+    Path getIndexPath() {
+        return indexPath;
+    }
+
+    boolean isIndexAvailable() {
+        return Files.isDirectory(indexPath);
+    }
+
+    abstract Throwable bestEffortClear(IProgressMonitor monitor, Session session);
+
+    /*
+     * Start from scratch. Clear all caches and rebuild the index. 
+     */
+    Throwable clearDirectory(IProgressMonitor monitor, Session session) {
+        Path file = getIndexPath();
+
+        try {
+            FileUtils.delete(file);
+        } catch (Throwable t) {
+            getLogger().error("Could not delete directory {}", file.toAbsolutePath(), t);
+            return t;
+        }
+        if (Files.exists(file))
+            return new IllegalStateException("Failed to delete directory " + file.toAbsolutePath());
+        return null;
+    }
+
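+    /**
+     * Copies one row of relation data into the reusable Lucene fields. Returns
+     * <code>null</code> if the row contains a value that is neither a Long nor a
+     * String, in which case the caller skips the row.
+     */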
+    private Field[] setFields(Field[] fs, Object[] result) {
+        for (int i = 0; i < result.length; i++) {
+            Object value = result[i];
+            if (value instanceof String) {
+                if (IndexPolicy.DEBUG_INDEX_INIT)
+                    System.out.println(getDescriptor() + "index " + fs[i].name() + " = " + value + " : String");
+                fs[i].setStringValue((String) value);
+            } else if (value instanceof Long) {
+                if (IndexPolicy.DEBUG_INDEX_INIT)
+                    System.out.println(getDescriptor() + "index " + fs[i].name() + " = " + value + " : Long");
+                fs[i].setLongValue((Long) value);
+            } else {
+                getLogger().error("Can only index Long and String fields, encountered {}", value);
+                return null;
+            }
+        }
+        return fs;
+    }
+
+    protected abstract Logger getLogger();
+    
+    @Override
+    public String toString() {
+        return getClass().getSimpleName() + " [" + String.valueOf(schema) + ", " + String.valueOf(relation) + ", " + String.valueOf(input) + ", " + String.valueOf(indexPath) + ", " + String.valueOf(directory) + ", " + String.valueOf(state) + "]";
+    }
+}