diff --git a/editor.kit/nbproject/project.xml b/editor.kit/nbproject/project.xml
--- a/editor.kit/nbproject/project.xml
+++ b/editor.kit/nbproject/project.xml
@@ -87,6 +87,12 @@
+            org.netbeans.modules.parsing.lucene
+
+                1.0
+
+
+
             org.netbeans.modules.properties
             1
diff --git a/java.source/nbproject/project.xml b/java.source/nbproject/project.xml
--- a/java.source/nbproject/project.xml
+++ b/java.source/nbproject/project.xml
@@ -251,6 +251,14 @@
+            org.netbeans.modules.parsing.lucene
+
+
+
+
+                1.0
+
+
             org.netbeans.modules.projectapi
@@ -450,6 +458,11 @@
+            org.netbeans.modules.parsing.lucene
+
+
+
+
             org.netbeans.modules.progress.ui
diff --git a/java.source/src/org/netbeans/api/java/source/ClassIndex.java b/java.source/src/org/netbeans/api/java/source/ClassIndex.java
--- a/java.source/src/org/netbeans/api/java/source/ClassIndex.java
+++ b/java.source/src/org/netbeans/api/java/source/ClassIndex.java
@@ -75,9 +75,10 @@
 import org.netbeans.modules.java.source.usages.ClassIndexManagerEvent;
 import org.netbeans.modules.java.source.usages.ClassIndexManagerListener;
 import org.netbeans.modules.java.source.usages.DocumentUtil;
-import org.netbeans.modules.java.source.usages.ResultConvertor;
 import org.netbeans.modules.parsing.impl.Utilities;
 import org.netbeans.modules.parsing.impl.indexing.PathRegistry;
+import org.netbeans.modules.parsing.lucene.support.Convertor;
+import org.netbeans.modules.parsing.lucene.support.Index;
 import org.netbeans.modules.parsing.spi.Parser.Result;
 import org.netbeans.modules.parsing.spi.ParserResultTask;
 import org.netbeans.modules.parsing.spi.Scheduler;
@@ -294,13 +295,13 @@
         final Iterable queries = this.getQueries (scope);
         final Set ut = encodeSearchKind(element.getKind(),searchKind);
         final String binaryName = element.getSignature()[0];
-        final ResultConvertor> thConvertor = DocumentUtil.elementHandleConvertor();
+        final Convertor> thConvertor = DocumentUtil.elementHandleConvertor();
         try {
             if (!ut.isEmpty()) {
                 for (ClassIndexImpl query : queries) {
                     try {
                         query.search(binaryName, ut, thConvertor, result);
-                    } catch (ClassIndexImpl.IndexAlreadyClosedException e) {
+                    } catch (Index.IndexClosedException e) {
                         logClosedIndex (query);
                     } catch (IOException e) {
                         Exceptions.printStackTrace(e);
@@ -333,10 +334,10 @@
             try {
                 if (!ut.isEmpty()) {
                     for (ClassIndexImpl query : queries) {
-                        final ResultConvertor foConvertor = DocumentUtil.fileObjectConvertor (query.getSourceRoots());
+                        final Convertor foConvertor = DocumentUtil.fileObjectConvertor (query.getSourceRoots());
                         try {
                             query.search (binaryName, ut, foConvertor, result);
-                        } catch (ClassIndexImpl.IndexAlreadyClosedException e) {
+                        } catch (Index.IndexClosedException e) {
                             logClosedIndex (query);
                         } catch (IOException e) {
                             Exceptions.printStackTrace(e);
@@ -365,12 +366,12 @@
         assert kind != null;
         final Set> result = new HashSet>();
         final Iterable queries = this.getQueries (scope);
-        final ResultConvertor> thConvertor = DocumentUtil.elementHandleConvertor();
+        final Convertor> thConvertor = DocumentUtil.elementHandleConvertor();
         try {
             for (ClassIndexImpl query : queries) {
                 try {
                     query.getDeclaredTypes (name, kind, thConvertor, result);
-                } catch (ClassIndexImpl.IndexAlreadyClosedException e) {
+                } catch (Index.IndexClosedException e) {
                     logClosedIndex (query);
                 } catch (IOException e) {
                     Exceptions.printStackTrace(e);
@@ -401,7 +402,7 @@
             for (ClassIndexImpl query : queries) {
                 try {
                     query.getPackageNames (prefix, directOnly, result);
-                } catch (ClassIndexImpl.IndexAlreadyClosedException e) {
+                } catch (Index.IndexClosedException e) {
logClosedIndex (query); } catch (IOException e) { Exceptions.printStackTrace(e); diff --git a/java.source/src/org/netbeans/api/java/source/SourceUtils.java b/java.source/src/org/netbeans/api/java/source/SourceUtils.java --- a/java.source/src/org/netbeans/api/java/source/SourceUtils.java +++ b/java.source/src/org/netbeans/api/java/source/SourceUtils.java @@ -110,6 +110,7 @@ import org.netbeans.modules.parsing.api.UserTask; import org.netbeans.modules.parsing.api.indexing.IndexingManager; import org.netbeans.modules.parsing.impl.indexing.friendapi.IndexingController; +import org.netbeans.modules.parsing.lucene.support.IndexManager; import org.netbeans.spi.java.classpath.support.ClassPathSupport; import org.openide.filesystems.FileObject; @@ -511,7 +512,7 @@ private static FileObject findSource (final String binaryName, final FileObject... fos) throws IOException { final ClassIndexManager cim = ClassIndexManager.getDefault(); try { - return cim.readLock(new ClassIndexManager.ExceptionAction() { + return IndexManager.readAccess(new IndexManager.Action() { public FileObject run() throws IOException, InterruptedException { for (FileObject fo : fos) { diff --git a/java.source/src/org/netbeans/api/java/source/TreePathHandle.java b/java.source/src/org/netbeans/api/java/source/TreePathHandle.java --- a/java.source/src/org/netbeans/api/java/source/TreePathHandle.java +++ b/java.source/src/org/netbeans/api/java/source/TreePathHandle.java @@ -50,17 +50,13 @@ import com.sun.source.util.TreeScanner; import com.sun.tools.javac.code.Symbol; import com.sun.tools.javac.tree.JCTree; -import java.io.File; import java.io.IOException; -import java.lang.reflect.Field; -import java.lang.reflect.Method; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedList; import java.util.List; -import java.util.Map; import java.util.StringTokenizer; import java.util.logging.Logger; import java.util.logging.Level; @@ -72,7 +68,6 @@ import org.netbeans.api.annotations.common.CheckForNull; import org.netbeans.api.annotations.common.NonNull; import org.netbeans.modules.java.source.parsing.FileObjects; -import org.netbeans.modules.java.source.usages.Index; import org.openide.filesystems.FileObject; import org.openide.filesystems.FileStateInvalidException; import org.openide.filesystems.FileUtil; diff --git a/java.source/src/org/netbeans/modules/java/source/JBrowseModule.java b/java.source/src/org/netbeans/modules/java/source/JBrowseModule.java --- a/java.source/src/org/netbeans/modules/java/source/JBrowseModule.java +++ b/java.source/src/org/netbeans/modules/java/source/JBrowseModule.java @@ -56,10 +56,9 @@ import javax.management.NotCompliantMBeanException; import javax.management.ObjectName; import org.netbeans.modules.java.source.usages.ClassIndexManager; -import org.netbeans.modules.java.source.usages.LuceneIndexMBean; -import org.netbeans.modules.java.source.usages.LuceneIndexMBeanImpl; import org.netbeans.modules.java.source.util.LowMemoryNotifierMBean; import org.netbeans.modules.java.source.util.LowMemoryNotifierMBeanImpl; +import org.netbeans.modules.parsing.lucene.support.IndexManager; import org.openide.modules.ModuleInstall; import org.openide.util.Exceptions; @@ -94,8 +93,9 @@ public @Override void close () { super.close(); try { - ClassIndexManager.getDefault().takeWriteLock(new ClassIndexManager.ExceptionAction() { - public @Override Void run() throws IOException { + IndexManager.writeAccess(new IndexManager.Action() { + @Override + 
public Void run() throws IOException { ClassIndexManager.getDefault().close(); return null; } @@ -115,7 +115,6 @@ try { MBeanServer mgs = ManagementFactory.getPlatformMBeanServer(); mgs.registerMBean (new LowMemoryNotifierMBeanImpl(), new ObjectName (LowMemoryNotifierMBean.OBJECT_NAME)); - mgs.registerMBean( LuceneIndexMBeanImpl.getDefault(), new ObjectName (LuceneIndexMBean.OBJECT_NAME)); } catch (NotCompliantMBeanException e) { if (log.isLoggable(Level.SEVERE)) log.log(Level.SEVERE, e.getMessage(), e); @@ -138,7 +137,6 @@ try { MBeanServer mgs = ManagementFactory.getPlatformMBeanServer(); mgs.unregisterMBean (new ObjectName (LowMemoryNotifierMBean.OBJECT_NAME)); - mgs.unregisterMBean (new ObjectName (LuceneIndexMBean.OBJECT_NAME)); } catch (MalformedObjectNameException e) { if (log.isLoggable(Level.SEVERE)) log.log(Level.SEVERE, e.getMessage(), e); diff --git a/java.source/src/org/netbeans/modules/java/source/indexing/JavaBinaryIndexer.java b/java.source/src/org/netbeans/modules/java/source/indexing/JavaBinaryIndexer.java --- a/java.source/src/org/netbeans/modules/java/source/indexing/JavaBinaryIndexer.java +++ b/java.source/src/org/netbeans/modules/java/source/indexing/JavaBinaryIndexer.java @@ -59,6 +59,7 @@ import org.netbeans.modules.java.source.usages.ClassIndexImpl; import org.netbeans.modules.java.source.usages.ClassIndexManager; import org.netbeans.modules.parsing.impl.indexing.friendapi.IndexingController; +import org.netbeans.modules.parsing.lucene.support.IndexManager; import org.netbeans.modules.parsing.spi.indexing.BinaryIndexer; import org.netbeans.modules.parsing.spi.indexing.BinaryIndexerFactory; import org.netbeans.modules.parsing.spi.indexing.Context; @@ -77,7 +78,8 @@ LOG.log(Level.FINE, "index({0})", context.getRootURI()); try { final ClassIndexManager cim = ClassIndexManager.getDefault(); - cim.prepareWriteLock(new ClassIndexManager.ExceptionAction() { + cim.prepareWriteLock(new IndexManager.Action() { + @Override public Void run() throws IOException, InterruptedException { CachingArchiveProvider.getDefault().clearArchive(context.getRootURI()); File cacheFolder = JavaIndex.getClassFolder(context.getRootURI()); @@ -145,7 +147,8 @@ assert removedRoots != null; final ClassIndexManager cim = ClassIndexManager.getDefault(); try { - cim.prepareWriteLock(new ClassIndexManager.ExceptionAction() { + cim.prepareWriteLock(new IndexManager.Action() { + @Override public Void run() throws IOException, InterruptedException { //todo: for (URL removedRoot : removedRoots) { @@ -164,9 +167,11 @@ @Override public boolean scanStarted(final Context context) { try { - return ClassIndexManager.getDefault().prepareWriteLock(new ClassIndexManager.ExceptionAction() { + return ClassIndexManager.getDefault().prepareWriteLock(new IndexManager.Action() { + @Override public Boolean run() throws IOException, InterruptedException { - return ClassIndexManager.getDefault().takeWriteLock(new ClassIndexManager.ExceptionAction() { + return IndexManager.writeAccess(new IndexManager.Action() { + @Override public Boolean run() throws IOException, InterruptedException { final ClassIndexImpl uq = ClassIndexManager.getDefault().createUsagesQuery(context.getRootURI(), true); if (uq == null) { diff --git a/java.source/src/org/netbeans/modules/java/source/indexing/JavaCustomIndexer.java b/java.source/src/org/netbeans/modules/java/source/indexing/JavaCustomIndexer.java --- a/java.source/src/org/netbeans/modules/java/source/indexing/JavaCustomIndexer.java +++ 
b/java.source/src/org/netbeans/modules/java/source/indexing/JavaCustomIndexer.java @@ -96,6 +96,7 @@ import org.netbeans.modules.parsing.impl.indexing.FileObjectIndexable; import org.netbeans.modules.parsing.impl.indexing.SPIAccessor; import org.netbeans.modules.parsing.impl.indexing.friendapi.IndexingController; +import org.netbeans.modules.parsing.lucene.support.IndexManager; import org.netbeans.modules.parsing.spi.indexing.Context; import org.netbeans.modules.parsing.spi.indexing.CustomIndexer; import org.netbeans.modules.parsing.spi.indexing.CustomIndexerFactory; @@ -149,7 +150,7 @@ splitSources(files,javaSources), context.getRootURI()); - ClassIndexManager.getDefault().prepareWriteLock(new ClassIndexManager.ExceptionAction() { + ClassIndexManager.getDefault().prepareWriteLock(new IndexManager.Action() { @Override public Void run() throws IOException, InterruptedException { try { @@ -315,7 +316,7 @@ JavaIndex.LOG.fine("Ignoring request with no root"); //NOI18N return; } - ClassIndexManager.getDefault().prepareWriteLock(new ClassIndexManager.ExceptionAction() { + ClassIndexManager.getDefault().prepareWriteLock(new IndexManager.Action() { public Void run() throws IOException, InterruptedException { try { final JavaParsingContext javaContext = new JavaParsingContext(context); @@ -755,9 +756,9 @@ @Override public boolean scanStarted(final Context context) { try { - boolean classIndexConsistent = ClassIndexManager.getDefault().prepareWriteLock(new ClassIndexManager.ExceptionAction() { + boolean classIndexConsistent = ClassIndexManager.getDefault().prepareWriteLock(new IndexManager.Action() { public Boolean run() throws IOException, InterruptedException { - return ClassIndexManager.getDefault().takeWriteLock(new ClassIndexManager.ExceptionAction() { + return IndexManager.writeAccess(new IndexManager.Action() { public Boolean run() throws IOException, InterruptedException { final ClassIndexImpl uq = ClassIndexManager.getDefault().createUsagesQuery(context.getRootURI(), true); if (uq == null) { @@ -839,7 +840,7 @@ final ClassIndexManager cim = ClassIndexManager.getDefault(); final JavaFileFilterListener ffl = JavaFileFilterListener.getDefault(); try { - cim.prepareWriteLock(new ClassIndexManager.ExceptionAction() { + cim.prepareWriteLock(new IndexManager.Action() { public Void run() throws IOException, InterruptedException { for (URL removedRoot : removedRoots) { cim.removeRoot(removedRoot); diff --git a/java.source/src/org/netbeans/modules/java/source/parsing/JavacParser.java b/java.source/src/org/netbeans/modules/java/source/parsing/JavacParser.java --- a/java.source/src/org/netbeans/modules/java/source/parsing/JavacParser.java +++ b/java.source/src/org/netbeans/modules/java/source/parsing/JavacParser.java @@ -93,7 +93,6 @@ import javax.tools.Diagnostic; import javax.tools.DiagnosticListener; import javax.tools.JavaCompiler; -import javax.tools.JavaFileManager; import javax.tools.JavaFileObject; import javax.tools.ToolProvider; import org.netbeans.api.annotations.common.NonNull; @@ -124,10 +123,9 @@ import org.netbeans.modules.java.source.indexing.APTUtils; import org.netbeans.modules.java.source.indexing.FQN2Files; import org.netbeans.modules.java.source.indexing.JavaCustomIndexer; -import org.netbeans.modules.java.source.indexing.JavaIndex; import org.netbeans.modules.java.source.tasklist.CompilerSettings; +import org.netbeans.modules.java.source.usages.ClassIndexImpl; import org.netbeans.modules.java.source.usages.ClasspathInfoAccessor; -import 
org.netbeans.modules.java.source.usages.Index; import org.netbeans.modules.java.source.usages.Pair; import org.netbeans.modules.parsing.api.Snapshot; import org.netbeans.modules.parsing.api.Source; @@ -140,7 +138,6 @@ import org.netbeans.modules.parsing.spi.SourceModificationEvent; import org.openide.cookies.EditorCookie; import org.openide.filesystems.FileObject; -import org.openide.filesystems.FileStateInvalidException; import org.openide.filesystems.FileUtil; import org.openide.loaders.DataObject; import org.openide.loaders.DataObjectNotFoundException; @@ -435,7 +432,7 @@ } } if (reachedPhase.compareTo(requiredPhase)>=0) { - Index.cancel.set(canceled); + ClassIndexImpl.cancel.set(canceled); result = new JavacParserResult(JavaSourceAccessor.getINSTANCE().createCompilationInfo(ciImpl)); } } @@ -470,7 +467,7 @@ public void resultFinished (boolean isCancelable) { if (isCancelable) { - Index.cancel.remove(); + ClassIndexImpl.cancel.remove(); } } diff --git a/java.source/src/org/netbeans/modules/java/source/usages/ClassIndexImpl.java b/java.source/src/org/netbeans/modules/java/source/usages/ClassIndexImpl.java --- a/java.source/src/org/netbeans/modules/java/source/usages/ClassIndexImpl.java +++ b/java.source/src/org/netbeans/modules/java/source/usages/ClassIndexImpl.java @@ -54,10 +54,12 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; import javax.lang.model.element.TypeElement; import org.apache.lucene.document.Document; import org.netbeans.api.java.source.ClassIndex; import org.netbeans.api.java.source.ElementHandle; +import org.netbeans.modules.parsing.lucene.support.Convertor; import org.openide.filesystems.FileObject; import org.openide.util.Utilities; @@ -67,11 +69,11 @@ */ public abstract class ClassIndexImpl { - public final List> listeners = Collections.synchronizedList(new ArrayList> ()); - - private State state = State.NEW; - - + public static enum State { + NEW, + INITIALIZED, + } + public static enum UsageType { SUPER_CLASS( 0 ), @@ -90,20 +92,18 @@ return this.offset; } } + + public static final ThreadLocal cancel = new ThreadLocal (); + public static ClassIndexFactory FACTORY; - public static enum State { - NEW, - INITIALIZED, - } + private State state = State.NEW; + private final List> listeners = Collections.synchronizedList(new ArrayList> ()); - - public static ClassIndexFactory FACTORY; + public abstract void search (final String binaryName, final Set usageType, final Convertor convertor, final Set result) throws IOException, InterruptedException; - public abstract void search (final String binaryName, final Set usageType, final ResultConvertor convertor, final Set result) throws IOException, InterruptedException; + public abstract void getDeclaredTypes (String name, ClassIndex.NameKind kind, final Convertor convertor, final Set result) throws IOException, InterruptedException; - public abstract void getDeclaredTypes (String name, ClassIndex.NameKind kind, final ResultConvertor convertor, final Set result) throws IOException, InterruptedException; - - public abstract void getDeclaredElements (String ident, ClassIndex.NameKind kind, ResultConvertor convertor, Map> result) throws IOException, InterruptedException; + public abstract void getDeclaredElements (String ident, ClassIndex.NameKind kind, Convertor convertor, Map> result) throws IOException, InterruptedException; public abstract void getPackageNames (String prefix, boolean directOnly, Set result) throws IOException, InterruptedException; @@ 
-183,10 +183,7 @@ } this.state=state; } - - public static final class IndexAlreadyClosedException extends IOException { - } - + public static interface Writer { void clear() throws IOException; void deleteEnclosedAndStore (final List, Object[]>> refs, final Set> topLevels) throws IOException; diff --git a/java.source/src/org/netbeans/modules/java/source/usages/ClassIndexManager.java b/java.source/src/org/netbeans/modules/java/source/usages/ClassIndexManager.java --- a/java.source/src/org/netbeans/modules/java/source/usages/ClassIndexManager.java +++ b/java.source/src/org/netbeans/modules/java/source/usages/ClassIndexManager.java @@ -52,11 +52,10 @@ import java.util.IdentityHashMap; import java.util.Map; import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.locks.ReentrantReadWriteLock; import org.netbeans.modules.java.source.classpath.AptCacheForSourceQuery; import org.netbeans.modules.java.source.indexing.JavaIndex; -import org.netbeans.modules.parsing.impl.Utilities; +import org.netbeans.modules.parsing.lucene.support.IndexManager; +import org.netbeans.modules.parsing.lucene.support.IndexManager.Action; import org.openide.util.Exceptions; /** @@ -70,7 +69,6 @@ private static ClassIndexManager instance; private final Map instances = new HashMap (); - private final ReentrantReadWriteLock lock; private final InternalLock internalLock; private final Map listeners = Collections.synchronizedMap(new IdentityHashMap()); private boolean invalid; @@ -79,7 +77,6 @@ private int depth = 0; private ClassIndexManager() { - this.lock = new ReentrantReadWriteLock (false); this.internalLock = new InternalLock(); } @@ -94,17 +91,18 @@ } @Deprecated - public T writeLock (final ExceptionAction r) throws IOException, InterruptedException { + public T writeLock (final Action r) throws IOException, InterruptedException { //Ugly, in scala much more cleaner. 
return prepareWriteLock( - new ExceptionAction() { - public T run() throws IOException, InterruptedException { - return takeWriteLock(r); - } - }); + new Action() { + @Override + public T run() throws IOException, InterruptedException { + return IndexManager.writeAccess(r); + } + }); } - public T prepareWriteLock(final ExceptionAction r) throws IOException, InterruptedException { + public T prepareWriteLock(final Action r) throws IOException, InterruptedException { synchronized (internalLock) { depth++; if (depth == 1) { @@ -143,61 +141,7 @@ } } } - - public T takeWriteLock(final ExceptionAction r) throws IOException, InterruptedException { - this.lock.writeLock().lock(); - try { - return Utilities.runPriorityIO(new Callable() { - @Override - public T call() throws Exception { - return r.run(); - } - }); - } catch (IOException ioe) { - //rethrow ioe - throw ioe; - } catch (InterruptedException ie) { - //rethrow ioe - throw ie; - } catch (RuntimeException re) { - //rethrow ioe - throw re; - } catch (Exception e) { - throw new IOException(e); - } finally { - this.lock.writeLock().unlock(); - } - } - - public T readLock (final ExceptionAction r) throws IOException, InterruptedException { - this.lock.readLock().lock(); - try { - return Utilities.runPriorityIO(new Callable() { - @Override - public T call() throws Exception { - return r.run(); - } - }); - } catch (IOException ioe) { - //rethrow ioe - throw ioe; - } catch (InterruptedException ie) { - //rethrow ioe - throw ie; - } catch (RuntimeException re) { - //rethrow ioe - throw re; - } catch (Exception e) { - throw new IOException(e); - } finally { - this.lock.readLock().unlock(); - } - } - - public boolean holdsWriteLock () { - return this.lock.isWriteLockedByCurrentThread(); - } - + public ClassIndexImpl getUsagesQuery (URL root) { synchronized (internalLock) { assert root != null; @@ -264,11 +208,7 @@ } } } - - public static interface ExceptionAction { - public T run () throws IOException, InterruptedException; - } - + private void fire (final Set roots, final byte op) { if (!this.listeners.isEmpty()) { ClassIndexManagerListener[] _listeners; diff --git a/java.source/src/org/netbeans/modules/java/source/usages/DocumentUtil.java b/java.source/src/org/netbeans/modules/java/source/usages/DocumentUtil.java --- a/java.source/src/org/netbeans/modules/java/source/usages/DocumentUtil.java +++ b/java.source/src/org/netbeans/modules/java/source/usages/DocumentUtil.java @@ -52,12 +52,14 @@ import javax.lang.model.element.ElementKind; import javax.lang.model.element.TypeElement; import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.KeywordAnalyzer; +import org.apache.lucene.analysis.PerFieldAnalyzerWrapper; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.WhitespaceAnalyzer; import org.apache.lucene.analysis.WhitespaceTokenizer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldSelector; -import org.apache.lucene.document.FieldSelectorResult; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; @@ -66,10 +68,12 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.WildcardQuery; +import org.netbeans.api.java.source.ClassIndex; import org.netbeans.api.java.source.ElementHandle; import org.netbeans.modules.java.source.ElementHandleAccessor; import 
org.netbeans.modules.java.source.parsing.FileObjects; -import org.netbeans.modules.java.source.usages.ResultConvertor.Stop; +import org.netbeans.modules.parsing.lucene.support.Convertor; +import org.netbeans.modules.parsing.lucene.support.Queries; import org.openide.filesystems.FileObject; import org.openide.util.Exceptions; @@ -110,33 +114,41 @@ private DocumentUtil () { } + public static Analyzer createAnalyzer() { + final PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(new KeywordAnalyzer()); + analyzer.addAnalyzer(DocumentUtil.FIELD_IDENTS, new WhitespaceAnalyzer()); + analyzer.addAnalyzer(DocumentUtil.FIELD_FEATURE_IDENTS, new WhitespaceAnalyzer()); + analyzer.addAnalyzer(DocumentUtil.FIELD_CASE_INSENSITIVE_FEATURE_IDENTS, new DocumentUtil.LCWhitespaceAnalyzer()); + return analyzer; + } + //Convertor factories - public static ResultConvertor fileObjectConvertor (final FileObject... roots) { + public static Convertor fileObjectConvertor (final FileObject... roots) { assert roots != null; return new FileObjectConvertor (roots); } - public static ResultConvertor> elementHandleConvertor () { + public static Convertor> elementHandleConvertor () { return new ElementHandleConvertor (); } - public static ResultConvertor binaryNameConvertor () { + public static Convertor binaryNameConvertor () { return new BinaryNameConvertor (); } - static ResultConvertor sourceNameConvertor () { + static Convertor sourceNameConvertor () { return new SourceNameConvertor(); } - static ResultConvertor,Object[]>,Document> documentConvertor() { + static Convertor,Object[]>,Document> documentConvertor() { return new DocumentConvertor(); } - static ResultConvertor,Query> queryClassWithEncConvertor() { + static Convertor,Query> queryClassWithEncConvertor() { return new QueryClassesWithEncConvertor(); } - static ResultConvertor,Query> queryClassConvertor() { + static Convertor,Query> queryClassConvertor() { return new QueryClassConvertor(); } @@ -344,37 +356,27 @@ static FieldSelector declaredTypesFieldSelector () { - return new FieldSelectorImpl(FIELD_PACKAGE_NAME,FIELD_BINARY_NAME); + return Queries.createFieldSelector(FIELD_PACKAGE_NAME,FIELD_BINARY_NAME); } static FieldSelector sourceNameFieldSelector () { - return new FieldSelectorImpl(FIELD_SOURCE); + return Queries.createFieldSelector(FIELD_SOURCE); + } + + static Queries.QueryKind translateQueryKind(final ClassIndex.NameKind kind) { + switch (kind) { + case SIMPLE_NAME: return Queries.QueryKind.EXACT; + case PREFIX: return Queries.QueryKind.PREFIX; + case CASE_INSENSITIVE_PREFIX: return Queries.QueryKind.CASE_INSENSITIVE_PREFIX; + case CAMEL_CASE: return Queries.QueryKind.CAMEL_CASE; + case CAMEL_CASE_INSENSITIVE: return Queries.QueryKind.CASE_INSENSITIVE_CAMEL_CASE; + case REGEXP: return Queries.QueryKind.REGEXP; + case CASE_INSENSITIVE_REGEXP: return Queries.QueryKind.CASE_INSENSITIVE_REGEXP; + default: throw new IllegalArgumentException(); + } } - // - private static class FieldSelectorImpl implements FieldSelector { - - private final Term[] terms; - - FieldSelectorImpl(String... 
fieldNames) { - terms = new Term[fieldNames.length]; - for (int i=0; i< fieldNames.length; i++) { - terms[i] = new Term (fieldNames[i],""); //NOI18N - } - } - - @Override - public FieldSelectorResult accept(String fieldName) { - for (Term t : terms) { - if (fieldName == t.field()) { - return FieldSelectorResult.LOAD; - } - } - return FieldSelectorResult.NO_LOAD; - } - } - - + // private static class LCWhitespaceTokenizer extends WhitespaceTokenizer { LCWhitespaceTokenizer (final Reader r) { super (r); @@ -395,7 +397,7 @@ // - private static class FileObjectConvertor implements ResultConvertor { + private static class FileObjectConvertor implements Convertor { private FileObject[] roots; @@ -470,7 +472,7 @@ } } - private static class ElementHandleConvertor implements ResultConvertor> { + private static class ElementHandleConvertor implements Convertor> { private final ElementKind[] kindHolder = new ElementKind[1]; @@ -485,7 +487,7 @@ } } - private static class BinaryNameConvertor implements ResultConvertor { + private static class BinaryNameConvertor implements Convertor { @Override public String convert (final Document doc) { @@ -493,7 +495,7 @@ } } - private static class SourceNameConvertor implements ResultConvertor { + private static class SourceNameConvertor implements Convertor { @Override public String convert(Document doc) { @@ -502,9 +504,9 @@ } } - private static class DocumentConvertor implements ResultConvertor,Object[]>,Document> { + private static class DocumentConvertor implements Convertor,Object[]>,Document> { @Override - public Document convert(Pair, Object[]> entry) throws Stop { + public Document convert(Pair, Object[]> entry) { final Pair pair = entry.first; final String cn = pair.first; final String srcName = pair.second; @@ -516,9 +518,9 @@ } } - private static class QueryClassesWithEncConvertor implements ResultConvertor,Query> { + private static class QueryClassesWithEncConvertor implements Convertor,Query> { @Override - public Query convert(Pair p) throws Stop { + public Query convert(Pair p) { return createClassWithEnclosedQuery(p); } @@ -549,9 +551,9 @@ } - private static class QueryClassConvertor implements ResultConvertor,Query> { + private static class QueryClassConvertor implements Convertor,Query> { @Override - public Query convert(Pair p) throws Stop { + public Query convert(Pair p) { return binaryNameSourceNamePairQuery(p); } diff --git a/java.source/src/org/netbeans/modules/java/source/usages/Index.java b/java.source/src/org/netbeans/modules/java/source/usages/Index.java deleted file mode 100644 --- a/java.source/src/org/netbeans/modules/java/source/usages/Index.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. - * - * Copyright 1997-2010 Oracle and/or its affiliates. All rights reserved. - * - * Oracle and Java are registered trademarks of Oracle and/or its affiliates. - * Other names may be trademarks of their respective owners. - * - * The contents of this file are subject to the terms of either the GNU - * General Public License Version 2 only ("GPL") or the Common - * Development and Distribution License("CDDL") (collectively, the - * "License"). You may not use this file except in compliance with the - * License. You can obtain a copy of the License at - * http://www.netbeans.org/cddl-gplv2.html - * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the - * specific language governing permissions and limitations under the - * License. 
When distributing the software, include this License Header - * Notice in each file and include the License file at - * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this - * particular file as subject to the "Classpath" exception as provided - * by Oracle in the GPL Version 2 section of the License file that - * accompanied this code. If applicable, add the following below the - * License Header, with the fields enclosed by brackets [] replaced by - * your own identifying information: - * "Portions Copyrighted [year] [name of copyright owner]" - * - * Contributor(s): - * - * The Original Software is NetBeans. The Initial Developer of the Original - * Software is Sun Microsystems, Inc. Portions Copyright 1997-2007 Sun - * Microsystems, Inc. All Rights Reserved. - * - * If you wish your version of this file to be governed by only the CDDL - * or only the GPL Version 2, indicate your decision by adding - * "[Contributor] elects to include this software in this distribution - * under the [CDDL or GPL Version 2] license." If you do not indicate a - * single choice of license, a recipient has the option to distribute - * your version of this file under either the CDDL, the GPL Version 2 or - * to extend the choice of license to its licensees as provided above. - * However, if you add GPL Version 2 code and therefore, elected the GPL - * Version 2 license, then the option applies only if the new code is - * made subject to such option by the copyright holder. - */ -package org.netbeans.modules.java.source.usages; - -import java.io.IOException; -import java.util.Collection; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.lucene.document.Document; -import org.apache.lucene.document.FieldSelector; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.Query; -import org.netbeans.api.annotations.common.NonNull; -import org.netbeans.api.annotations.common.NullAllowed; - -/** - * Index SPI. Represents an index for usages data - * @author Tomas Zezula - */ -public abstract class Index { - - public static final ThreadLocal cancel = new ThreadLocal () { - protected synchronized @Override AtomicBoolean initialValue() { - return new AtomicBoolean (); - } - }; - - public abstract boolean exists (); - public abstract boolean isValid (boolean tryOpen) throws IOException; - public abstract void query (Collection result, @NonNull ResultConvertor convertor, @NonNull FieldSelector selector, @NonNull Query... queries) throws IOException, InterruptedException; - public abstract void queryDocTerms(Map> result, @NonNull ResultConvertor convertor, @NonNull ResultConvertor termConvertor,@NonNull FieldSelector selector, @NonNull Query... 
queries) throws IOException, InterruptedException; - public abstract void queryTerms(@NonNull Collection result, @NullAllowed Term start, @NonNull ResultConvertor filter) throws IOException, InterruptedException; - public abstract void store (@NonNull Collection toAdd, @NonNull Collection toDelete, @NonNull ResultConvertor docConvertor, @NonNull ResultConvertor queryConvertor, boolean optimize) throws IOException; - public abstract void clear () throws IOException; - public abstract void close () throws IOException; - -} diff --git a/java.source/src/org/netbeans/modules/java/source/usages/IndexFactory.java b/java.source/src/org/netbeans/modules/java/source/usages/IndexFactory.java deleted file mode 100644 --- a/java.source/src/org/netbeans/modules/java/source/usages/IndexFactory.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. - * - * Copyright 1997-2010 Oracle and/or its affiliates. All rights reserved. - * - * Oracle and Java are registered trademarks of Oracle and/or its affiliates. - * Other names may be trademarks of their respective owners. - * - * The contents of this file are subject to the terms of either the GNU - * General Public License Version 2 only ("GPL") or the Common - * Development and Distribution License("CDDL") (collectively, the - * "License"). You may not use this file except in compliance with the - * License. You can obtain a copy of the License at - * http://www.netbeans.org/cddl-gplv2.html - * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the - * specific language governing permissions and limitations under the - * License. When distributing the software, include this License Header - * Notice in each file and include the License file at - * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this - * particular file as subject to the "Classpath" exception as provided - * by Oracle in the GPL Version 2 section of the License file that - * accompanied this code. If applicable, add the following below the - * License Header, with the fields enclosed by brackets [] replaced by - * your own identifying information: - * "Portions Copyrighted [year] [name of copyright owner]" - * - * Contributor(s): - * - * Portions Copyrighted 2007 Sun Microsystems, Inc. - */ -package org.netbeans.modules.java.source.usages; - -import java.io.File; -import java.io.IOException; - -/** - * - * @author Tomas Zezula - */ -public interface IndexFactory { - - public Index create (File cacheRoot) throws IOException; -} diff --git a/java.source/src/org/netbeans/modules/java/source/usages/LuceneIndex.java b/java.source/src/org/netbeans/modules/java/source/usages/LuceneIndex.java deleted file mode 100644 --- a/java.source/src/org/netbeans/modules/java/source/usages/LuceneIndex.java +++ /dev/null @@ -1,909 +0,0 @@ -/* - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. - * - * Copyright 1997-2010 Oracle and/or its affiliates. All rights reserved. - * - * Oracle and Java are registered trademarks of Oracle and/or its affiliates. - * Other names may be trademarks of their respective owners. - * - * The contents of this file are subject to the terms of either the GNU - * General Public License Version 2 only ("GPL") or the Common - * Development and Distribution License("CDDL") (collectively, the - * "License"). You may not use this file except in compliance with the - * License. You can obtain a copy of the License at - * http://www.netbeans.org/cddl-gplv2.html - * or nbbuild/licenses/CDDL-GPL-2-CP. 
See the License for the - * specific language governing permissions and limitations under the - * License. When distributing the software, include this License Header - * Notice in each file and include the License file at - * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this - * particular file as subject to the "Classpath" exception as provided - * by Oracle in the GPL Version 2 section of the License file that - * accompanied this code. If applicable, add the following below the - * License Header, with the fields enclosed by brackets [] replaced by - * your own identifying information: - * "Portions Copyrighted [year] [name of copyright owner]" - * - * Contributor(s): - * - * The Original Software is NetBeans. The Initial Developer of the Original - * Software is Sun Microsystems, Inc. Portions Copyright 1997-2006 Sun - * Microsystems, Inc. All Rights Reserved. - * - * If you wish your version of this file to be governed by only the CDDL - * or only the GPL Version 2, indicate your decision by adding - * "[Contributor] elects to include this software in this distribution - * under the [CDDL or GPL Version 2] license." If you do not indicate a - * single choice of license, a recipient has the option to distribute - * your version of this file under either the CDDL, the GPL Version 2 or - * to extend the choice of license to its licensees as provided above. - * However, if you add GPL Version 2 code and therefore, elected the GPL - * Version 2 license, then the option applies only if the new code is - * made subject to such option by the copyright holder. - */ - -package org.netbeans.modules.java.source.usages; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.lang.ref.SoftReference; -import java.lang.reflect.Field; -import java.net.MalformedURLException; -import java.net.URL; -import java.util.Arrays; -import java.util.BitSet; -import java.util.Collection; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import java.util.logging.Level; -import java.util.logging.Logger; -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.KeywordAnalyzer; -import org.apache.lucene.analysis.PerFieldAnalyzerWrapper; -import org.apache.lucene.analysis.WhitespaceAnalyzer; -import org.apache.lucene.document.Document; -import org.apache.lucene.document.FieldSelector; -import org.apache.lucene.index.CorruptIndexException; -import org.apache.lucene.index.FilterIndexReader; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.TermEnum; -import org.apache.lucene.search.Collector; -import org.apache.lucene.search.DefaultSimilarity; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Searcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.store.Directory; -import org.apache.lucene.store.FSDirectory; -import org.apache.lucene.store.RAMDirectory; -import org.netbeans.api.annotations.common.NonNull; -import org.netbeans.api.annotations.common.NullAllowed; -import org.netbeans.modules.java.source.usages.ResultConvertor.Stop; -import org.netbeans.modules.java.source.util.LMListener; -import org.netbeans.modules.parsing.impl.indexing.lucene.IndexCacheFactory; -import 
org.netbeans.modules.parsing.impl.indexing.lucene.util.Evictable; -import org.openide.util.Exceptions; -import org.openide.util.Parameters; -import org.openide.util.RequestProcessor; -import org.openide.util.Utilities; - -/** - * - * @author Tomas Zezula - */ -//@NotTreadSafe -class LuceneIndex extends Index { - - private static final String PROP_INDEX_POLICY = "java.index.useMemCache"; //NOI18N - private static final String PROP_CACHE_SIZE = "java.index.size"; //NOI18N - private static final boolean debugIndexMerging = Boolean.getBoolean("java.index.debugMerge"); // NOI18N - private static final CachePolicy DEFAULT_CACHE_POLICY = CachePolicy.DYNAMIC; - private static final float DEFAULT_CACHE_SIZE = 0.05f; - private static final CachePolicy cachePolicy = getCachePolicy(); - private static final String REFERENCES = "refs"; // NOI18N - private static final Logger LOGGER = Logger.getLogger(LuceneIndex.class.getName()); - private static final Analyzer analyzer; //Analyzer used to store documents - - static { - final PerFieldAnalyzerWrapper _analyzer = new PerFieldAnalyzerWrapper(new KeywordAnalyzer()); - _analyzer.addAnalyzer(DocumentUtil.FIELD_IDENTS, new WhitespaceAnalyzer()); - _analyzer.addAnalyzer(DocumentUtil.FIELD_FEATURE_IDENTS, new WhitespaceAnalyzer()); - _analyzer.addAnalyzer(DocumentUtil.FIELD_CASE_INSENSITIVE_FEATURE_IDENTS, new DocumentUtil.LCWhitespaceAnalyzer()); - analyzer = _analyzer; - } - - private final DirCache dirCache; - - static Index create (final File cacheRoot) throws IOException { - assert cacheRoot != null && cacheRoot.exists() && cacheRoot.canRead() && cacheRoot.canWrite(); - return new LuceneIndex (getReferencesCacheFolder(cacheRoot)); - } - - /** Creates a new instance of LuceneIndex */ - private LuceneIndex (final File refCacheRoot) throws IOException { - assert refCacheRoot != null; - this.dirCache = new DirCache(refCacheRoot,cachePolicy); - } - - @Override - public void query ( - final @NonNull Collection result, - final @NonNull ResultConvertor convertor, - final @NonNull FieldSelector selector, - final @NonNull Query... 
queries - ) throws IOException, InterruptedException { - Parameters.notNull("queries", queries); //NOI18N - Parameters.notNull("selector", selector); //NOI18N - Parameters.notNull("convertor", convertor); //NOI18N - Parameters.notNull("result", result); //NOI18N - - final IndexReader in = dirCache.getReader(); - if (in == null) { - LOGGER.fine(String.format("LuceneIndex[%s] is invalid!\n", this.toString())); - return; - } - final AtomicBoolean _cancel = cancel.get(); - assert _cancel != null; - final BitSet bs = new BitSet(in.maxDoc()); - final Collector c = QueryUtil.createBitSetCollector(bs); - final Searcher searcher = new IndexSearcher(in); - try { - for (Query q : queries) { - if (_cancel.get()) { - throw new InterruptedException (); - } - searcher.search(q, c); - } - } finally { - searcher.close(); - } - for (int docNum = bs.nextSetBit(0); docNum >= 0; docNum = bs.nextSetBit(docNum+1)) { - if (_cancel.get()) { - throw new InterruptedException (); - } - final Document doc = in.document(docNum, selector); - try { - final T value = convertor.convert(doc); - if (value != null) { - result.add (value); - } - } catch (ResultConvertor.Stop stop) { - //Stop not supported not needed - } - } - } - - @Override - public void queryTerms( - final @NonNull Collection result, - final @NullAllowed Term seekTo, - final @NonNull ResultConvertor filter) throws IOException, InterruptedException { - - final IndexReader in = dirCache.getReader(); - if (in == null) { - return; - } - final AtomicBoolean _cancel = cancel.get(); - assert _cancel != null; - - final TermEnum terms = seekTo == null ? in.terms () : in.terms (seekTo); - try { - do { - if (_cancel.get()) { - throw new InterruptedException (); - } - final Term currentTerm = terms.term(); - if (currentTerm != null) { - final T vote = filter.convert(currentTerm); - if (vote != null) { - result.add(vote); - } - } - } while (terms.next()); - } catch (ResultConvertor.Stop stop) { - //Stop iteration of TermEnum - } finally { - terms.close(); - } - } - - @Override - public void queryDocTerms( - final @NonNull Map> result, - final @NonNull ResultConvertor convertor, - final @NonNull ResultConvertor termConvertor, - final @NonNull FieldSelector selector, - final @NonNull Query... 
queries) throws IOException, InterruptedException { - Parameters.notNull("queries", queries); //NOI18N - Parameters.notNull("slector", selector); //NOI18N - Parameters.notNull("convertor", convertor); //NOI18N - Parameters.notNull("termConvertor", termConvertor); //NOI18N - Parameters.notNull("result", result); //NOI18N - final IndexReader in = dirCache.getReader(); - if (in == null) { - LOGGER.fine(String.format("LuceneIndex[%s] is invalid!\n", this.toString())); //NOI18N - return; - } - final AtomicBoolean _cancel = cancel.get(); - assert _cancel != null; - final BitSet bs = new BitSet(in.maxDoc()); - final Collector c = QueryUtil.createBitSetCollector(bs); - final Searcher searcher = new IndexSearcher(in); - final TermCollector termCollector = new TermCollector(); - try { - for (Query q : queries) { - if (_cancel.get()) { - throw new InterruptedException (); - } - if (q instanceof TermCollector.TermCollecting) { - ((TermCollector.TermCollecting)q).attach(termCollector); - } else { - throw new IllegalArgumentException ( - String.format("Query: %s does not implement TermCollecting", //NOI18N - q.getClass().getName())); - } - searcher.search(q, c); - } - } finally { - searcher.close(); - } - - for (int docNum = bs.nextSetBit(0); docNum >= 0; docNum = bs.nextSetBit(docNum+1)) { - if (_cancel.get()) { - throw new InterruptedException (); - } - final Document doc = in.document(docNum, selector); - try { - final T value = convertor.convert(doc); - if (value != null) { - final Set terms = termCollector.get(docNum); - result.put (value, convertTerms(termConvertor, terms)); - } - } catch (ResultConvertor.Stop stop) { - //Stop not supported not needed - } - } - } - - private static Set convertTerms(final ResultConvertor convertor, final Set terms) { - final Set result = new HashSet(terms.size()); - for (Term term : terms) { - try { - result.add(convertor.convert(term)); - } catch (Stop ex) { - //Not thrown, ignore - } - } - return result; - } - - @Override - public void store ( - final @NonNull Collection toAdd, - final @NonNull Collection toDelete, - final @NonNull ResultConvertor docConvertor, - final @NonNull ResultConvertor queryConvertor, - final boolean optimize) throws IOException{ - try { - ClassIndexManager.getDefault().takeWriteLock(new ClassIndexManager.ExceptionAction() { - @Override - public Void run() throws IOException, InterruptedException { - _store(toAdd, toDelete, docConvertor, queryConvertor, optimize); - return null; - } - }); - } catch (InterruptedException ie) { - throw new IOException("Interrupted"); //NOI18N - } - } - - private void _store ( - final @NonNull Collection data, - final @NonNull Collection toDelete, - final @NonNull ResultConvertor docConvertor, - final @NonNull ResultConvertor queryConvertor, - final boolean optimize) throws IOException { - assert ClassIndexManager.getDefault().holdsWriteLock(); - boolean create = !exists(); - final IndexWriter out = dirCache.getWriter(create); - try { - if (!create) { - for (S td : toDelete) { - try { - out.deleteDocuments(queryConvertor.convert(td)); - } catch (Stop ex) { - //Never thrown - Exceptions.printStackTrace(ex); - } - } - } - storeData(out, data, docConvertor, optimize); - } finally { - try { - out.close(); - } finally { - dirCache.refreshReader(); - } - } - } - - private void storeData ( - final IndexWriter out, - final @NonNull Collection data, - final @NonNull ResultConvertor convertor, - final boolean optimize) throws IOException { - if (debugIndexMerging) { - out.setInfoStream (System.err); - } - final 
LuceneIndexMBean indexSettings = LuceneIndexMBeanImpl.getDefault(); - if (indexSettings != null) { - out.setMergeFactor(indexSettings.getMergeFactor()); - out.setMaxMergeDocs(indexSettings.getMaxMergeDocs()); - out.setMaxBufferedDocs(indexSettings.getMaxBufferedDocs()); - } - final LMListener lmListener = new LMListener (); - Directory memDir = null; - IndexWriter activeOut = null; - if (lmListener.isLowMemory()) { - activeOut = out; - } - else { - memDir = new RAMDirectory (); - activeOut = new IndexWriter (memDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED); - } - for (Iterator it = data.iterator(); it.hasNext();) { - try { - T entry = it.next(); - it.remove(); - final Document doc = convertor.convert(entry); - activeOut.addDocument(doc); - if (memDir != null && lmListener.isLowMemory()) { - activeOut.close(); - out.addIndexesNoOptimize(new Directory[] {memDir}); - memDir = new RAMDirectory (); - activeOut = new IndexWriter (memDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED); - } - } catch (Stop ex) { - //Never thrown but log - Exceptions.printStackTrace(ex); - } - } - if (memDir != null) { - activeOut.close(); - out.addIndexesNoOptimize(new Directory[] {memDir}); - activeOut = null; - memDir = null; - } - if (optimize) { - out.optimize(false); - } - } - - @Override - public boolean isValid (boolean force) throws IOException { - return dirCache.isValid(force); - } - - @Override - public void clear () throws IOException { - try { - ClassIndexManager.getDefault().takeWriteLock(new ClassIndexManager.ExceptionAction() { - @Override - public Void run() throws IOException, InterruptedException { - dirCache.clear(); - return null; - } - }); - } catch (InterruptedException ex) { - throw new IOException(ex); - } - } - - @Override - public boolean exists () { - return this.dirCache.exists(); - } - - @Override - public void close () throws IOException { - if (LOGGER.isLoggable(Level.FINEST)) { - LOGGER.log(Level.FINEST, "Closing index: {0} {1}", //NOI18N - new Object[]{ - this.dirCache.toString(), - Thread.currentThread().getStackTrace()}); - } - dirCache.close(true); - } - - - @Override - public String toString () { - return getClass().getSimpleName()+"["+this.dirCache.toString()+"]"; //NOI18N - } - - private static File getReferencesCacheFolder (final File cacheRoot) throws IOException { - File refRoot = new File (cacheRoot,REFERENCES); - if (!refRoot.exists()) { - refRoot.mkdir(); - } - return refRoot; - } - - private static CachePolicy getCachePolicy() { - final String value = System.getProperty(PROP_INDEX_POLICY); //NOI18N - if (Boolean.TRUE.toString().equals(value) || - CachePolicy.ALL.getSystemName().equals(value)) { - return CachePolicy.ALL; - } - if (Boolean.FALSE.toString().equals(value) || - CachePolicy.NONE.getSystemName().equals(value)) { - return CachePolicy.NONE; - } - if (CachePolicy.DYNAMIC.getSystemName().equals(value)) { - return CachePolicy.DYNAMIC; - } - return DEFAULT_CACHE_POLICY; - } - - - // - private static class NoNormsReader extends FilterIndexReader { - - //@GuardedBy (this) - private byte[] norms; - - public NoNormsReader (final IndexReader reader) { - super (reader); - } - - @Override - public byte[] norms(String field) throws IOException { - byte[] _norms = fakeNorms (); - return _norms; - } - - @Override - public void norms(String field, byte[] norm, int offset) throws IOException { - byte[] _norms = fakeNorms (); - System.arraycopy(_norms, 0, norm, offset, _norms.length); - } - - @Override - public boolean hasNorms(String field) throws IOException 
{ - return false; - } - - @Override - protected void doSetNorm(int doc, String field, byte norm) throws CorruptIndexException, IOException { - //Ignore - } - - @Override - protected void doClose() throws IOException { - synchronized (this) { - this.norms = null; - } - super.doClose(); - } - - @Override - public IndexReader reopen() throws IOException { - final IndexReader newIn = in.reopen(); - if (newIn == in) { - return this; - } - return new NoNormsReader(newIn); - } - - /** - * Expert: Fakes norms, norms are not needed for Netbeans index. - */ - private synchronized byte[] fakeNorms() { - if (this.norms == null) { - this.norms = new byte[maxDoc()]; - Arrays.fill(this.norms, DefaultSimilarity.encodeNorm(1.0f)); - } - return this.norms; - } - } - - private enum CachePolicy { - - NONE("none", false), //NOI18N - DYNAMIC("dynamic", true), //NOI18N - ALL("all", true); //NOI18N - - private final String sysName; - private final boolean hasMemCache; - - CachePolicy(final String sysName, final boolean hasMemCache) { - assert sysName != null; - this.sysName = sysName; - this.hasMemCache = hasMemCache; - } - - String getSystemName() { - return sysName; - } - - boolean hasMemCache() { - return hasMemCache; - } - } - - private static final class DirCache implements Evictable { - - private static final String CACHE_LOCK_PREFIX = "nb-lock"; //NOI18N - private static final RequestProcessor RP = new RequestProcessor(LuceneIndex.class.getName(), 1); - private static final long maxCacheSize = getCacheSize(); - private static volatile long currentCacheSize; - - private final File folder; - private final CachePolicy cachePolicy; - private FSDirectory fsDir; - private RAMDirectory memDir; - private CleanReference ref; - private IndexReader reader; - private volatile boolean closed; - private volatile Boolean validCache; - - private DirCache(final @NonNull File folder, final @NonNull CachePolicy cachePolicy) throws IOException { - assert folder != null; - assert cachePolicy != null; - this.folder = folder; - this.fsDir = createFSDirectory(folder); - this.cachePolicy = cachePolicy; - } - - synchronized void clear() throws IOException { - checkPreconditions(); - close (false); - try { - final String[] content = fsDir.listAll(); - boolean dirty = false; - if (content != null) { - for (String file : content) { - try { - fsDir.deleteFile(file); - } catch (IOException e) { - //Some temporary files - if (fsDir.fileExists(file)) { - dirty = true; - } - } - } - } - if (dirty) { - //Try to delete dirty files and log what's wrong - final File cacheDir = fsDir.getFile(); - final File[] children = cacheDir.listFiles(); - if (children != null) { - for (final File child : children) { - if (!child.delete()) { - final Class c = fsDir.getClass(); - int refCount = -1; - try { - final Field field = c.getDeclaredField("refCount"); //NOI18N - field.setAccessible(true); - refCount = field.getInt(fsDir); - } catch (NoSuchFieldException e) {/*Not important*/} - catch (IllegalAccessException e) {/*Not important*/} - final Map sts = Thread.getAllStackTraces(); - throw new IOException("Cannot delete: " + child.getAbsolutePath() + "(" + //NOI18N - child.exists() +","+ //NOI18N - child.canRead() +","+ //NOI18N - child.canWrite() +","+ //NOI18N - cacheDir.canRead() +","+ //NOI18N - cacheDir.canWrite() +","+ //NOI18N - refCount +","+ //NOI18N - sts +")"); //NOI18N - } - } - } - } - } finally { - //Need to recreate directory, see issue: #148374 - this.close(true); - this.fsDir = createFSDirectory(this.folder); - closed = false; - } - } - - 
synchronized void close (final boolean closeFSDir) throws IOException { - try { - try { - if (this.reader != null) { - this.reader.close(); - this.reader = null; - } - } finally { - if (memDir != null) { - assert cachePolicy.hasMemCache(); - if (this.ref != null) { - this.ref.clear(); - } - final Directory tmpDir = this.memDir; - memDir = null; - tmpDir.close(); - } - } - } finally { - if (closeFSDir) { - this.closed = true; - this.fsDir.close(); - } - } - } - - boolean exists() { - try { - return IndexReader.indexExists(this.fsDir); - } catch (IOException e) { - return false; - } catch (RuntimeException e) { - LOGGER.log(Level.INFO, "Broken index: " + folder.getAbsolutePath(), e); - return false; - } - } - - boolean isValid(boolean force) throws IOException { - checkPreconditions(); - Boolean valid = validCache; - if (force || valid == null) { - final Collection locks = getOrphanLock(); - boolean res = false; - if (!locks.isEmpty()) { - LOGGER.log(Level.WARNING, "Broken (locked) index folder: {0}", folder.getAbsolutePath()); //NOI18N - for (String lockName : locks) { - fsDir.deleteFile(lockName); - } - if (force) { - clear(); - } - } else { - res = exists(); - if (res && force) { - try { - getReader(); - } catch (java.io.IOException e) { - res = false; - clear(); - } catch (RuntimeException e) { - res = false; - clear(); - } - } - } - valid = res; - validCache = valid; - } - return valid; - } - - IndexWriter getWriter (final boolean create) throws IOException { - checkPreconditions(); - hit(); - //Issue #149757 - logging - try { - return new IndexWriter (this.fsDir, analyzer, create, IndexWriter.MaxFieldLength.LIMITED); - } catch (IOException ioe) { - throw annotateException (ioe); - } - } - - synchronized IndexReader getReader () throws IOException { - checkPreconditions(); - hit(); - if (this.reader == null) { - if (validCache == Boolean.FALSE) { - return null; - } - //Issue #149757 - logging - try { - Directory source; - if (cachePolicy.hasMemCache()) { - memDir = new RAMDirectory(fsDir); - if (cachePolicy == CachePolicy.DYNAMIC) { - ref = new CleanReference (new RAMDirectory[] {this.memDir}); - } - source = memDir; - } else { - source = fsDir; - } - assert source != null; - this.reader = new NoNormsReader(IndexReader.open(source,true)); - } catch (final FileNotFoundException fnf) { - //pass - returns null - } catch (IOException ioe) { - throw annotateException (ioe); - } - } - return this.reader; - } - - - synchronized void refreshReader() throws IOException { - try { - if (cachePolicy.hasMemCache()) { - close(false); - } else { - if (reader != null) { - final IndexReader newReader = reader.reopen(); - if (newReader != reader) { - reader.close(); - reader = newReader; - } - } - } - } finally { - validCache = true; - } - } - - @Override - public String toString() { - return this.folder.getAbsolutePath(); - } - - @Override - public void evicted() { - //When running from memory cache no need to close the reader, it does not own file handler. 
- if (!cachePolicy.hasMemCache()) { - //Threading: The called may own the CIM.readAccess, perform by dedicated worker to prevent deadlock - RP.post(new Runnable() { - @Override - public void run () { - try { - ClassIndexManager.getDefault().takeWriteLock(new ClassIndexManager.ExceptionAction() { - @Override - public Void run() throws IOException, InterruptedException { - close(false); - LOGGER.log(Level.FINE, "Evicted index: {0}", folder.getAbsolutePath()); //NOI18N - return null; - } - }); - } catch (IOException ex) { - Exceptions.printStackTrace(ex); - } catch (InterruptedException ie) { - Exceptions.printStackTrace(ie); - } - } - }); - } else if ((ref != null && currentCacheSize > maxCacheSize)) { - ref.clearHRef(); - } - } - - private synchronized void hit() { - if (!cachePolicy.hasMemCache()) { - try { - final URL url = folder.toURI().toURL(); - IndexCacheFactory.getDefault().getCache().put(url, this); - } catch (MalformedURLException e) { - Exceptions.printStackTrace(e); - } - } else if (ref != null) { - ref.get(); - } - } - - private Collection getOrphanLock () { - final List locks = new LinkedList(); - final String[] content = folder.list(); - if (content != null) { - for (String name : content) { - if (name.startsWith(CACHE_LOCK_PREFIX)) { - locks.add(name); - } - } - } - return locks; - } - - private void checkPreconditions () throws ClassIndexImpl.IndexAlreadyClosedException{ - if (closed) { - throw new ClassIndexImpl.IndexAlreadyClosedException(); - } - } - - private IOException annotateException (final IOException ioe) { - String message; - File[] children = folder.listFiles(); - if (children == null) { - message = "Non existing index folder"; - } - else { - StringBuilder b = new StringBuilder(); - for (File c : children) { - b.append(c.getName()).append(" f: ").append(c.isFile()). - append(" r: ").append(c.canRead()). - append(" w: ").append(c.canWrite()).append("\n"); //NOI18N - } - message = b.toString(); - } - return Exceptions.attachMessage(ioe, message); - } - - private static FSDirectory createFSDirectory (final File indexFolder) throws IOException { - assert indexFolder != null; - FSDirectory directory = FSDirectory.open(indexFolder); - directory.getLockFactory().setLockPrefix(CACHE_LOCK_PREFIX); - return directory; - } - - private static long getCacheSize() { - float per = -1.0f; - final String propVal = System.getProperty(PROP_CACHE_SIZE); - if (propVal != null) { - try { - per = Float.parseFloat(propVal); - } catch (NumberFormatException nfe) { - //Handled below - } - } - if (per<0) { - per = DEFAULT_CACHE_SIZE; - } - return (long) (per * Runtime.getRuntime().maxMemory()); - } - - private final class CleanReference extends SoftReference implements Runnable { - - @SuppressWarnings("VolatileArrayField") - private volatile Directory[] hardRef; //clearHRef may be called by more concurrently (read lock). - private final AtomicLong size = new AtomicLong(); //clearHRef may be called by more concurrently (read lock). 
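
CleanReference, whose constructor follows, is a SoftReference to the cached RAMDirectory that is additionally pinned with a hard reference while the global budget (currentCacheSize versus maxCacheSize) permits; clearHRef() drops the pin and returns the bytes to the budget so the GC may reclaim the in-memory copy. A simplified, assumption-level sketch of that pinning pattern (the real class also hooks into the active reference queue and closes the index when collected):

import java.lang.ref.SoftReference;
import java.util.concurrent.atomic.AtomicLong;

final class BudgetedSoftRef<T> extends SoftReference<T> {
    private static final AtomicLong used = new AtomicLong(); // global cache budget accounting
    private volatile T hardRef;                               // pins the referent while under budget
    private final AtomicLong mySize = new AtomicLong();       // bytes charged by this reference

    BudgetedSoftRef(T referent, long size, long budget) {
        super(referent);
        if (used.get() < budget) {          // only pin while the global budget allows it
            hardRef = referent;
            mySize.set(size);
            used.addAndGet(size);
        }
    }

    void clearHRef() {                      // safe to call more than once and concurrently
        hardRef = null;
        used.addAndGet(-mySize.getAndSet(0));
    }
}
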
- - private CleanReference(final RAMDirectory[] dir) { - super (dir, Utilities.activeReferenceQueue()); - boolean doHardRef = currentCacheSize < maxCacheSize; - if (doHardRef) { - this.hardRef = dir; - long _size = dir[0].sizeInBytes(); - size.set(_size); - currentCacheSize+=_size; - } - LOGGER.log(Level.FINEST, "Caching index: {0} cache policy: {1}", //NOI18N - new Object[]{ - folder.getAbsolutePath(), - cachePolicy.getSystemName() - }); - } - - @Override - public void run() { - try { - LOGGER.log(Level.FINEST, "Dropping cache index: {0} cache policy: {1}", //NOI18N - new Object[] { - folder.getAbsolutePath(), - cachePolicy.getSystemName() - }); - close(false); - } catch (IOException ex) { - Exceptions.printStackTrace(ex); - } - } - - @Override - public void clear() { - clearHRef(); - super.clear(); - } - - void clearHRef() { - this.hardRef = null; - long mySize = size.getAndSet(0); - currentCacheSize-=mySize; - } - } - } - // - -} diff --git a/java.source/src/org/netbeans/modules/java/source/usages/LuceneIndexFactory.java b/java.source/src/org/netbeans/modules/java/source/usages/LuceneIndexFactory.java deleted file mode 100644 --- a/java.source/src/org/netbeans/modules/java/source/usages/LuceneIndexFactory.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. - * - * Copyright 1997-2010 Oracle and/or its affiliates. All rights reserved. - * - * Oracle and Java are registered trademarks of Oracle and/or its affiliates. - * Other names may be trademarks of their respective owners. - * - * The contents of this file are subject to the terms of either the GNU - * General Public License Version 2 only ("GPL") or the Common - * Development and Distribution License("CDDL") (collectively, the - * "License"). You may not use this file except in compliance with the - * License. You can obtain a copy of the License at - * http://www.netbeans.org/cddl-gplv2.html - * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the - * specific language governing permissions and limitations under the - * License. When distributing the software, include this License Header - * Notice in each file and include the License file at - * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this - * particular file as subject to the "Classpath" exception as provided - * by Oracle in the GPL Version 2 section of the License file that - * accompanied this code. If applicable, add the following below the - * License Header, with the fields enclosed by brackets [] replaced by - * your own identifying information: - * "Portions Copyrighted [year] [name of copyright owner]" - * - * Contributor(s): - * - * Portions Copyrighted 2007 Sun Microsystems, Inc. - */ -package org.netbeans.modules.java.source.usages; - -import java.io.File; -import java.io.IOException; - -/** - * - * @author Tomas Zezula - * Stateless => Single instance - */ -public class LuceneIndexFactory implements IndexFactory { - - //Guarded by class loader. 
- private static final LuceneIndexFactory instance = new LuceneIndexFactory(); - - private LuceneIndexFactory() { - } - - public Index create(File cacheRoot) throws IOException { - return LuceneIndex.create(cacheRoot); - } - - public static LuceneIndexFactory getInstance () { - return instance; - } - -} diff --git a/java.source/src/org/netbeans/modules/java/source/usages/LuceneIndexMBean.java b/java.source/src/org/netbeans/modules/java/source/usages/LuceneIndexMBean.java deleted file mode 100644 --- a/java.source/src/org/netbeans/modules/java/source/usages/LuceneIndexMBean.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. - * - * Copyright 1997-2010 Oracle and/or its affiliates. All rights reserved. - * - * Oracle and Java are registered trademarks of Oracle and/or its affiliates. - * Other names may be trademarks of their respective owners. - * - * The contents of this file are subject to the terms of either the GNU - * General Public License Version 2 only ("GPL") or the Common - * Development and Distribution License("CDDL") (collectively, the - * "License"). You may not use this file except in compliance with the - * License. You can obtain a copy of the License at - * http://www.netbeans.org/cddl-gplv2.html - * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the - * specific language governing permissions and limitations under the - * License. When distributing the software, include this License Header - * Notice in each file and include the License file at - * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this - * particular file as subject to the "Classpath" exception as provided - * by Oracle in the GPL Version 2 section of the License file that - * accompanied this code. If applicable, add the following below the - * License Header, with the fields enclosed by brackets [] replaced by - * your own identifying information: - * "Portions Copyrighted [year] [name of copyright owner]" - * - * Contributor(s): - * - * The Original Software is NetBeans. The Initial Developer of the Original - * Software is Sun Microsystems, Inc. Portions Copyright 1997-2006 Sun - * Microsystems, Inc. All Rights Reserved. - * - * If you wish your version of this file to be governed by only the CDDL - * or only the GPL Version 2, indicate your decision by adding - * "[Contributor] elects to include this software in this distribution - * under the [CDDL or GPL Version 2] license." If you do not indicate a - * single choice of license, a recipient has the option to distribute - * your version of this file under either the CDDL, the GPL Version 2 or - * to extend the choice of license to its licensees as provided above. - * However, if you add GPL Version 2 code and therefore, elected the GPL - * Version 2 license, then the option applies only if the new code is - * made subject to such option by the copyright holder. 
- */ - -package org.netbeans.modules.java.source.usages; - -/** - * - * @author Tomas Zezula - */ -public interface LuceneIndexMBean { - - public static final String OBJECT_NAME = "org.netbeans.modules.java.source:type=LuceneIndex"; //NOI18N - - public int getMergeFactor (); - - public void setMergeFactor (int mf); - - public int getMaxMergeDocs (); - - public void setMaxMergeDocs (int nd); - - public int getMaxBufferedDocs (); - - public void setMaxBufferedDocs (int nd); -} diff --git a/java.source/src/org/netbeans/modules/java/source/usages/LuceneIndexMBeanImpl.java b/java.source/src/org/netbeans/modules/java/source/usages/LuceneIndexMBeanImpl.java deleted file mode 100644 --- a/java.source/src/org/netbeans/modules/java/source/usages/LuceneIndexMBeanImpl.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. - * - * Copyright 1997-2010 Oracle and/or its affiliates. All rights reserved. - * - * Oracle and Java are registered trademarks of Oracle and/or its affiliates. - * Other names may be trademarks of their respective owners. - * - * The contents of this file are subject to the terms of either the GNU - * General Public License Version 2 only ("GPL") or the Common - * Development and Distribution License("CDDL") (collectively, the - * "License"). You may not use this file except in compliance with the - * License. You can obtain a copy of the License at - * http://www.netbeans.org/cddl-gplv2.html - * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the - * specific language governing permissions and limitations under the - * License. When distributing the software, include this License Header - * Notice in each file and include the License file at - * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this - * particular file as subject to the "Classpath" exception as provided - * by Oracle in the GPL Version 2 section of the License file that - * accompanied this code. If applicable, add the following below the - * License Header, with the fields enclosed by brackets [] replaced by - * your own identifying information: - * "Portions Copyrighted [year] [name of copyright owner]" - * - * Contributor(s): - * - * The Original Software is NetBeans. The Initial Developer of the Original - * Software is Sun Microsystems, Inc. Portions Copyright 1997-2006 Sun - * Microsystems, Inc. All Rights Reserved. - * - * If you wish your version of this file to be governed by only the CDDL - * or only the GPL Version 2, indicate your decision by adding - * "[Contributor] elects to include this software in this distribution - * under the [CDDL or GPL Version 2] license." If you do not indicate a - * single choice of license, a recipient has the option to distribute - * your version of this file under either the CDDL, the GPL Version 2 or - * to extend the choice of license to its licensees as provided above. - * However, if you add GPL Version 2 code and therefore, elected the GPL - * Version 2 license, then the option applies only if the new code is - * made subject to such option by the copyright holder. 
- */ - -package org.netbeans.modules.java.source.usages; -import javax.management.NotCompliantMBeanException; -import javax.management.StandardMBean; -import org.apache.lucene.index.IndexWriter; - -import java.util.logging.Logger; -import java.util.logging.Level; - -/** - * - * @author Tomas Zezula - */ -public class LuceneIndexMBeanImpl extends StandardMBean implements LuceneIndexMBean { - - private static LuceneIndexMBeanImpl instance; - - private int mergeFactor = IndexWriter.DEFAULT_MERGE_FACTOR; - private int maxMergeDocs = IndexWriter.DEFAULT_MAX_MERGE_DOCS; - private int maxBufferedDocs = IndexWriter.DEFAULT_MAX_BUFFERED_DOCS; - - private LuceneIndexMBeanImpl () throws NotCompliantMBeanException { - super (LuceneIndexMBean.class); - } - - public int getMergeFactor () { - return this.mergeFactor; - } - - public void setMergeFactor (int mf) { - this.mergeFactor = mf; - } - - public int getMaxMergeDocs () { - return this.maxMergeDocs; - } - - public void setMaxMergeDocs (int nd) { - this.maxMergeDocs = nd; - } - - public int getMaxBufferedDocs () { - return this.maxBufferedDocs; - } - - public void setMaxBufferedDocs (int nd) { - this.maxBufferedDocs = nd; - } - - private static Logger log = Logger.getLogger(LuceneIndexMBeanImpl.class.getName()); - public static synchronized LuceneIndexMBeanImpl getDefault () { - if (instance == null) { - try { - instance = new LuceneIndexMBeanImpl (); - } catch (NotCompliantMBeanException e) { - if (log.isLoggable(Level.SEVERE)) - log.log(Level.SEVERE, e.getMessage(), e); - } - } - return instance; - } - -} diff --git a/java.source/src/org/netbeans/modules/java/source/usages/PersistentClassIndex.java b/java.source/src/org/netbeans/modules/java/source/usages/PersistentClassIndex.java --- a/java.source/src/org/netbeans/modules/java/source/usages/PersistentClassIndex.java +++ b/java.source/src/org/netbeans/modules/java/source/usages/PersistentClassIndex.java @@ -70,7 +70,11 @@ import org.netbeans.api.java.source.JavaSource; import org.netbeans.api.java.source.JavaSource.Phase; import org.netbeans.modules.java.source.JavaSourceAccessor; -import org.netbeans.modules.java.source.usages.ResultConvertor.Stop; +import org.netbeans.modules.parsing.lucene.support.Convertor; +import org.netbeans.modules.parsing.lucene.support.Index; +import org.netbeans.modules.parsing.lucene.support.IndexManager; +import org.netbeans.modules.parsing.lucene.support.Queries; +import org.netbeans.modules.parsing.lucene.support.StoppableConvertor; import org.openide.filesystems.FileObject; import org.openide.filesystems.URLMapper; import org.openide.util.Exceptions; @@ -89,16 +93,15 @@ //@GuardedBy("this") private Set rootPkgCache; private static final Logger LOGGER = Logger.getLogger(PersistentClassIndex.class.getName()); - private static IndexFactory indexFactory = LuceneIndexFactory.getInstance(); + private static final String REFERENCES = "refs"; // NOI18N /** Creates a new instance of ClassesAndMembersUQ */ private PersistentClassIndex(final URL root, final File cacheRoot, final boolean source) throws IOException, IllegalArgumentException { assert root != null; this.root = root; - assert indexFactory != null; this.cacheRoot = cacheRoot; - this.index = indexFactory.create(cacheRoot); + this.index = IndexManager.createIndex(getReferencesCacheFolder(cacheRoot), DocumentUtil.createAnalyzer()); this.isSource = source; } @@ -120,7 +123,7 @@ @Override public boolean isEmpty () { try { - return ClassIndexManager.getDefault().readLock(new ClassIndexManager.ExceptionAction() { + return 
IndexManager.readAccess(new IndexManager.Action() { @Override public Boolean run() throws IOException, InterruptedException { return !PersistentClassIndex.this.index.exists(); @@ -161,7 +164,7 @@ public String getSourceName (final String binaryName) throws IOException, InterruptedException { final Query q = DocumentUtil.binaryNameQuery(binaryName); Set names = new HashSet(); - index.query(names, DocumentUtil.sourceNameConvertor(), DocumentUtil.sourceNameFieldSelector(), q); + index.query(names, DocumentUtil.sourceNameConvertor(), DocumentUtil.sourceNameFieldSelector(), cancel.get(), q); return names.isEmpty() ? null : names.iterator().next(); } @@ -175,14 +178,14 @@ // Implementation of UsagesQueryImpl --------------------------------------- @Override - public void search (final String binaryName, final Set usageType, final ResultConvertor convertor, final Set result) throws InterruptedException, IOException { + public void search (final String binaryName, final Set usageType, final Convertor convertor, final Set result) throws InterruptedException, IOException { updateDirty(); if (BinaryAnalyser.OBJECT.equals(binaryName)) { this.getDeclaredTypes("", ClassIndex.NameKind.PREFIX, convertor, result); return; } - ClassIndexManager.getDefault().readLock(new ClassIndexManager.ExceptionAction () { + IndexManager.readAccess(new IndexManager.Action () { @Override public Void run () throws IOException, InterruptedException { usages(binaryName, usageType, convertor, result); @@ -193,42 +196,45 @@ @Override - public void getDeclaredTypes (final String simpleName, final ClassIndex.NameKind kind, final ResultConvertor convertor, final Set result) throws InterruptedException, IOException { + public void getDeclaredTypes (final String simpleName, final ClassIndex.NameKind kind, final Convertor convertor, final Set result) throws InterruptedException, IOException { updateDirty(); - ClassIndexManager.getDefault().readLock(new ClassIndexManager.ExceptionAction () { + IndexManager.readAccess(new IndexManager.Action () { @Override public Void run () throws IOException, InterruptedException { - final Query query = QueryUtil.createQuery( - Pair.of(DocumentUtil.FIELD_SIMPLE_NAME,DocumentUtil.FIELD_CASE_INSENSITIVE_NAME), + final Query query = Queries.createQuery( + DocumentUtil.FIELD_SIMPLE_NAME, + DocumentUtil.FIELD_CASE_INSENSITIVE_NAME, simpleName, - kind); - index.query(result, convertor, DocumentUtil.declaredTypesFieldSelector(), query); + DocumentUtil.translateQueryKind(kind)); + index.query(result, convertor, DocumentUtil.declaredTypesFieldSelector(), cancel.get(), query); return null; } }); } @Override - public void getDeclaredElements (final String ident, final ClassIndex.NameKind kind, final ResultConvertor convertor, final Map> result) throws InterruptedException, IOException { + public void getDeclaredElements (final String ident, final ClassIndex.NameKind kind, final Convertor convertor, final Map> result) throws InterruptedException, IOException { updateDirty(); - ClassIndexManager.getDefault().readLock(new ClassIndexManager.ExceptionAction() { + IndexManager.readAccess(new IndexManager.Action() { @Override public Void run () throws IOException, InterruptedException { - final Query[] queries = new Query[] {QueryUtil.createTermCollectingQuery( - Pair.of(DocumentUtil.FIELD_FEATURE_IDENTS,DocumentUtil.FIELD_CASE_INSENSITIVE_FEATURE_IDENTS), + final Query query = Queries.createTermCollectingQuery( + DocumentUtil.FIELD_FEATURE_IDENTS, + DocumentUtil.FIELD_CASE_INSENSITIVE_FEATURE_IDENTS, ident, - kind)}; 
+ DocumentUtil.translateQueryKind(kind)); index.queryDocTerms( result, convertor, - new ResultConvertor(){ + new Convertor(){ @Override - public String convert(Term p) throws Stop { + public String convert(Term p) { return p.text(); } }, DocumentUtil.declaredTypesFieldSelector(), - queries); + cancel.get(), + query); return null; } }); @@ -237,7 +243,7 @@ @Override public void getPackageNames (final String prefix, final boolean directOnly, final Set result) throws InterruptedException, IOException { - ClassIndexManager.getDefault().readLock(new ClassIndexManager.ExceptionAction() { + IndexManager.readAccess(new IndexManager.Action() { @Override public Void run () throws IOException, InterruptedException { final boolean cacheOp = directOnly && prefix.length() == 0; @@ -255,8 +261,8 @@ } else { collectInto = result; } - final Pair,Term> filter = QueryUtil.createPackageFilter(prefix,directOnly); - index.queryTerms(collectInto, filter.second, filter.first); + final Pair,Term> filter = QueryUtil.createPackageFilter(prefix,directOnly); + index.queryTerms(collectInto, filter.second, filter.first, cancel.get()); if (cacheOp) { synchronized (PersistentClassIndex.this) { if (rootPkgCache == null) { @@ -278,12 +284,7 @@ public @Override String toString () { return "PersistentClassIndex["+this.root.toExternalForm()+"]"; // NOI18N } - - //Unit test methods - public static void setIndexFactory (final IndexFactory factory) { - indexFactory = (factory == null ? LuceneIndexFactory.getInstance() : factory); - } - + //Protected methods -------------------------------------------------------- @Override protected final void close () throws IOException { @@ -293,6 +294,14 @@ // Private methods --------------------------------------------------------- + private static File getReferencesCacheFolder (final File cacheRoot) throws IOException { + File refRoot = new File (cacheRoot,REFERENCES); + if (!refRoot.exists()) { + refRoot.mkdir(); + } + return refRoot; + } + private void updateDirty () { final URL url = this.dirty; if (url != null) { @@ -308,8 +317,8 @@ @Override public void run (final CompilationController controller) { try { - ClassIndexManager.getDefault().takeWriteLock( - new ClassIndexManager.ExceptionAction() { + IndexManager.writeAccess( + new IndexManager.Action() { @Override public Void run () throws IOException { if (controller.toPhase(Phase.RESOLVED).compareTo(Phase.RESOLVED)<0) { @@ -333,7 +342,7 @@ return null; } }); - } catch (IndexAlreadyClosedException e) { + } catch (Index.IndexClosedException e) { //A try to store to closed index, safe to ignore. //Data will be scanned when project is reopened. 
LOGGER.info("Ignoring store into closed index"); @@ -357,9 +366,9 @@ } } - private void usages (final String binaryName, final Set usageType, ResultConvertor convertor, Set result) throws InterruptedException, IOException { + private void usages (final String binaryName, final Set usageType, Convertor convertor, Set result) throws InterruptedException, IOException { final Query usagesQuery = QueryUtil.createUsagesQuery(binaryName, usageType, Occur.SHOULD); - this.index.query(result, convertor, DocumentUtil.declaredTypesFieldSelector(), usagesQuery); + this.index.query(result, convertor, DocumentUtil.declaredTypesFieldSelector(), cancel.get(), usagesQuery); } private synchronized void resetPkgCache() { diff --git a/java.source/src/org/netbeans/modules/java/source/usages/QueryUtil.java b/java.source/src/org/netbeans/modules/java/source/usages/QueryUtil.java --- a/java.source/src/org/netbeans/modules/java/source/usages/QueryUtil.java +++ b/java.source/src/org/netbeans/modules/java/source/usages/QueryUtil.java @@ -42,35 +42,17 @@ package org.netbeans.modules.java.source.usages; -import java.io.IOException; -import java.util.BitSet; import java.util.EnumSet; import java.util.Set; -import java.util.regex.Pattern; -import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; -import org.apache.lucene.index.TermDocs; -import org.apache.lucene.index.TermEnum; -import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.Collector; -import org.apache.lucene.search.DocIdSet; -import org.apache.lucene.search.Filter; -import org.apache.lucene.search.FilteredQuery; -import org.apache.lucene.search.FilteredTermEnum; -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.PrefixQuery; -import org.apache.lucene.search.PrefixTermEnum; import org.apache.lucene.search.Query; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.WildcardQuery; -import org.apache.lucene.util.OpenBitSet; import org.netbeans.api.annotations.common.NonNull; import org.netbeans.api.annotations.common.NullAllowed; -import org.netbeans.api.java.source.ClassIndex; -import org.netbeans.modules.java.source.usages.ResultConvertor.Stop; +import org.netbeans.modules.parsing.lucene.support.Convertor; +import org.netbeans.modules.parsing.lucene.support.StoppableConvertor; import org.openide.util.Parameters; /** @@ -78,92 +60,8 @@ * @author Tomas Zezula */ class QueryUtil { + - static Collector createBitSetCollector(final BitSet bits) { - return new BitSetCollector(bits); - } - - static Query createQuery ( - final @NonNull Pair termNames, - final @NonNull String value, - final @NonNull ClassIndex.NameKind kind) { - Parameters.notNull("termNames", termNames); //NOI18N - Parameters.notNull("termNames.first", termNames.first); //NOI18N - Parameters.notNull("termNames.second", termNames.second); //NOI18N - Parameters.notNull("value", value); //NOI18N - Parameters.notNull("kind", kind); //NOI18N - return createQueryImpl(termNames, value, kind, new StandardQueryFactory()); - } - - static Query createTermCollectingQuery( - final @NonNull Pair termNames, - final @NonNull String value, - final @NonNull ClassIndex.NameKind kind) { - Parameters.notNull("termNames", termNames); //NOI18N - Parameters.notNull("termNames.first", termNames.first); //NOI18N - Parameters.notNull("termNames.second", termNames.second); //NOI18N - 
Parameters.notNull("value", value); //NOI18N - Parameters.notNull("kind", kind); //NOI18N - return createQueryImpl(termNames, value, kind, new TCQueryFactory()); - } - - private static Query createQueryImpl( - final @NonNull Pair termNames, - final @NonNull String value, - final @NonNull ClassIndex.NameKind kind, - final @NonNull QueryFactory f) { - switch (kind) { - case SIMPLE_NAME: - return f.createTermQuery(termNames.first, value); - case PREFIX: - if (value.length() == 0) { - return f.createAllDocsQuery(); - } - else { - return f.createPrefixQuery(termNames.first, value); - } - case CASE_INSENSITIVE_PREFIX: - if (value.length() == 0) { - return f.createAllDocsQuery(); - } - else { - return f.createPrefixQuery(termNames.second, value.toLowerCase()); - } - case CAMEL_CASE: - if (value.length() == 0) { - throw new IllegalArgumentException (); - } else { - return f.createRegExpQuery(termNames.first,createCamelCaseRegExp(value, true), true); - } - case CASE_INSENSITIVE_REGEXP: - if (value.length() == 0) { - throw new IllegalArgumentException (); - } else { - return f.createRegExpQuery(termNames.second, value.toLowerCase(), false); - } - case REGEXP: - if (value.length() == 0) { - throw new IllegalArgumentException (); - } else { - return f.createRegExpQuery(termNames.first, value, true); - } - case CAMEL_CASE_INSENSITIVE: - if (value.length() == 0) { - //Special case (all) handle in different way - return f.createAllDocsQuery(); - } - else { - final Query pq = f.createPrefixQuery(termNames.second, value.toLowerCase()); - final Query fq = f.createRegExpQuery(termNames.second, createCamelCaseRegExp(value, false), false); - final BooleanQuery result = f.createBooleanQuery(); - result.add(pq, Occur.SHOULD); - result.add(fq, Occur.SHOULD); - return result; - } - default: - throw new UnsupportedOperationException (kind.toString()); - } - } static Query createUsagesQuery( final @NonNull String resourceName, @@ -186,284 +84,18 @@ } } - static Pair,Term> createPackageFilter( + static Pair,Term> createPackageFilter( final @NullAllowed String prefix, final boolean directOnly) { final Term startTerm = new Term (DocumentUtil.FIELD_PACKAGE_NAME, prefix); - final ResultConvertor filter = new PackageFilter(startTerm, directOnly); + final StoppableConvertor filter = new PackageFilter(startTerm, directOnly); return Pair.of(filter,startTerm); } // - - private static String createCamelCaseRegExp(final String camel, final boolean caseSensitive) { - final StringBuilder sb = new StringBuilder(); - int lastIndex = 0; - int index; - do { - index = findNextUpper(camel, lastIndex + 1); - String token = camel.substring(lastIndex, index == -1 ? camel.length(): index); - sb.append(Pattern.quote(caseSensitive ? token : token.toLowerCase())); - sb.append( index != -1 ? 
"[\\p{javaLowerCase}\\p{Digit}_\\$]*" : ".*"); // NOI18N - lastIndex = index; - } while(index != -1); - return sb.toString(); - } - - private static int findNextUpper(String text, int offset ) { - for( int i = offset; i < text.length(); i++ ) { - if ( Character.isUpperCase(text.charAt(i)) ) { - return i; - } - } - return -1; - } - - private static abstract class DocumentVisitor { - - public void generate(IndexReader reader, TermEnum enumerator) throws IOException { - final int[] docs = new int[32]; - final int[] freqs = new int[32]; - final TermDocs termDocs = reader.termDocs(); - try { - do { - final Term term = enumerator.term(); - if (term == null) { - break; - } - termDocs.seek(term); - while (true) { - final int count = termDocs.read(docs, freqs); - if (count != 0) { - for (int i = 0; i < count; i++) { - visit(term, docs[i]); - } - } else { - break; - } - } - } while (enumerator.next()); - } finally { - termDocs.close(); - } - } - - abstract public void visit(Term term, int doc); - } - - private static abstract class TCFilter extends Filter { - public abstract void attach (TermCollector collector); - } - - private static abstract class AbstractTCFilter extends TCFilter { - - private TermCollector termCollector; - - @Override - public final BitSet bits(IndexReader reader) throws IOException { - final FilteredTermEnum enumerator = getTermEnum(reader); - try { - final BitSet bitSet = new BitSet(reader.maxDoc()); - new DocumentVisitor() { - @Override - public void visit(Term term, int doc) { - bitSet.set(doc); - if (termCollector != null) { - termCollector.add(doc, term); - } - } - }.generate(reader, enumerator); - return bitSet; - } finally { - enumerator.close(); - } - } - - @Override - public final DocIdSet getDocIdSet(IndexReader reader) throws IOException { - final FilteredTermEnum enumerator = getTermEnum(reader); - try { - // if current term in enum is null, the enum is empty -> shortcut - if (enumerator.term() == null) { - return DocIdSet.EMPTY_DOCIDSET; - } - // else fill into a OpenBitSet - final OpenBitSet bitSet = new OpenBitSet(reader.maxDoc()); - new DocumentVisitor() { - @Override - public void visit(Term term, int doc) { - bitSet.set(doc); - if (termCollector != null) { - termCollector.add(doc, term); - } - } - }.generate(reader, enumerator); - return bitSet; - } finally { - enumerator.close(); - } - } - - @Override - public final void attach(final TermCollector tc) { - this.termCollector = tc; - } - - protected abstract FilteredTermEnum getTermEnum(IndexReader reader) throws IOException; - - } - - private static class RegexpTermEnum extends FilteredTermEnum { - - private final String fieldName; - private final String startPrefix; - private final Pattern pattern; - private boolean endEnum; - - public RegexpTermEnum( - final IndexReader in, - final String fieldName, - final Pattern pattern, - final String startPrefix) throws IOException { - final Term term = new Term(fieldName,startPrefix); - this.fieldName = term.field(); - this.pattern = pattern; - this.startPrefix = startPrefix; - setEnum(in.terms(term)); - } - - @Override - protected boolean termCompare(Term term) { - if (fieldName == term.field()) { - String searchText = term.text(); - if (searchText.startsWith(startPrefix)) { - return pattern.matcher(term.text()).matches(); - } - } - endEnum = true; - return false; - } - - @Override - public float difference() { - return 1.0f; - } - - @Override - protected boolean endEnum() { - return endEnum; - } - } - - private static class RegexpFilter extends AbstractTCFilter { - 
- private final String fieldName; - private final String startPrefix; - private final Pattern pattern; - - public RegexpFilter(final String fieldName, final String regexp, final boolean caseSensitive) { - this.fieldName = fieldName; - this.pattern = caseSensitive ? Pattern.compile(regexp) : Pattern.compile(regexp, Pattern.CASE_INSENSITIVE); - this.startPrefix = getStartText(regexp); - } - - protected FilteredTermEnum getTermEnum(final @NonNull IndexReader reader) throws IOException { - return new RegexpTermEnum(reader, fieldName, pattern, startPrefix); - } - - private static String getStartText(final String regexp) { - if (!Character.isJavaIdentifierStart(regexp.charAt(0))) { - return ""; //NOI18N - } - final StringBuilder startBuilder = new StringBuilder (); - startBuilder.append(regexp.charAt(0)); - for (int i=1; i { + + + private static class PackageFilter implements StoppableConvertor { private static final Stop STOP = new Stop(); @@ -499,114 +131,6 @@ } - private static class TCFilteredQuery extends FilteredQuery implements TermCollector.TermCollecting { - private TCFilteredQuery(final Query query, final TCFilter filter) { - super (query, filter); - } - - @Override - public void attach(TermCollector collector) { - ((TCFilter)getFilter()).attach(collector); - } - } - - private static class TCBooleanQuery extends BooleanQuery implements TermCollector.TermCollecting { - - private TermCollector collector; - - @Override - public void attach(TermCollector collector) { - this.collector = collector; - if (this.collector != null) { - attach(this, this.collector); - } - } - - @Override - public Query rewrite(IndexReader reader) throws IOException { - final Query result = super.rewrite(reader); - if (this.collector != null) { - attach(this,this.collector); - } - return result; - } - - private static void attach (final BooleanQuery query, final TermCollector collector) { - for (BooleanClause clause : query.getClauses()) { - if (!(clause instanceof TermCollector.TermCollecting)) { - throw new IllegalArgumentException(); - } - ((TermCollector.TermCollecting)clause.getQuery()).attach(collector); - } - } - - } - - private static interface QueryFactory { - Query createTermQuery(@NonNull String name, @NonNull String value); - Query createPrefixQuery(@NonNull String name, @NonNull String value); - Query createRegExpQuery(@NonNull String name, @NonNull String value, boolean caseSensitive); - Query createAllDocsQuery(); - BooleanQuery createBooleanQuery(); - } - - private static class StandardQueryFactory implements QueryFactory { - - @Override - public Query createTermQuery(final @NonNull String name, final @NonNull String value) { - return new TermQuery(new Term (name, value)); - } - - @Override - public Query createPrefixQuery(final @NonNull String name, final @NonNull String value) { - final PrefixQuery pq = new PrefixQuery(new Term(name, value)); - pq.setRewriteMethod(PrefixQuery.CONSTANT_SCORE_FILTER_REWRITE); - return pq; - } - - @Override - public Query createRegExpQuery(final @NonNull String name, final @NonNull String value, final boolean caseSensitive) { - return new FilteredQuery(new MatchAllDocsQuery(), new RegexpFilter(name, value, caseSensitive)); - } - - @Override - public Query createAllDocsQuery() { - return new MatchAllDocsQuery(); - } - - @Override - public BooleanQuery createBooleanQuery() { - return new BooleanQuery(); - } - } - - private static class TCQueryFactory implements QueryFactory { - - @Override - public Query createTermQuery(final @NonNull String name, final @NonNull 
String value) { - return new TCFilteredQuery(new MatchAllDocsQuery(), new TermFilter(name,value)); - } - - @Override - public Query createPrefixQuery(final @NonNull String name, final @NonNull String value) { - return new TCFilteredQuery(new MatchAllDocsQuery(), new PrefixFilter(name, value)); - } - - @Override - public Query createRegExpQuery(final @NonNull String name, final @NonNull String value, final boolean caseSensitive) { - return new TCFilteredQuery(new MatchAllDocsQuery(), new RegexpFilter(name, value, caseSensitive)); - } - - @Override - public Query createAllDocsQuery() { - throw new IllegalArgumentException (); - } - - @Override - public BooleanQuery createBooleanQuery() { - return new TCBooleanQuery(); - } - } // } diff --git a/java.source/src/org/netbeans/modules/java/source/usages/ResultConvertor.java b/java.source/src/org/netbeans/modules/java/source/usages/ResultConvertor.java deleted file mode 100644 --- a/java.source/src/org/netbeans/modules/java/source/usages/ResultConvertor.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. - * - * Copyright 1997-2010 Oracle and/or its affiliates. All rights reserved. - * - * Oracle and Java are registered trademarks of Oracle and/or its affiliates. - * Other names may be trademarks of their respective owners. - * - * The contents of this file are subject to the terms of either the GNU - * General Public License Version 2 only ("GPL") or the Common - * Development and Distribution License("CDDL") (collectively, the - * "License"). You may not use this file except in compliance with the - * License. You can obtain a copy of the License at - * http://www.netbeans.org/cddl-gplv2.html - * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the - * specific language governing permissions and limitations under the - * License. When distributing the software, include this License Header - * Notice in each file and include the License file at - * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this - * particular file as subject to the "Classpath" exception as provided - * by Oracle in the GPL Version 2 section of the License file that - * accompanied this code. If applicable, add the following below the - * License Header, with the fields enclosed by brackets [] replaced by - * your own identifying information: - * "Portions Copyrighted [year] [name of copyright owner]" - * - * Contributor(s): - * - * The Original Software is NetBeans. The Initial Developer of the Original - * Software is Sun Microsystems, Inc. Portions Copyright 1997-2006 Sun - * Microsystems, Inc. All Rights Reserved. - * - * If you wish your version of this file to be governed by only the CDDL - * or only the GPL Version 2, indicate your decision by adding - * "[Contributor] elects to include this software in this distribution - * under the [CDDL or GPL Version 2] license." If you do not indicate a - * single choice of license, a recipient has the option to distribute - * your version of this file under either the CDDL, the GPL Version 2 or - * to extend the choice of license to its licensees as provided above. - * However, if you add GPL Version 2 code and therefore, elected the GPL - * Version 2 license, then the option applies only if the new code is - * made subject to such option by the copyright holder. 
- */ -package org.netbeans.modules.java.source.usages; - -/** - * - * @author Tomas Zezula - */ -public interface ResultConvertor { - public static final class Stop extends Exception {}; - public abstract T convert (P p) throws Stop; -} diff --git a/java.source/test/unit/src/org/netbeans/api/java/source/ClassIndexTest.java b/java.source/test/unit/src/org/netbeans/api/java/source/ClassIndexTest.java --- a/java.source/test/unit/src/org/netbeans/api/java/source/ClassIndexTest.java +++ b/java.source/test/unit/src/org/netbeans/api/java/source/ClassIndexTest.java @@ -69,6 +69,7 @@ import org.netbeans.modules.java.source.usages.ClassIndexManager; import org.netbeans.modules.java.source.usages.IndexUtil; import org.netbeans.modules.parsing.api.indexing.IndexingManager; +import org.netbeans.modules.parsing.lucene.support.IndexManager; import org.netbeans.spi.java.classpath.ClassPathFactory; import org.netbeans.spi.java.classpath.ClassPathImplementation; import org.netbeans.spi.java.classpath.ClassPathProvider; @@ -258,37 +259,37 @@ public void testholdsWriteLock () throws Exception { //Test basics final ClassIndexManager m = ClassIndexManager.getDefault(); - m.readLock(new ClassIndexManager.ExceptionAction() { + IndexManager.readAccess(new IndexManager.Action() { public Void run() throws IOException, InterruptedException { - assertFalse(m.holdsWriteLock()); + assertFalse(IndexManager.holdsWriteLock()); return null; } }); - m.writeLock(new ClassIndexManager.ExceptionAction() { + m.writeLock(new IndexManager.Action() { public Void run() throws IOException, InterruptedException { - assertTrue(m.holdsWriteLock()); + assertTrue(IndexManager.holdsWriteLock()); return null; } }); //Test nesting of [write|read] lock in write lock //the opposite is forbidden - m.writeLock(new ClassIndexManager.ExceptionAction() { + m.writeLock(new IndexManager.Action() { public Void run() throws IOException, InterruptedException { - assertTrue(m.holdsWriteLock()); - m.writeLock(new ClassIndexManager.ExceptionAction() { + assertTrue(IndexManager.holdsWriteLock()); + m.writeLock(new IndexManager.Action() { public Void run() throws IOException, InterruptedException { - assertTrue(m.holdsWriteLock()); + assertTrue(IndexManager.holdsWriteLock()); return null; } }); - assertTrue(m.holdsWriteLock()); - m.writeLock(new ClassIndexManager.ExceptionAction() { + assertTrue(IndexManager.holdsWriteLock()); + m.writeLock(new IndexManager.Action() { public Void run() throws IOException, InterruptedException { - assertTrue(m.holdsWriteLock()); + assertTrue(IndexManager.holdsWriteLock()); return null; } }); - assertTrue(m.holdsWriteLock()); + assertTrue(IndexManager.holdsWriteLock()); return null; } }); diff --git a/java.source/test/unit/src/org/netbeans/api/java/source/JavaSourceTest.java b/java.source/test/unit/src/org/netbeans/api/java/source/JavaSourceTest.java --- a/java.source/test/unit/src/org/netbeans/api/java/source/JavaSourceTest.java +++ b/java.source/test/unit/src/org/netbeans/api/java/source/JavaSourceTest.java @@ -89,22 +89,22 @@ import java.util.concurrent.TimeUnit; import java.util.logging.Handler; import javax.swing.text.BadLocationException; +import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.document.FieldSelector; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; -import org.netbeans.api.annotations.common.NonNull; import org.netbeans.api.java.classpath.ClassPath; import org.netbeans.api.java.classpath.GlobalPathRegistry; import org.netbeans.api.java.lexer.JavaTokenId; -import 
org.netbeans.api.java.source.ClassIndex.NameKind; import org.netbeans.junit.NbTestCase; import org.netbeans.junit.NbTestSuite; import org.netbeans.modules.java.source.parsing.DocPositionRegion; import org.netbeans.modules.java.source.parsing.JavaFileObjectProvider; import org.netbeans.modules.java.source.parsing.SourceFileObject; -import org.netbeans.modules.java.source.usages.Index; import org.netbeans.modules.java.source.usages.Pair; -import org.netbeans.modules.java.source.usages.ResultConvertor; +import org.netbeans.modules.parsing.lucene.IndexFactory; +import org.netbeans.modules.parsing.lucene.support.Convertor; +import org.netbeans.modules.parsing.lucene.support.Index; import org.netbeans.spi.java.classpath.support.ClassPathSupport; import org.openide.cookies.EditorCookie; import org.openide.cookies.SaveCookie; @@ -125,11 +125,11 @@ import org.netbeans.modules.java.source.classpath.CacheClassPath; import org.netbeans.modules.java.source.parsing.CompilationInfoImpl; import org.netbeans.modules.java.source.parsing.JavacParser; -import org.netbeans.modules.java.source.usages.IndexFactory; import org.netbeans.modules.java.source.usages.IndexUtil; -import org.netbeans.modules.java.source.usages.PersistentClassIndex; import org.netbeans.modules.parsing.api.TestUtil; import org.netbeans.modules.parsing.api.indexing.IndexingManager; +import org.netbeans.modules.parsing.lucene.support.IndexManagerTestUtilities; +import org.netbeans.modules.parsing.lucene.support.StoppableConvertor; import org.netbeans.spi.java.classpath.ClassPathProvider; import org.openide.util.Mutex.ExceptionAction; /** @@ -1286,8 +1286,9 @@ public void testIndexCancel() throws Exception { + final IndexFactory oldFactory = IndexManagerTestUtilities.getIndexFactory(); final TestIndexFactory factory = new TestIndexFactory(); - PersistentClassIndex.setIndexFactory(factory); + IndexManagerTestUtilities.setIndexFactory(factory); try { FileObject test = createTestFile ("Test1"); final ClassPath bootPath = createBootPath (); @@ -1369,7 +1370,7 @@ regs.unregister(ClassPath.SOURCE, new ClassPath[]{sourcePath}); } } finally { - PersistentClassIndex.setIndexFactory(null); + IndexManagerTestUtilities.setIndexFactory(oldFactory); } } @@ -1929,13 +1930,14 @@ final TestIndex instance = new TestIndex(); - public Index create(File cacheRoot) throws IOException { + @Override + public Index createIndex(File cacheFolder, Analyzer analyzer) { return instance; } } - private static class TestIndex extends Index { + private static class TestIndex implements Index { //Activate the TestIndex.await after scan is done //during the scan the prebuildArgs may call the index //and cause deadlock @@ -1945,10 +1947,12 @@ public TestIndex () { } + @Override public boolean isValid(boolean tryOpen) throws IOException { return true; } + @Override public boolean exists() { return true; } @@ -1956,51 +1960,55 @@ @Override public void query( Collection result, - ResultConvertor convertor, + Convertor convertor, FieldSelector selector, + AtomicBoolean cancel, Query... 
queries) throws IOException, InterruptedException { - await(); + await(cancel); } @Override public void queryTerms( Collection result, Term start, - ResultConvertor filter) throws IOException, InterruptedException { - await (); + StoppableConvertor filter, + AtomicBoolean cancel) throws IOException, InterruptedException { + await (cancel); } @Override public void queryDocTerms( Map> result, - ResultConvertor convertor, - ResultConvertor termConvertor, + Convertor convertor, + Convertor termConvertor, FieldSelector selector, + AtomicBoolean cancel, Query... queries) throws IOException, InterruptedException { - await(); + await(cancel); } @Override - public void store(Collection toAdd, Collection toDelete, ResultConvertor docConvertor, ResultConvertor queryConvertor, boolean optimize) throws IOException { + public void store(Collection toAdd, Collection toDelete, Convertor docConvertor, Convertor queryConvertor, boolean optimize) throws IOException { } public boolean isUpToDate(String resourceName, long timeStamp) throws IOException { return true; } + @Override public void clear() throws IOException { } + @Override public void close() throws IOException { } - private void await () throws InterruptedException { + private void await (final AtomicBoolean cancel) throws InterruptedException { if (!active) { return; } - AtomicBoolean cancel = this.cancel.get(); while (true) { - if (cancel.get()) { + if (cancel != null && cancel.get()) { throw new InterruptedException (); } Thread.sleep(100); diff --git a/java.source/test/unit/src/org/netbeans/api/java/source/TestUtilities.java b/java.source/test/unit/src/org/netbeans/api/java/source/TestUtilities.java --- a/java.source/test/unit/src/org/netbeans/api/java/source/TestUtilities.java +++ b/java.source/test/unit/src/org/netbeans/api/java/source/TestUtilities.java @@ -68,6 +68,7 @@ import org.netbeans.modules.java.source.usages.ClassIndexImpl; import org.netbeans.modules.java.source.usages.ClassIndexManager; import org.netbeans.modules.java.source.usages.IndexUtil; +import org.netbeans.modules.parsing.lucene.support.IndexManager; import org.netbeans.spi.java.classpath.support.ClassPathSupport; import org.openide.filesystems.FileObject; import org.openide.filesystems.FileUtil; @@ -249,7 +250,7 @@ public void run(CompilationController parameter) throws Exception { for (final URL url : urls) { final ClassIndexImpl cii = mgr.createUsagesQuery(url, false); - ClassIndexManager.getDefault().writeLock(new ClassIndexManager.ExceptionAction() { + ClassIndexManager.getDefault().writeLock(new IndexManager.Action() { public Void run() throws IOException, InterruptedException { BinaryAnalyser ba = cii.getBinaryAnalyser(); ba.start(url, new AtomicBoolean(false), new AtomicBoolean(false)); diff --git a/java.source/test/unit/src/org/netbeans/modules/java/source/classpath/SourcePathTest.java b/java.source/test/unit/src/org/netbeans/modules/java/source/classpath/SourcePathTest.java --- a/java.source/test/unit/src/org/netbeans/modules/java/source/classpath/SourcePathTest.java +++ b/java.source/test/unit/src/org/netbeans/modules/java/source/classpath/SourcePathTest.java @@ -50,6 +50,7 @@ import org.netbeans.api.java.source.TestUtilities; import org.netbeans.junit.NbTestCase; import org.netbeans.modules.java.source.usages.ClassIndexManager; +import org.netbeans.modules.parsing.lucene.support.IndexManager; import org.netbeans.spi.java.classpath.support.ClassPathSupport; /** @@ -85,7 +86,7 @@ final ClassPath sp2 = SourcePath.sources(base, false); assertTrue 
(sp2.entries().isEmpty()); - ClassIndexManager.getDefault().writeLock(new ClassIndexManager.ExceptionAction() { + ClassIndexManager.getDefault().writeLock(new IndexManager.Action() { public Void run() throws IOException, InterruptedException { ClassIndexManager.getDefault().createUsagesQuery(base.entries().get(0).getURL(), true); return null; @@ -93,7 +94,7 @@ }); assertEquals(1,sp2.entries().size()); assertEquals(base.entries().get(0).getURL(), sp2.entries().get(0).getURL()); - ClassIndexManager.getDefault().writeLock(new ClassIndexManager.ExceptionAction() { + ClassIndexManager.getDefault().writeLock(new IndexManager.Action() { public Void run() throws IOException, InterruptedException { ClassIndexManager.getDefault().createUsagesQuery(base.entries().get(1).getURL(), true); return null; diff --git a/java.source/test/unit/src/org/netbeans/modules/java/source/usages/BinaryAnalyserTest.java b/java.source/test/unit/src/org/netbeans/modules/java/source/usages/BinaryAnalyserTest.java --- a/java.source/test/unit/src/org/netbeans/modules/java/source/usages/BinaryAnalyserTest.java +++ b/java.source/test/unit/src/org/netbeans/modules/java/source/usages/BinaryAnalyserTest.java @@ -62,6 +62,8 @@ import org.netbeans.modules.java.source.ElementHandleAccessor; import org.netbeans.modules.java.source.usages.BinaryAnalyser.Result; import org.netbeans.modules.java.source.usages.ClassIndexImpl.UsageType; +import org.netbeans.modules.parsing.lucene.support.Index; +import org.netbeans.modules.parsing.lucene.support.IndexManager; import org.openide.filesystems.FileObject; import org.openide.filesystems.FileUtil; @@ -81,14 +83,14 @@ } public void testAnnotationsIndexed() throws Exception { - ClassIndexManager.getDefault().writeLock(new ClassIndexManager.ExceptionAction() { + ClassIndexManager.getDefault().writeLock(new IndexManager.Action() { @Override public Void run() throws IOException, InterruptedException { FileObject workDir = SourceUtilsTestUtil.makeScratchDir(BinaryAnalyserTest.this); FileObject indexDir = workDir.createFolder("index"); File binaryAnalyzerDataDir = new File(getDataDir(), "Annotations.jar"); - final Index index = LuceneIndex.create(FileUtil.toFile(indexDir)); + final Index index = IndexManager.createIndex(FileUtil.toFile(indexDir), DocumentUtil.createAnalyzer()); BinaryAnalyser a = new BinaryAnalyser(new ClassIndexImpl.Writer() { @Override public void clear() throws IOException { @@ -184,6 +186,7 @@ result, DocumentUtil.binaryNameConvertor(), DocumentUtil.declaredTypesFieldSelector(), + null, QueryUtil.createUsagesQuery(refered, EnumSet.of(UsageType.TYPE_REFERENCE), Occur.SHOULD)); assertTrue(result.containsAll(Arrays.asList(in))); } diff --git a/java.source/test/unit/src/org/netbeans/modules/java/source/usages/LuceneIndexTest.java b/java.source/test/unit/src/org/netbeans/modules/java/source/usages/LuceneIndexTest.java deleted file mode 100644 --- a/java.source/test/unit/src/org/netbeans/modules/java/source/usages/LuceneIndexTest.java +++ /dev/null @@ -1,278 +0,0 @@ -/* - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. - * - * Copyright 1997-2010 Oracle and/or its affiliates. All rights reserved. - * - * Oracle and Java are registered trademarks of Oracle and/or its affiliates. - * Other names may be trademarks of their respective owners. - * - * The contents of this file are subject to the terms of either the GNU - * General Public License Version 2 only ("GPL") or the Common - * Development and Distribution License("CDDL") (collectively, the - * "License"). 
You may not use this file except in compliance with the - * License. You can obtain a copy of the License at - * http://www.netbeans.org/cddl-gplv2.html - * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the - * specific language governing permissions and limitations under the - * License. When distributing the software, include this License Header - * Notice in each file and include the License file at - * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this - * particular file as subject to the "Classpath" exception as provided - * by Oracle in the GPL Version 2 section of the License file that - * accompanied this code. If applicable, add the following below the - * License Header, with the fields enclosed by brackets [] replaced by - * your own identifying information: - * "Portions Copyrighted [year] [name of copyright owner]" - * - * Contributor(s): - * - * The Original Software is NetBeans. The Initial Developer of the Original - * Software is Sun Microsystems, Inc. Portions Copyright 1997-2006 Sun - * Microsystems, Inc. All Rights Reserved. - * - * If you wish your version of this file to be governed by only the CDDL - * or only the GPL Version 2, indicate your decision by adding - * "[Contributor] elects to include this software in this distribution - * under the [CDDL or GPL Version 2] license." If you do not indicate a - * single choice of license, a recipient has the option to distribute - * your version of this file under either the CDDL, the GPL Version 2 or - * to extend the choice of license to its licensees as provided above. - * However, if you add GPL Version 2 code and therefore, elected the GPL - * Version 2 license, then the option applies only if the new code is - * made subject to such option by the copyright holder. 
- */ - -package org.netbeans.modules.java.source.usages; - -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import org.apache.lucene.analysis.KeywordAnalyzer; -import org.apache.lucene.document.Document; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.Hit; -import org.apache.lucene.search.Hits; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.PrefixQuery; -import org.apache.lucene.search.Searcher; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.store.Directory; -import org.apache.lucene.store.FSDirectory; -import org.netbeans.junit.NbTestCase; -import org.netbeans.modules.java.source.usages.ClassIndexImpl.UsageType; - -/** - * - * @author Tomas Zezula - */ -public class LuceneIndexTest extends NbTestCase { - - //Copied from DocumentUtil, nneds to be synchronized when changed - private static final String FIELD_RESOURCE_NAME = "resourceName"; //NOI18N - private static final String FIELD_REFERENCES = "references"; //NOI18N - - private static final int REF_SIZE = UsageType.values().length; - - private File indexFolder1; - private File indexFolder2; - - public LuceneIndexTest (String testName) { - super (testName); - } - - protected @Override void setUp() throws Exception { - super.setUp(); - this.clearWorkDir(); - //Prepare indeces - } - - public void testIndeces () throws Exception { - if (indexFolder1 != null && indexFolder2 != null) { - assertTrue (indexFolder1.isDirectory()); - assertTrue (indexFolder1.canRead()); - assertTrue (IndexReader.indexExists(indexFolder1)); - assertTrue (indexFolder2.isDirectory()); - assertTrue (indexFolder2.canRead()); - assertTrue (IndexReader.indexExists(indexFolder2)); - compareIndeces (indexFolder1, indexFolder2); - } - } - - public void testHitsBug () throws Exception { - File root = getWorkDir(); - root.mkdirs(); - Directory dir = FSDirectory.getDirectory(root); - final IndexWriter w = new IndexWriter(dir, new KeywordAnalyzer(),true); - try { - for (int i=0; i<200; i++) { - Field field = new Field("KEY","value"+i,Field.Store.YES,Field.Index.NO_NORMS); - Document doc = new Document(); - doc.add(field); - w.addDocument(doc); - } - } finally { - w.close(); - } - final IndexReader in = IndexReader.open(dir); - try { - final Searcher searcher = new IndexSearcher (in); - try { - Hits hits = searcher.search(new PrefixQuery((new Term("KEY", "value1")))); - for (int j=0; j m1 = fill (f1); - Map m2 = fill (f2); - for (Map.Entry e : m1.entrySet()) { - String key = e.getKey(); - String value1 = e.getValue(); - String value2= m2.get(key); - assertNotNull("Unknown reference: " + key,value2); - assertEquals("Different usage types",value1,value2); - } - } - - private static Map fill (Field[] fs) { - Map m1 = new HashMap (); - for (Field f : fs) { - String ru = f.stringValue(); - int index = ru.length() - REF_SIZE; - String key = ru.substring(0,index); - String value = ru.substring(index); - m1.put (key, value); - } - return m1; - } - - public void testIsValid() throws Exception { - final File wd = getWorkDir(); - final File cache = new File(wd,"cache"); - final File indexFolder = new File (cache,"refs"); - cache.mkdirs(); - final LuceneIndex index = 
(LuceneIndex) LuceneIndex.create(cache); - //Empty index => invalid - assertFalse(index.isValid(true)); - - clearValidityCache(index); - List,Object[]>> refs = new ArrayList,Object[]>>(); - List xref = new LinkedList(); - String sym = ""; - String ident = ""; - refs.add(Pair.,Object[]>of(Pair.of("A", null), new Object[]{xref,sym,ident})); - Set> toDel = new HashSet>(); - index.store(refs, toDel, DocumentUtil.documentConvertor(), DocumentUtil.queryClassWithEncConvertor(),true); - //Existing index => valid - assertTrue(index.isValid(true)); - assertTrue(indexFolder.listFiles().length>0); - - clearValidityCache(index); - createLock(index); - //Index with orphan lock => invalid - assertFalse(index.isValid(true)); - assertTrue(indexFolder.listFiles().length==0); - - clearValidityCache(index); - index.store(refs, toDel, DocumentUtil.documentConvertor(), DocumentUtil.queryClassWithEncConvertor(),true); - assertTrue(index.isValid(true)); - assertTrue(indexFolder.listFiles().length>0); - - //Broken index => invalid - clearValidityCache(index); - File bt = null;; - for (File file : indexFolder.listFiles()) { - if (file.getName().endsWith(".cfs")) { - bt = file; - break; - } - } - assertNotNull(bt); - FileOutputStream out = new FileOutputStream(bt); - try { - out.write(new byte[] {0,0,0,0,0,0,0,0,0,0}, 0, 10); - } finally { - out.close(); - } - assertFalse(index.isValid(true)); - assertTrue(indexFolder.listFiles().length==0); - - } - - - private void createLock(final LuceneIndex index) throws NoSuchFieldException, IllegalArgumentException, IllegalAccessException, IOException { - final Class li = LuceneIndex.class; - final java.lang.reflect.Field directory = li.getDeclaredField("directory"); //NOI18N - directory.setAccessible(true); - Directory dir = (Directory) directory.get(index); - dir.makeLock("test").obtain(); //NOI18N - } - - - private void clearValidityCache(final LuceneIndex index) throws NoSuchFieldException, IllegalArgumentException, IllegalAccessException, IOException { - final Class li = LuceneIndex.class; - final java.lang.reflect.Field reader = li.getDeclaredField("reader"); //NOI18N - reader.setAccessible(true); - IndexReader r = (IndexReader) reader.get(index); - if (r != null) { - r.close(); - } - reader.set(index,null); - } - -} diff --git a/java.source/test/unit/src/org/netbeans/modules/java/source/usages/LucenePerformanceTest.java b/java.source/test/unit/src/org/netbeans/modules/java/source/usages/LucenePerformanceTest.java --- a/java.source/test/unit/src/org/netbeans/modules/java/source/usages/LucenePerformanceTest.java +++ b/java.source/test/unit/src/org/netbeans/modules/java/source/usages/LucenePerformanceTest.java @@ -65,9 +65,12 @@ import javax.lang.model.element.TypeElement; import org.apache.lucene.index.Term; import org.apache.lucene.store.FSDirectory; -import org.netbeans.api.java.source.ClassIndex.NameKind; import org.netbeans.api.java.source.ElementHandle; import org.netbeans.junit.NbTestCase; +import org.netbeans.modules.parsing.lucene.support.Index; +import org.netbeans.modules.parsing.lucene.support.IndexManager; +import org.netbeans.modules.parsing.lucene.support.Queries; +import org.netbeans.modules.parsing.lucene.support.StoppableConvertor; /** * @@ -96,7 +99,7 @@ public void testPerformance () throws Exception { final File indexDir = new File (this.getWorkDir(),"index"); indexDir.mkdirs(); - final Index index = LuceneIndex.create (indexDir); + final Index index = IndexManager.createIndex(indexDir, DocumentUtil.createAnalyzer()); List,Object[]>> data = 
prepareData(20000,1000,50); // Map> data = loadData(new File ("/tmp/data")); // storeData(new File ("/tmp/data"),data); @@ -112,8 +115,8 @@ Set result = new HashSet(); startTime = System.currentTimeMillis(); - final Pair,Term> filter = QueryUtil.createPackageFilter("", true); - index.queryTerms(result, filter.second, filter.first); + final Pair,Term> filter = QueryUtil.createPackageFilter("", true); + index.queryTerms(result, filter.second, filter.first, null); endTime = System.currentTimeMillis(); delta = (endTime-startTime); System.out.println("Packages: " + delta); @@ -128,7 +131,8 @@ result2, DocumentUtil.elementHandleConvertor(), DocumentUtil.declaredTypesFieldSelector(), - QueryUtil.createQuery(Pair.of(DocumentUtil.FIELD_SIMPLE_NAME,DocumentUtil.FIELD_CASE_INSENSITIVE_NAME),"",NameKind.PREFIX)); + null, + Queries.createQuery(DocumentUtil.FIELD_SIMPLE_NAME,DocumentUtil.FIELD_CASE_INSENSITIVE_NAME,"",Queries.QueryKind.PREFIX)); endTime = System.currentTimeMillis(); delta = (endTime-startTime); System.out.println("All classes: " + delta); @@ -147,7 +151,8 @@ result2, DocumentUtil.elementHandleConvertor(), DocumentUtil.declaredTypesFieldSelector(), - QueryUtil.createQuery(Pair.of(DocumentUtil.FIELD_SIMPLE_NAME,DocumentUtil.FIELD_CASE_INSENSITIVE_NAME),"Class7",NameKind.PREFIX)); + null, + Queries.createQuery(DocumentUtil.FIELD_SIMPLE_NAME,DocumentUtil.FIELD_CASE_INSENSITIVE_NAME,"Class7",Queries.QueryKind.PREFIX)); endTime = System.currentTimeMillis(); delta = (endTime-startTime); System.out.println("Prefix classes: " + delta + " size: " + result.size()); diff --git a/java.sourceui/nbproject/project.xml b/java.sourceui/nbproject/project.xml --- a/java.sourceui/nbproject/project.xml +++ b/java.sourceui/nbproject/project.xml @@ -120,6 +120,14 @@ + org.netbeans.modules.parsing.lucene + + + + 1.0 + + + org.netbeans.modules.projectapi diff --git a/masterfs/manifest.mf b/masterfs/manifest.mf --- a/masterfs/manifest.mf +++ b/masterfs/manifest.mf @@ -1,7 +1,7 @@ Manifest-Version: 1.0 OpenIDE-Module: org.netbeans.modules.masterfs/2 OpenIDE-Module-Localizing-Bundle: org/netbeans/modules/masterfs/resources/Bundle.properties -OpenIDE-Module-Specification-Version: 2.28 +OpenIDE-Module-Specification-Version: 2.29 AutoUpdate-Show-In-Client: false AutoUpdate-Essential-Module: true diff --git a/masterfs/nbproject/project.xml b/masterfs/nbproject/project.xml --- a/masterfs/nbproject/project.xml +++ b/masterfs/nbproject/project.xml @@ -129,6 +129,7 @@ org.netbeans.modules.javafx.source org.netbeans.modules.parsing.api + org.netbeans.modules.parsing.lucene org.netbeans.modules.versioning org.netbeans.modules.masterfs.providers diff --git a/nbbuild/cluster.properties b/nbbuild/cluster.properties --- a/nbbuild/cluster.properties +++ b/nbbuild/cluster.properties @@ -385,6 +385,7 @@ o.openidex.util,\ options.editor,\ parsing.api,\ + parsing.lucene,\ print.editor,\ project.ant,\ project.libraries,\ diff --git a/nbbuild/javadoctools/links.xml b/nbbuild/javadoctools/links.xml --- a/nbbuild/javadoctools/links.xml +++ b/nbbuild/javadoctools/links.xml @@ -201,3 +201,4 @@ + diff --git a/nbbuild/javadoctools/properties.xml b/nbbuild/javadoctools/properties.xml --- a/nbbuild/javadoctools/properties.xml +++ b/nbbuild/javadoctools/properties.xml @@ -199,3 +199,4 @@ + diff --git a/nbbuild/javadoctools/replaces.xml b/nbbuild/javadoctools/replaces.xml --- a/nbbuild/javadoctools/replaces.xml +++ b/nbbuild/javadoctools/replaces.xml @@ -199,3 +199,4 @@ + diff --git a/parsing.api/nbproject/project.xml 
b/parsing.api/nbproject/project.xml --- a/parsing.api/nbproject/project.xml +++ b/parsing.api/nbproject/project.xml @@ -105,6 +105,14 @@ + org.netbeans.modules.parsing.lucene + + + + 1.0 + + + org.netbeans.modules.projectapi diff --git a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/IndexDocumentImpl.java b/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/IndexDocumentImpl.java --- a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/IndexDocumentImpl.java +++ b/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/IndexDocumentImpl.java @@ -42,8 +42,6 @@ package org.netbeans.modules.parsing.impl.indexing; -import org.netbeans.modules.parsing.spi.indexing.Indexable; - /** * * @author Tomas Zezula diff --git a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/IndexImpl.java b/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/IndexImpl.java --- a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/IndexImpl.java +++ b/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/IndexImpl.java @@ -62,7 +62,7 @@ public void store (boolean optimize, Iterable indexedIndexables) throws IOException; - public Collection query (String fieldName, String value, QuerySupport.Kind kind, String... fieldsToLoad) throws IOException; + public Collection query (String fieldName, String value, QuerySupport.Kind kind, String... fieldsToLoad) throws IOException, InterruptedException; public void fileModified(String relativePath); diff --git a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/LuceneIndex.java b/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/DocumentBasedIndex.java rename from parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/LuceneIndex.java rename to parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/DocumentBasedIndex.java --- a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/LuceneIndex.java +++ b/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/DocumentBasedIndex.java @@ -43,54 +43,35 @@ package org.netbeans.modules.parsing.impl.indexing.lucene; import java.io.File; -import java.io.FileNotFoundException; import java.io.IOException; -import java.lang.reflect.Field; import java.net.URISyntaxException; import java.net.URL; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashSet; -import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Set; -import java.util.TreeSet; import java.util.logging.Level; import java.util.logging.Logger; -import java.util.regex.Matcher; -import java.util.regex.Pattern; import org.apache.lucene.analysis.KeywordAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.FieldSelector; -import org.apache.lucene.index.CorruptIndexException; -import org.apache.lucene.index.FilterIndexReader; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.TermDocs; -import org.apache.lucene.index.TermEnum; -import org.apache.lucene.search.DefaultSimilarity; -import org.apache.lucene.store.Directory; -import org.apache.lucene.store.FSDirectory; -import org.apache.lucene.store.LockObtainFailedException; -import org.apache.lucene.store.RAMDirectory; +import org.apache.lucene.search.Query; import org.netbeans.modules.parsing.impl.indexing.IndexDocumentImpl; import 
org.netbeans.modules.parsing.impl.indexing.IndexImpl; -import org.netbeans.modules.parsing.impl.indexing.lucene.util.Evictable; +import org.netbeans.modules.parsing.lucene.support.Convertor; +import org.netbeans.modules.parsing.lucene.support.Index; +import org.netbeans.modules.parsing.lucene.support.IndexManager; +import org.netbeans.modules.parsing.lucene.support.Queries; import org.netbeans.modules.parsing.spi.indexing.Indexable; import org.netbeans.modules.parsing.spi.indexing.support.QuerySupport; -import org.openide.util.Exceptions; -import org.openide.util.RequestProcessor; /** * * @author Tomas Zezula */ -public class LuceneIndex implements IndexImpl, Evictable { +public class DocumentBasedIndex implements IndexImpl { - private static final RequestProcessor RP = new RequestProcessor(LuceneIndex.class.getName(),1); // ----------------------------------------------------------------------- // IndexImpl implementation @@ -100,12 +81,14 @@ * Adds document * @param document */ + @Override public void addDocument(final IndexDocumentImpl document) { final boolean forceFlush; synchronized (this) { assert document instanceof LuceneDocument; toAdd.add((LuceneDocument) document); + toRemove.add(document.getSourceName()); forceFlush = lmListener.isLowMemory(); } @@ -115,7 +98,7 @@ store(false, null); System.gc(); } catch (IOException ioe) { - LOGGER.log(Level.WARNING, null, annotateException(ioe, indexFolder)); + LOGGER.log(Level.WARNING, null, ioe); } } } @@ -124,6 +107,7 @@ * Removes all documents for given path * @param relativePath */ + @Override public void removeDocument(final String relativePath) { final boolean forceFlush; @@ -137,813 +121,182 @@ LOGGER.fine("Extra flush forced"); //NOI18N store(false, null); } catch (IOException ioe) { - LOGGER.log(Level.WARNING, null, annotateException(ioe, indexFolder)); + LOGGER.log(Level.WARNING, null, ioe); } } } - public void store(final boolean optimize, final Iterable indexedIndexables) throws IOException { - LuceneIndexManager.getDefault().writeAccess(new LuceneIndexManager.Action() { - public Void run() throws IOException { - checkPreconditions(); + @Override + public void store(final boolean optimize, final Iterable indexedIndexables) throws IOException { + final List toAdd; + final List toRemove; - final List toAdd; - final List toRemove; + synchronized (this) { + toAdd = new LinkedList(this.toAdd); + toRemove = new LinkedList(this.toRemove); - synchronized (LuceneIndex.this) { - toAdd = new LinkedList(LuceneIndex.this.toAdd); - toRemove = new LinkedList(LuceneIndex.this.toRemove); + this.toAdd.clear(); + this.toRemove.clear(); - LuceneIndex.this.toAdd.clear(); - LuceneIndex.this.toRemove.clear(); + if (!staleFiles.isEmpty()) { + if (indexedIndexables != null) { + for(Indexable i : indexedIndexables) { + this.staleFiles.remove(i.getRelativePath()); + } + } else { + for(LuceneDocument ldoc : toAdd) { + this.staleFiles.remove(ldoc.getSourceName()); + } + this.staleFiles.removeAll(toRemove); + } + } + } - if (!staleFiles.isEmpty()) { - if (indexedIndexables != null) { - for(Indexable i : indexedIndexables) { - LuceneIndex.this.staleFiles.remove(i.getRelativePath()); - } - } else { - for(LuceneDocument ldoc : toAdd) { - LuceneIndex.this.staleFiles.remove(ldoc.getSourceName()); - } - LuceneIndex.this.staleFiles.removeAll(toRemove); - } - } - } - - if (toAdd.size() > 0 || toRemove.size() > 0) { - flush(indexFolder, toAdd, toRemove, LuceneIndex.this.directory, lmListener, optimize); - } - return null; - } - }); + if (toAdd.size() > 0 || 
toRemove.size() > 0) { + LOGGER.log(Level.FINE, "Flushing: {0}", indexFolder); //NOI18N + luceneIndex.store( + toAdd, + toRemove, + ADD_CONVERTOR, + REMOVE_CONVERTOR, + optimize); + } + } + @Override public Collection query( final String fieldName, final String value, final QuerySupport.Kind kind, final String... fieldsToLoad - ) throws IOException { + ) throws IOException, InterruptedException { assert fieldName != null; assert value != null; assert kind != null; - - return LuceneIndexManager.getDefault().readAccess(new LuceneIndexManager.Action>() { - public List run() throws IOException { - checkPreconditions(); - if (empty) { - if (LOGGER.isLoggable(Level.FINE)) { - LOGGER.log(Level.FINE, "Ignoring empty index: {0}", indexFolder.getAbsolutePath()); - } - return Collections.emptyList(); - } - final IndexReader r = getReader(); - if (r != null) { - // index exists - return _query(r, fieldName, value, kind, fieldsToLoad); - } else { - // no index - return Collections.emptyList(); - } - } - }); + final List result = new LinkedList(); + final Query query = Queries.createQuery(fieldName, fieldName, value, translateQueryKind(kind)); + FieldSelector selector = null; + if (fieldsToLoad != null && fieldsToLoad.length > 0) { + final String[] fieldsWithSource = new String[fieldsToLoad.length+1]; + System.arraycopy(fieldsToLoad, 0, fieldsWithSource, 0, fieldsToLoad.length); + fieldsWithSource[fieldsToLoad.length] = DocumentUtil.FIELD_SOURCE_NAME; + selector = Queries.createFieldSelector(fieldsWithSource); + } + luceneIndex.query(result, QUERY_CONVERTOR, selector, null, query); + return result; } + @Override public void fileModified(String relativePath) { synchronized (this) { if (LOGGER.isLoggable(Level.FINE)) { - LOGGER.fine(this + ", adding stale file: " + relativePath); //NOI18N + LOGGER.log(Level.FINE, "{0}, adding stale file: {1}", new Object[]{this, relativePath}); //NOI18N } staleFiles.add(relativePath); } } + @Override public Collection getStaleFiles() { synchronized (this) { if (LOGGER.isLoggable(Level.FINE)) { - LOGGER.fine(this + ", stale files: " + staleFiles); //NOI18N + LOGGER.log(Level.FINE, "{0}, stale files: {1}", new Object[]{this, staleFiles}); //NOI18N } return new LinkedList(staleFiles); } } + /** + * Checks if the lucene index is valid. + * @return true when index is valid + * @throws IOException when index is already closed + */ + @Override + public boolean isValid () throws IOException { + return luceneIndex.isValid(true); + } + // ----------------------------------------------------------------------- // Public implementation // ----------------------------------------------------------------------- - public LuceneIndex(final URL indexFolderUrl) throws IOException { + public DocumentBasedIndex(final URL indexFolderUrl) throws IOException { assert indexFolderUrl != null; try { - this.indexFolderUrl = indexFolderUrl; indexFolder = new File(indexFolderUrl.toURI()); - directory = createDirectory(indexFolder); + luceneIndex = IndexManager.createIndex(indexFolder, new KeywordAnalyzer()); } catch (URISyntaxException e) { - IOException ioe = new IOException(); - ioe.initCause(e); - throw ioe; + throw new IOException(e); } } public void clear() throws IOException { - checkPreconditions(); - LuceneIndexManager.getDefault().writeAccess(new LuceneIndexManager.Action() { - public Void run() throws IOException { - _clear(); - return null; - } - }); - } - - /** - * Checks if the lucene index is valid. 
- * @return true when index is valid - * @throws IOException when index is already closed - */ - public boolean isValid () throws IOException { - checkPreconditions(); - boolean res = LuceneIndexManager.getDefault().readAccess(new LuceneIndexManager.Action() { - public Boolean run() throws IOException { - return LuceneIndex.this.valid; - } - }); - if (res) { - return res; - } - res = LuceneIndexManager.getDefault().writeAccess(new LuceneIndexManager.Action() { - public Boolean run() throws IOException { - boolean res = directory.list().length == 0; - if (!res) { - final Collection locks = getOrphanLock(); - res = locks.isEmpty(); - if (res) { - try { - res = IndexReader.indexExists(directory); - } catch (IOException e) { - //Directory does not exist, no need to call clear - res = false; - } catch (RuntimeException e) { - LOGGER.log(Level.INFO, "Broken index: " + indexFolder.getAbsolutePath(), e); - res = false; - } - if (res) { - try { - getReader(); - } catch (java.io.IOException e) { - res = false; - clear(); - } catch (RuntimeException e) { - res = false; - clear(); - } - } - } - else { - LOGGER.warning("Broken (locked) index folder: " + indexFolder.getAbsolutePath()); //NOI18N - for (String lockName : locks) { - directory.deleteFile(lockName); - } - clear(); - } - } - LuceneIndex.this.valid = res; - return res; - } - }); - return res; - } - - public void close() throws IOException { - checkPreconditions(); - LuceneIndexManager.getDefault().writeAccess(new LuceneIndexManager.Action() { - public Void run() throws IOException { - _close(); - return null; - } - }); + luceneIndex.clear(); } - // - public void evicted() { - //Threading: The called may own the LIM.readAccess, perform by dedicated worker to prevent deadlock - RP.post(new Runnable() { - public void run () { - try { - LuceneIndexManager.getDefault().writeAccess(new LuceneIndexManager.Action() { - public Void run() throws IOException { - _closeReader(); - if (LOGGER.isLoggable(Level.FINE)) { - LOGGER.fine("Evicted index: " + indexFolder.getAbsolutePath()); //NOI18N - } - return null; - } - }); - } catch (IOException ex) { - Exceptions.printStackTrace(ex); - } - } - }); + public void close() throws IOException { + luceneIndex.close(); } - /// - + // ----------------------------------------------------------------------- // Private implementation // ----------------------------------------------------------------------- - private static final Logger LOGGER = Logger.getLogger(LuceneIndex.class.getName()); - private static final boolean debugIndexMerging = Boolean.getBoolean("LuceneIndex.debugIndexMerge"); // NOI18N + private static final Logger LOGGER = Logger.getLogger(DocumentBasedIndex.class.getName()); + private static final Convertor ADD_CONVERTOR = new AddConvertor(); + private static final Convertor REMOVE_CONVERTOR = new RemoveConvertor(); + private static final Convertor QUERY_CONVERTOR = new QueryConvertor(); + private static final LMListener lmListener = new LMListener(); + + /* package */ static final int VERSION = 1; - /* package */ static final int VERSION = 1; - private static final String CACHE_LOCK_PREFIX = "nb-lock"; //NOI18N - - private final URL indexFolderUrl; private final File indexFolder; - - //@GuardedBy (LuceneIndexManager.writeAccess) - private volatile Directory directory; - private volatile IndexReader reader; //Cache, do not use this directly, use getReader - private volatile boolean closed; - private boolean valid; - private volatile boolean empty; //Volatile as there may be more readLocks in getReader 
and query - - private static final LMListener lmListener = new LMListener(); + private final Index luceneIndex; //@GuardedBy (this) private final List toAdd = new LinkedList(); private final List toRemove = new LinkedList(); private final Set staleFiles = new HashSet(); - private void _hit() { - IndexCacheFactory.getDefault().getCache().put(indexFolderUrl, this); - } - - // called under LuceneIndexManager.writeAccess - private void _clear() throws IOException { - _closeReader(); - try { - boolean dirty = false; - try { - final String[] content = this.directory.list(); - for (String file : content) { - try { - directory.deleteFile(file); - } catch (IOException e) { - //Some temporary files - if (directory.fileExists(file)) { - dirty = true; - } - } - } - } finally { - _closeDirectory(); - } - if (dirty) { - //Try to delete dirty files and log what's wrong - final File cacheDir = ((FSDirectory)this.directory).getFile(); - final File[] children = cacheDir.listFiles(); - if (children != null) { - for (final File child : children) { - if (!child.delete()) { - final Class c = this.directory.getClass(); - int refCount = -1; - try { - final Field field = c.getDeclaredField("refCount"); //NOI18N - field.setAccessible(true); - refCount = field.getInt(this.directory); - } catch (NoSuchFieldException e) {/*Not important*/} - catch (IllegalAccessException e) {/*Not important*/} - - throw new IOException("Cannot delete: " + child.getAbsolutePath() + "(" + //NOI18N - child.exists() +","+ //NOI18N - child.canRead() +","+ //NOI18N - child.canWrite() +","+ //NOI18N - cacheDir.canRead() +","+ //NOI18N - cacheDir.canWrite() +","+ //NOI18N - refCount+")"); //NOI18N - } - } - } - } - } finally { - //Need to recreate directory, see issue: #148374 - this.directory = createDirectory(indexFolder); - closed = false; - } - } - - // called under LuceneIndexManager.writeAccess - private void _close() throws IOException { - try { - _closeReader(); - } finally { - _closeDirectory(); - } - } - - // called under LuceneIndexManager.writeAccess - private void _closeReader() throws IOException { - if (reader != null) { - reader.close(); - reader = null; - } - } - - // called under LuceneIndexManager.writeAccess - private void _closeDirectory() throws IOException { - directory.close(); - closed = true; - } - - - - // called under LuceneIndexManager.readAccess - private static List _query( - final IndexReader in, - final String fieldName, - final String value, - final QuerySupport.Kind kind, - final String... 
fieldsToLoad - ) throws IOException { - - final List result = new LinkedList(); - final Set toSearch = new TreeSet (new TermComparator()); - - switch (kind) { - case EXACT: - { - toSearch.add(new Term (fieldName,value)); - break; - } - case PREFIX: - if (value.length() == 0) { - if (fieldName.length() == 0) { - //Special case (all) handle in different way - emptyPrefixSearch(in, fieldsToLoad, result); - return result; - } else { - final Term nameTerm = new Term (fieldName, value); - fieldSearch(nameTerm, in, toSearch); - break; - } - } - else { - final Term nameTerm = new Term (fieldName, value); - prefixSearch(nameTerm, in, toSearch); - break; - } - case CASE_INSENSITIVE_PREFIX: - if (value.length() == 0) { - if (fieldName.length() == 0) { - //Special case (all) handle in different way - emptyPrefixSearch(in, fieldsToLoad, result); - return result; - } else { - final Term nameTerm = new Term (fieldName, value); - fieldSearch(nameTerm, in, toSearch); - break; - } - } - else { - final Term nameTerm = new Term (fieldName,value.toLowerCase()); //XXX: I18N, Locale - prefixSearch(nameTerm, in, toSearch); - break; - } - case CAMEL_CASE: - if (value.length() == 0) { - throw new IllegalArgumentException (); - } - { - StringBuilder sb = new StringBuilder(); - String prefix = null; - int lastIndex = 0; - int index; - do { - index = findNextUpper(value, lastIndex + 1); - String token = value.substring(lastIndex, index == -1 ? value.length(): index); - if ( lastIndex == 0 ) { - prefix = token; - } - sb.append(Pattern.quote(token)); - sb.append( index != -1 ? "[\\p{javaLowerCase}\\p{Digit}_\\$]*" : ".*"); // NOI18N - lastIndex = index; - } - while(index != -1); - - final Pattern pattern = Pattern.compile(sb.toString()); - regExpSearch(pattern, new Term (fieldName,prefix),in,toSearch); - } - break; - case CASE_INSENSITIVE_REGEXP: - if (value.length() == 0) { - throw new IllegalArgumentException (); - } - else { - final Pattern pattern = Pattern.compile(value,Pattern.CASE_INSENSITIVE); - if (Character.isJavaIdentifierStart(value.charAt(0))) { - regExpSearch(pattern, new Term (fieldName, value.toLowerCase()), in, toSearch); //XXX: Locale - } - else { - regExpSearch(pattern, new Term (fieldName,""), in, toSearch); //NOI18N - } - break; - } - case REGEXP: - if (value.length() == 0) { - throw new IllegalArgumentException (); - } else { - final Pattern pattern = Pattern.compile(value); - if (Character.isJavaIdentifierStart(value.charAt(0))) { - regExpSearch(pattern, new Term (fieldName, value), in, toSearch); - } - else { - regExpSearch(pattern, new Term(fieldName,""), in, toSearch); //NOI18N - } - break; - } - case CASE_INSENSITIVE_CAMEL_CASE: - if (value.length() == 0) { - if (fieldName.length() == 0) { - //Special case (all) handle in different way - emptyPrefixSearch(in, fieldsToLoad, result); - return result; - } else { - final Term nameTerm = new Term (fieldName, value); - fieldSearch(nameTerm, in, toSearch); - break; - } - } - else { - final Term nameTerm = new Term(fieldName,value.toLowerCase()); //XXX: I18N, Locale - prefixSearch(nameTerm, in, toSearch); - StringBuilder sb = new StringBuilder(); - String prefix = null; - int lastIndex = 0; - int index; - do { - index = findNextUpper(value, lastIndex + 1); - String token = value.substring(lastIndex, index == -1 ? value.length(): index); - if ( lastIndex == 0 ) { - prefix = token; - } - sb.append(Pattern.quote(token)); - sb.append( index != -1 ? 
"[\\p{javaLowerCase}\\p{Digit}_\\$]*" : ".*"); // NOI18N - lastIndex = index; - } - while(index != -1); - final Pattern pattern = Pattern.compile(sb.toString()); - regExpSearch(pattern,new Term (fieldName, prefix),in,toSearch); - break; - } - default: - throw new UnsupportedOperationException (kind.toString()); - } - final TermDocs tds = in.termDocs(); - final Set docNums = new TreeSet(); - try { - int[] docs = new int[25]; - int[] freq = new int [25]; - int len; - for(Term t : toSearch) { - tds.seek(t); - while ((len = tds.read(docs, freq))>0) { - for (int i = 0; i < len; i++) { - docNums.add (docs[i]); - } - } - } - } finally { - tds.close(); - } - final FieldSelector selector = DocumentUtil.selector(fieldsToLoad); - for (Integer docNum : docNums) { - final Document doc = in.document(docNum, selector); - result.add (new LuceneDocument(doc)); - } - return result; - } - - // called under LuceneIndexManager.writeAccess - // Always has to invalidate the cached reader - private void flush(File indexFolder, List toAdd, List toRemove, Directory directory, LMListener lmListener, final boolean optimize) throws IOException { - LOGGER.log(Level.FINE, "Flushing: {0}", indexFolder); //NOI18N - try { - assert LuceneIndexManager.getDefault().holdsWriteLock(); - _hit(); - boolean exists = IndexReader.indexExists(this.directory); - final IndexWriter out = new IndexWriter( - directory, // index directory - new KeywordAnalyzer(), //analyzer to tokenize fields - !exists, // open existing or create new index - IndexWriter.MaxFieldLength.LIMITED - ); - try { - //1) delete all documents from to delete and toAdd - if (exists) { - for (Iterator it = toRemove.iterator(); it.hasNext();) { - String toRemoveItem = it.next(); - it.remove(); - out.deleteDocuments(DocumentUtil.sourceNameQuery(toRemoveItem)); - } - for (LuceneDocument toRemoveItem : toAdd) { - out.deleteDocuments(DocumentUtil.sourceNameQuery(toRemoveItem.getSourceName())); - } - } - //2) add all documents form to add - if (debugIndexMerging) { - out.setInfoStream (System.err); - } - - Directory memDir = null; - IndexWriter activeOut = null; - if (lmListener.isLowMemory()) { - activeOut = out; - } - else { - memDir = new RAMDirectory (); - activeOut = new IndexWriter (memDir, new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED); - } - for (Iterator it = toAdd.iterator(); it.hasNext();) { - final LuceneDocument doc = it.next(); - it.remove(); - activeOut.addDocument(doc.doc); - if (memDir != null && lmListener.isLowMemory()) { - activeOut.close(); - out.addIndexesNoOptimize(new Directory[] {memDir}); - memDir = new RAMDirectory (); - activeOut = new IndexWriter (memDir, new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED); - } - LOGGER.log(Level.FINEST, "LuceneDocument merged: {0}", doc); //NOI18N - } - if (memDir != null) { - activeOut.close(); - out.addIndexesNoOptimize(new Directory[] {memDir}); - activeOut = null; - memDir = null; - } - if (optimize) { - out.optimize(false); - } - } finally { - empty = false; - try { - out.close(); - } finally { - refreshReader(); - } - } - } catch (final LockObtainFailedException e) { - final String msg = "Valid: " + valid + " Locks: " + getOrphanLock(); //NOI18N - throw Exceptions.attachMessage(e, msg); - } finally { - LOGGER.log(Level.FINE, "Index flushed: {0}", indexFolder); //NOI18N - } - } - - // called under LuceneIndexManager.readAccess - private void checkPreconditions() throws IOException { - if (closed) { - throw new IOException("Index already closed: " + indexFolder); //NOI18N - } - 
} - - // called under LuceneIndexManager.readAccess or LuceneIndexManager.writeAccess - private IndexReader getReader() throws IOException { - _hit(); - synchronized (this) { - IndexReader r = reader; - if (r == null) { - //Issue #149757 - logging - try { - //It's important that no Query will get access to original IndexReader - //any norms call to it will initialize the HashTable of norms: sizeof (byte) * maxDoc() * max(number of unique fields in document) - r = reader = new NoNormsReader(IndexReader.open(this.directory)); - } catch (FileNotFoundException fnf) { - empty = true; - LOGGER.fine(String.format("LuceneIndex[%s] does not exist.", this.toString())); //NOI18N - //pass - returns null - } catch (IOException ioe) { - throw annotateException(ioe, indexFolder); - } - } - return r; - } - } - - private synchronized void refreshReader() throws IOException { - if (reader != null) { - final IndexReader newReader = reader.reopen(); - if (newReader != reader) { - reader.close(); - reader = newReader; - } - } - } - - private static Directory createDirectory(final File indexFolder) throws IOException { - assert indexFolder != null; - FSDirectory directory = FSDirectory.getDirectory(indexFolder); - directory.getLockFactory().setLockPrefix(CACHE_LOCK_PREFIX); - return directory; - } - - private Collection getOrphanLock () { - final String[] content = indexFolder.list(); - final List locks = new LinkedList(); - for (String name : content) { - if (name.startsWith(CACHE_LOCK_PREFIX)) { - locks.add(name); - } - } - return locks; - } - - @Override public String toString () { return getClass().getSimpleName()+"["+indexFolder.getAbsolutePath()+"]"; //NOI18N + } + + private static Queries.QueryKind translateQueryKind(final QuerySupport.Kind kind) { + switch (kind) { + case EXACT: return Queries.QueryKind.EXACT; + case PREFIX: return Queries.QueryKind.PREFIX; + case CASE_INSENSITIVE_PREFIX: return Queries.QueryKind.CASE_INSENSITIVE_PREFIX; + case CAMEL_CASE: return Queries.QueryKind.CAMEL_CASE; + case CASE_INSENSITIVE_REGEXP: return Queries.QueryKind.CASE_INSENSITIVE_PREFIX; + case REGEXP: return Queries.QueryKind.REGEXP; + case CASE_INSENSITIVE_CAMEL_CASE: return Queries.QueryKind.CASE_INSENSITIVE_CAMEL_CASE; + default: throw new UnsupportedOperationException (kind.toString()); + } + } + + private static final class AddConvertor implements Convertor { + @Override + public Document convert(LuceneDocument p) { + return p.doc; + } + } + + private static final class RemoveConvertor implements Convertor { + @Override + public Query convert(String p) { + return DocumentUtil.sourceNameQuery(p); + } + } + + private static final class QueryConvertor implements Convertor { + @Override + public IndexDocumentImpl convert(Document p) { + return new LuceneDocument(p); + } } - private static IOException annotateException (final IOException ioe, final File indexFolder) { - String message; - File[] children = indexFolder == null ? 
null : indexFolder.listFiles(); - if (children == null) { - message = "Non existing index folder"; //NOI18N - } - else { - StringBuilder b = new StringBuilder(); - b.append("Index folder: ").append(indexFolder.getAbsolutePath()).append("\n"); //NOI18N - for (File c : children) { - b.append(c.getName()).append(" f: ").append(c.isFile()) //NOI18N - .append(" r: ").append(c.canRead()) //NOI18N - .append(" w: ").append(c.canWrite()) //NOI18N - .append("\n"); //NOI18N - } - message = b.toString(); - } - return Exceptions.attachMessage(ioe, message); - } - - private static void emptyPrefixSearch (final IndexReader in, final String[] fieldsToLoad, final List result) throws IOException { - final int bound = in.maxDoc(); - for (int i=0; i toSearch) throws IOException { - final Object prefixField = valueTerm.field(); // It's Object only to silence the stupid hint - final TermEnum en = in.terms(valueTerm); - try { - do { - Term term = en.term(); - if (term != null && prefixField == term.field()) { - toSearch.add (term); - } - else { - break; - } - } while (en.next()); - } finally { - en.close(); - } - } - - private static void prefixSearch (final Term valueTerm, final IndexReader in, final Set toSearch) throws IOException { - final Object prefixField = valueTerm.field(); // It's Object only to silence the stupid hint - final String name = valueTerm.text(); - final TermEnum en = in.terms(valueTerm); - try { - do { - Term term = en.term(); - if (term != null && prefixField == term.field() && term.text().startsWith(name)) { - toSearch.add (term); - } - else { - break; - } - } while (en.next()); - } finally { - en.close(); - } - } - - private static void regExpSearch (final Pattern pattern, Term startTerm, final IndexReader in, final Set< ? super Term> toSearch) throws IOException { - final String startText = startTerm.text(); - String startPrefix; - if (startText.length() > 0) { - final StringBuilder startBuilder = new StringBuilder (); - startBuilder.append(startText.charAt(0)); - for (int i=1; i indexes = new HashMap (); + private final Map indexes = new HashMap (); - private LuceneIndexManager() {} + private DocumentBasedIndexManager() {} - public static interface Action { - public R run () throws IOException; - } - public static enum Mode { OPENED, CREATE, @@ -76,55 +69,26 @@ } - public R writeAccess (final Action action) throws IOException { - assert action != null; - lock.writeLock().lock(); - try { - return action.run(); - } - finally { - lock.writeLock().unlock(); - } - } - - public R readAccess (final Action action) throws IOException { - assert action != null; - lock.readLock().lock(); - try { - return action.run(); - } finally { - lock.readLock().unlock(); - } - } - - boolean holdsWriteLock () { - return lock.isWriteLockedByCurrentThread(); - } - - - public static synchronized LuceneIndexManager getDefault () { + public static synchronized DocumentBasedIndexManager getDefault () { if (instance == null) { - instance = new LuceneIndexManager(); + instance = new DocumentBasedIndexManager(); } return instance; } - public synchronized LuceneIndex getIndex (final URL root, final Mode mode) throws IOException { + public synchronized DocumentBasedIndex getIndex (final URL root, final Mode mode) throws IOException { assert root != null; - if (invalid) { - return null; - } - LuceneIndex li = indexes.get(root); + DocumentBasedIndex li = indexes.get(root); if (li == null) { switch (mode) { case CREATE: - li = new LuceneIndex(root); + li = new DocumentBasedIndex(root); indexes.put(root,li); break; case 
IF_EXIST: final FileObject fo = URLMapper.findFileObject(root); if (fo != null && fo.isFolder() && fo.getChildren(false).hasMoreElements()) { - li = new LuceneIndex(root); + li = new DocumentBasedIndex(root); indexes.put(root,li); } break; diff --git a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/DocumentUtil.java b/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/DocumentUtil.java --- a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/DocumentUtil.java +++ b/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/DocumentUtil.java @@ -42,15 +42,8 @@ package org.netbeans.modules.parsing.impl.indexing.lucene; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldSelector; -import org.apache.lucene.document.FieldSelectorResult; import org.apache.lucene.document.Fieldable; -import org.apache.lucene.document.SetBasedFieldSelector; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -75,23 +68,4 @@ assert relativePath != null; return new Term (FIELD_SOURCE_NAME, relativePath); } - - // when fields == null load all fields - static FieldSelector selector (String... fieldNames) { - if (fieldNames != null && fieldNames.length > 0) { - final Set fields = new HashSet(Arrays.asList(fieldNames)); - fields.add(FIELD_SOURCE_NAME); - final FieldSelector selector = new SetBasedFieldSelector(fields, - Collections.emptySet()); - return selector; - } else { - return ALL; - } - } - - private static final FieldSelector ALL = new FieldSelector() { - public FieldSelectorResult accept(String arg0) { - return FieldSelectorResult.LOAD; - } - }; } diff --git a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/LuceneIndexFactory.java b/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/LuceneIndexFactory.java --- a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/LuceneIndexFactory.java +++ b/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/LuceneIndexFactory.java @@ -57,24 +57,27 @@ */ public class LuceneIndexFactory implements IndexFactoryImpl { + @Override public IndexDocumentImpl createDocument(final Indexable indexable) { assert indexable !=null; return new LuceneDocument(indexable); } + @Override public IndexImpl createIndex (Context ctx) throws IOException { final FileObject luceneIndexFolder = getIndexFolder(ctx.getIndexFolder()); - return LuceneIndexManager.getDefault().getIndex(luceneIndexFolder.getURL(), LuceneIndexManager.Mode.CREATE); + return DocumentBasedIndexManager.getDefault().getIndex(luceneIndexFolder.getURL(), DocumentBasedIndexManager.Mode.CREATE); } + @Override public IndexImpl getIndex(final FileObject indexFolder) throws IOException { final FileObject luceneIndexFolder = getIndexFolder(indexFolder); - return LuceneIndexManager.getDefault().getIndex(luceneIndexFolder.getURL(), LuceneIndexManager.Mode.IF_EXIST); + return DocumentBasedIndexManager.getDefault().getIndex(luceneIndexFolder.getURL(), DocumentBasedIndexManager.Mode.IF_EXIST); } private FileObject getIndexFolder (final FileObject indexFolder) throws IOException { assert indexFolder != null; - final String indexVersion = Integer.toString(LuceneIndex.VERSION); + final String indexVersion = Integer.toString(DocumentBasedIndex.VERSION); final FileObject luceneIndexFolder = 
FileUtil.createFolder(indexFolder,indexVersion); //NOI18N return luceneIndexFolder; } diff --git a/parsing.api/src/org/netbeans/modules/parsing/spi/indexing/support/QuerySupport.java b/parsing.api/src/org/netbeans/modules/parsing/spi/indexing/support/QuerySupport.java --- a/parsing.api/src/org/netbeans/modules/parsing/spi/indexing/support/QuerySupport.java +++ b/parsing.api/src/org/netbeans/modules/parsing/spi/indexing/support/QuerySupport.java @@ -78,7 +78,6 @@ import org.netbeans.modules.parsing.impl.indexing.lucene.LuceneIndexFactory; import org.openide.filesystems.FileObject; import org.openide.filesystems.FileStateInvalidException; -import org.openide.util.Exceptions; import org.openide.util.Parameters; /** diff --git a/parsing.api/test/unit/src/org/netbeans/modules/parsing/impl/indexing/lucene/LuceneIndexTest.java b/parsing.api/test/unit/src/org/netbeans/modules/parsing/impl/indexing/lucene/LuceneIndexTest.java --- a/parsing.api/test/unit/src/org/netbeans/modules/parsing/impl/indexing/lucene/LuceneIndexTest.java +++ b/parsing.api/test/unit/src/org/netbeans/modules/parsing/impl/indexing/lucene/LuceneIndexTest.java @@ -43,15 +43,11 @@ package org.netbeans.modules.parsing.impl.indexing.lucene; import java.io.File; -import java.io.FileOutputStream; import java.io.IOException; -import java.lang.reflect.Field; import java.net.MalformedURLException; import java.net.URL; import java.util.BitSet; import java.util.Collection; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.store.Directory; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -70,7 +66,7 @@ private static File wd; private File indexFolder; - private LuceneIndex index; + private DocumentBasedIndex index; public LuceneIndexTest(final String name) { super(name); @@ -83,7 +79,7 @@ wd = getWorkDir(); indexFolder = new File (wd, "index"); //NOI18N indexFolder.mkdirs(); - index = new LuceneIndex(indexFolder.toURI().toURL()); + index = new DocumentBasedIndex(indexFolder.toURI().toURL()); } @After @@ -112,57 +108,6 @@ assertIndex(expected); } - @Test - public void testIsValid() throws Exception { - //Empty index => valid - assertTrue(index.isValid()); - - clearValidityCache(); - LuceneDocument docwrap = new LuceneDocument(SPIAccessor.getInstance().create(new FakeIndexableImpl(1))); - docwrap.addPair("bin", Integer.toBinaryString(1), true, true); - docwrap.addPair("oct", Integer.toOctalString(1), true, true); - index.addDocument(docwrap); - index.store(true, null); - //Existing index => valid - assertTrue(index.isValid()); - assertTrue(indexFolder.listFiles().length>0); - - clearValidityCache(); - createLock(); - //Index with orphan lock => invalid - assertFalse(index.isValid()); - assertTrue(indexFolder.listFiles().length==0); - - clearValidityCache(); - docwrap = new LuceneDocument(SPIAccessor.getInstance().create(new FakeIndexableImpl(1))); - docwrap.addPair("bin", Integer.toBinaryString(1), true, true); - docwrap.addPair("oct", Integer.toOctalString(1), true, true); - index.addDocument(docwrap); - index.store(true, null); - assertTrue(index.isValid()); - assertTrue(indexFolder.listFiles().length>0); - - //Broken index => invalid - clearValidityCache(); - File bt = null; - for (File file : indexFolder.listFiles()) { - if (file.getName().endsWith(".cfs")) { - bt = file; - break; - } - } - assertNotNull(bt); - FileOutputStream out = new FileOutputStream(bt); - try { - out.write(new byte[] {0,0,0,0,0,0,0,0,0,0}, 0, 10); - } finally { - out.close(); - } - assertFalse(index.isValid()); - 
assertTrue(indexFolder.listFiles().length==0); - - } - // Commented out as it takes a long time // @Test // public void testPerformance() throws Exception { @@ -190,30 +135,9 @@ // assertTrue(end < 3 * start); // } - private void clearValidityCache() throws NoSuchFieldException, IllegalArgumentException, IllegalAccessException, IOException { - final Class li = LuceneIndex.class; - final Field valid = li.getDeclaredField("valid"); //NOI18N - valid.setAccessible(true); - valid.set(index, false); - final Field reader = li.getDeclaredField("reader"); //NOI18N - reader.setAccessible(true); - IndexReader r = (IndexReader) reader.get(index); - if (r != null) { - r.close(); - } - reader.set(index,null); - } - private void createLock() throws NoSuchFieldException, IllegalArgumentException, IllegalAccessException, IOException { - final Class li = LuceneIndex.class; - final Field directory = li.getDeclaredField("directory"); //NOI18N - directory.setAccessible(true); - Directory dir = (Directory) directory.get(index); - dir.makeLock("test").obtain(); //NOI18N - } - - private void assertIndex(final BitSet expected) throws IOException { + private void assertIndex(final BitSet expected) throws IOException, InterruptedException { for (int i=0; i < expected.length(); i++) { final Collection res = index.query("bin", Integer.toBinaryString(i), Kind.EXACT, "bin","oct"); boolean should = expected.get(i); @@ -234,10 +158,12 @@ this.id = id; } + @Override public String getRelativePath() { return Integer.toString(id); } + @Override public URL getURL() { try { return new File(wd, getRelativePath()).toURI().toURL(); @@ -247,10 +173,12 @@ } } + @Override public String getMimeType() { return "text/test"; } + @Override public boolean isTypeOf(String mimeType) { return true; } diff --git a/parsing.lucene/apichanges.xml b/parsing.lucene/apichanges.xml new file mode 100644 --- /dev/null +++ b/parsing.lucene/apichanges.xml @@ -0,0 +1,176 @@ + + + + + + + + + + + + + Lucene Support Friend API + + + + + + + + Adding a low level indexing friend API + + + + + +

+ Added a low level indexing friend API based on Lucene to be shared among java.source and parsing.api. The API provides common access to the Lucene index, memory caching of indexes, effective Lucene index merging, IDE specific queries and a file descriptor pool.
+ Change History for the Parsing Lucene Support Friend API
+ Introduction
+ What do the Dates Mean?
+ The supplied dates indicate when the API change was made, on the CVS trunk. From this you can generally tell whether the change should be present in a given build or not; for trunk builds, simply whether it was made before or after the change; for builds on a stabilization branch, whether the branch was made before or after the given date. In some cases corresponding API changes have been made both in the trunk and in an in-progress stabilization branch, if they were needed for a bug fix; this ought to be marked in this list.
+ @FOOTER@
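For orientation, here is a minimal sketch of how a client of the new parsing.lucene friend API is meant to be used, assembled only from the calls visible in this patch (IndexManager.createIndex, Index.store, Index.query, Queries.createQuery, Queries.createFieldSelector, Convertor, Index.IndexClosedException). It is illustrative, not code from this changeset; the field name "sourceName" and the String-based document model are hypothetical placeholders, and the cancel argument is simply passed as null as the patched callers do.

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.lucene.analysis.KeywordAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.search.Query;
import org.netbeans.modules.parsing.lucene.support.Convertor;
import org.netbeans.modules.parsing.lucene.support.Index;
import org.netbeans.modules.parsing.lucene.support.IndexManager;
import org.netbeans.modules.parsing.lucene.support.Queries;

public class IndexClientSketch {

    public static void main(String[] args) throws IOException, InterruptedException {
        // Create (or open) an index in a cache folder; the analyzer is supplied by the
        // caller, as DocumentBasedIndex does with a KeywordAnalyzer.
        final File cacheFolder = new File(args[0]);
        final Index index = IndexManager.createIndex(cacheFolder, new KeywordAnalyzer());
        try {
            // Store: convertors translate the caller's items into Lucene Documents
            // (additions) and Queries (deletions), mirroring ADD_CONVERTOR/REMOVE_CONVERTOR.
            final List<String> toAdd = Collections.singletonList("pkg/Foo.java");
            final List<String> toRemove = Collections.emptyList();
            index.store(
                toAdd,
                toRemove,
                new Convertor<String, Document>() {
                    @Override
                    public Document convert(final String sourceName) {
                        final Document doc = new Document();
                        doc.add(new Field("sourceName", sourceName, Field.Store.YES, Field.Index.NOT_ANALYZED));
                        return doc;
                    }
                },
                new Convertor<String, Query>() {
                    @Override
                    public Query convert(final String sourceName) {
                        return Queries.createQuery("sourceName", "sourceName", sourceName, Queries.QueryKind.EXACT);
                    }
                },
                true);  // optimize after the write

            // Query: a Queries-built query plus a field selector; results are converted
            // back from Documents by another Convertor (cf. QUERY_CONVERTOR above).
            final List<String> found = new ArrayList<String>();
            index.query(
                found,
                new Convertor<Document, String>() {
                    @Override
                    public String convert(final Document doc) {
                        return doc.get("sourceName");
                    }
                },
                Queries.createFieldSelector("sourceName"),
                null,   // cancel request: none, as in the patched callers
                Queries.createQuery("sourceName", "sourceName", "pkg/", Queries.QueryKind.PREFIX));
            System.out.println(found);
        } catch (Index.IndexClosedException e) {
            // Raised when the index was closed concurrently, handled the same way as in ClassIndex.
        } finally {
            index.close();
        }
    }
}

Note also that the explicit LuceneIndexManager.readAccess/writeAccess wrappers disappear from DocumentBasedIndex and DocumentBasedIndexManager below; locking is presumably handled inside the parsing.lucene implementation, with IndexManager.readAccess/writeAccess available to callers that need to span several operations.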
diff --git a/parsing.lucene/build.xml b/parsing.lucene/build.xml new file mode 100644 --- /dev/null +++ b/parsing.lucene/build.xml @@ -0,0 +1,5 @@ + + + Builds, tests, and runs the project org.netbeans.modules.parsing.lucene + + diff --git a/parsing.lucene/manifest.mf b/parsing.lucene/manifest.mf new file mode 100644 --- /dev/null +++ b/parsing.lucene/manifest.mf @@ -0,0 +1,7 @@ +Manifest-Version: 1.0 +AutoUpdate-Show-In-Client: false +OpenIDE-Module: org.netbeans.modules.parsing.lucene +OpenIDE-Module-Layer: org/netbeans/modules/parsing/lucene/layer.xml +OpenIDE-Module-Localizing-Bundle: org/netbeans/modules/parsing/lucene/Bundle.properties +OpenIDE-Module-Specification-Version: 1.0 + diff --git a/parsing.lucene/nbproject/project.properties b/parsing.lucene/nbproject/project.properties new file mode 100644 --- /dev/null +++ b/parsing.lucene/nbproject/project.properties @@ -0,0 +1,4 @@ +is.autoload=true +javac.source=1.6 +javadoc.apichanges=${basedir}/apichanges.xml +javac.compilerargs=-Xlint -Xlint:-serial diff --git a/parsing.lucene/nbproject/project.xml b/parsing.lucene/nbproject/project.xml new file mode 100644 --- /dev/null +++ b/parsing.lucene/nbproject/project.xml @@ -0,0 +1,52 @@ + + + org.netbeans.modules.apisupport.project + + + org.netbeans.modules.parsing.lucene + + + org.netbeans.api.annotations.common + + + + 1 + 1.7 + + + + org.netbeans.libs.lucene + + + + 1 + 2.13 + + + + org.netbeans.modules.masterfs + + + + 2 + 2.29 + + + + org.openide.util + + + + 8.9 + + + + + org.netbeans.modules.java.source + org.netbeans.modules.java.sourceui + org.netbeans.modules.parsing.api + org.netbeans.modules.parsing.lucene.support + + + + diff --git a/parsing.lucene/src/org/netbeans/modules/parsing/lucene/BitSetCollector.java b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/BitSetCollector.java new file mode 100644 --- /dev/null +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/BitSetCollector.java @@ -0,0 +1,85 @@ +/* + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. + * + * Copyright 2010 Oracle and/or its affiliates. All rights reserved. + * + * Oracle and Java are registered trademarks of Oracle and/or its affiliates. + * Other names may be trademarks of their respective owners. + * + * The contents of this file are subject to the terms of either the GNU + * General Public License Version 2 only ("GPL") or the Common + * Development and Distribution License("CDDL") (collectively, the + * "License"). You may not use this file except in compliance with the + * License. You can obtain a copy of the License at + * http://www.netbeans.org/cddl-gplv2.html + * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the + * specific language governing permissions and limitations under the + * License. When distributing the software, include this License Header + * Notice in each file and include the License file at + * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the GPL Version 2 section of the License file that + * accompanied this code. 
If applicable, add the following below the + * License Header, with the fields enclosed by brackets [] replaced by + * your own identifying information: + * "Portions Copyrighted [year] [name of copyright owner]" + * + * If you wish your version of this file to be governed by only the CDDL + * or only the GPL Version 2, indicate your decision by adding + * "[Contributor] elects to include this software in this distribution + * under the [CDDL or GPL Version 2] license." If you do not indicate a + * single choice of license, a recipient has the option to distribute + * your version of this file under either the CDDL, the GPL Version 2 or + * to extend the choice of license to its licensees as provided above. + * However, if you add GPL Version 2 code and therefore, elected the GPL + * Version 2 license, then the option applies only if the new code is + * made subject to such option by the copyright holder. + * + * Contributor(s): + * + * Portions Copyrighted 2010 Sun Microsystems, Inc. + */ + +package org.netbeans.modules.parsing.lucene; + +import java.util.BitSet; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.Scorer; + +/** + * + * @author Tomas Zezula + */ +class BitSetCollector extends Collector { + + private int docBase; + public final BitSet bits; + + BitSetCollector(final BitSet bitSet) { + assert bitSet != null; + bits = bitSet; + } + + // ignore scorer + @Override + public void setScorer(Scorer scorer) { + } + + // accept docs out of order (for a BitSet it doesn't matter) + @Override + public boolean acceptsDocsOutOfOrder() { + return true; + } + + @Override + public void collect(int doc) { + bits.set(doc + docBase); + } + + @Override + public void setNextReader(IndexReader reader, int docBase) { + this.docBase = docBase; + } + +} \ No newline at end of file diff --git a/parsing.lucene/src/org/netbeans/modules/parsing/lucene/Bundle.properties b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/Bundle.properties new file mode 100644 --- /dev/null +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/Bundle.properties @@ -0,0 +1,5 @@ +OpenIDE-Module-Display-Category=Base IDE +OpenIDE-Module-Long-Description=\ + Provides common support for editor features requiring indexing. +OpenIDE-Module-Name=Parsing Lucene Support +OpenIDE-Module-Short-Description=Lucene Support for editor features diff --git a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/util/Evictable.java b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/Evictable.java rename from parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/util/Evictable.java rename to parsing.lucene/src/org/netbeans/modules/parsing/lucene/Evictable.java --- a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/util/Evictable.java +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/Evictable.java @@ -40,13 +40,13 @@ * Portions Copyrighted 2009 Sun Microsystems, Inc. 
*/ -package org.netbeans.modules.parsing.impl.indexing.lucene.util; +package org.netbeans.modules.parsing.lucene; /** * * @author Tomas Zezula */ -public interface Evictable { +interface Evictable { public void evicted (); diff --git a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/util/EvictionPolicy.java b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/EvictionPolicy.java rename from parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/util/EvictionPolicy.java rename to parsing.lucene/src/org/netbeans/modules/parsing/lucene/EvictionPolicy.java --- a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/util/EvictionPolicy.java +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/EvictionPolicy.java @@ -40,7 +40,7 @@ * Portions Copyrighted 2009 Sun Microsystems, Inc. */ -package org.netbeans.modules.parsing.impl.indexing.lucene.util; +package org.netbeans.modules.parsing.lucene; /** * diff --git a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/IndexCacheFactory.java b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/IndexCacheFactory.java rename from parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/IndexCacheFactory.java rename to parsing.lucene/src/org/netbeans/modules/parsing/lucene/IndexCacheFactory.java --- a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/IndexCacheFactory.java +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/IndexCacheFactory.java @@ -40,20 +40,17 @@ * Portions Copyrighted 2009 Sun Microsystems, Inc. */ -package org.netbeans.modules.parsing.impl.indexing.lucene; +package org.netbeans.modules.parsing.lucene; import java.net.URL; import java.util.logging.Logger; -import org.netbeans.modules.parsing.impl.indexing.lucene.util.Evictable; -import org.netbeans.modules.parsing.impl.indexing.lucene.util.EvictionPolicy; -import org.netbeans.modules.parsing.impl.indexing.lucene.util.LRUCache; import org.openide.util.Utilities; /** * * @author Tomas Zezula */ -public class IndexCacheFactory { +class IndexCacheFactory { private static final Logger LOG = Logger.getLogger(IndexCacheFactory.class.getName()); private static final IndexCacheFactory instance = new IndexCacheFactory(); diff --git a/parsing.lucene/src/org/netbeans/modules/parsing/lucene/IndexFactory.java b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/IndexFactory.java new file mode 100644 --- /dev/null +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/IndexFactory.java @@ -0,0 +1,57 @@ +/* + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. + * + * Copyright 2010 Oracle and/or its affiliates. All rights reserved. + * + * Oracle and Java are registered trademarks of Oracle and/or its affiliates. + * Other names may be trademarks of their respective owners. + * + * The contents of this file are subject to the terms of either the GNU + * General Public License Version 2 only ("GPL") or the Common + * Development and Distribution License("CDDL") (collectively, the + * "License"). You may not use this file except in compliance with the + * License. You can obtain a copy of the License at + * http://www.netbeans.org/cddl-gplv2.html + * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the + * specific language governing permissions and limitations under the + * License. When distributing the software, include this License Header + * Notice in each file and include the License file at + * nbbuild/licenses/CDDL-GPL-2-CP. 
Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the GPL Version 2 section of the License file that + * accompanied this code. If applicable, add the following below the + * License Header, with the fields enclosed by brackets [] replaced by + * your own identifying information: + * "Portions Copyrighted [year] [name of copyright owner]" + * + * If you wish your version of this file to be governed by only the CDDL + * or only the GPL Version 2, indicate your decision by adding + * "[Contributor] elects to include this software in this distribution + * under the [CDDL or GPL Version 2] license." If you do not indicate a + * single choice of license, a recipient has the option to distribute + * your version of this file under either the CDDL, the GPL Version 2 or + * to extend the choice of license to its licensees as provided above. + * However, if you add GPL Version 2 code and therefore, elected the GPL + * Version 2 license, then the option applies only if the new code is + * made subject to such option by the copyright holder. + * + * Contributor(s): + * + * Portions Copyrighted 2010 Sun Microsystems, Inc. + */ + +package org.netbeans.modules.parsing.lucene; + +import java.io.File; +import java.io.IOException; +import org.apache.lucene.analysis.Analyzer; +import org.netbeans.api.annotations.common.NonNull; +import org.netbeans.modules.parsing.lucene.support.Index; + +/** + * + * @author Tomas Zezula + */ +public interface IndexFactory { + Index createIndex (@NonNull File cacheFolder, @NonNull Analyzer analyzer) throws IOException; +} diff --git a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/util/LRUCache.java b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/LRUCache.java rename from parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/util/LRUCache.java rename to parsing.lucene/src/org/netbeans/modules/parsing/lucene/LRUCache.java --- a/parsing.api/src/org/netbeans/modules/parsing/impl/indexing/lucene/util/LRUCache.java +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/LRUCache.java @@ -40,7 +40,7 @@ * Portions Copyrighted 2009 Sun Microsystems, Inc. */ -package org.netbeans.modules.parsing.impl.indexing.lucene.util; +package org.netbeans.modules.parsing.lucene; import java.util.LinkedHashMap; import java.util.Map.Entry; @@ -51,7 +51,7 @@ * * @author Tomas Zezula */ -public final class LRUCache { +final class LRUCache { private final LinkedHashMap cache; private final ReadWriteLock lock; diff --git a/parsing.lucene/src/org/netbeans/modules/parsing/lucene/LowMemoryWatcher.java b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/LowMemoryWatcher.java new file mode 100644 --- /dev/null +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/LowMemoryWatcher.java @@ -0,0 +1,91 @@ +/* + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. + * + * Copyright 2010 Oracle and/or its affiliates. All rights reserved. + * + * Oracle and Java are registered trademarks of Oracle and/or its affiliates. + * Other names may be trademarks of their respective owners. + * + * The contents of this file are subject to the terms of either the GNU + * General Public License Version 2 only ("GPL") or the Common + * Development and Distribution License("CDDL") (collectively, the + * "License"). You may not use this file except in compliance with the + * License. You can obtain a copy of the License at + * http://www.netbeans.org/cddl-gplv2.html + * or nbbuild/licenses/CDDL-GPL-2-CP. 
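IndexFactory above is the seam that lets tests replace the on-disk index implementation (IndexManager, later in this patch, keeps a LuceneIndexFactory as its default and notes that unit tests override it). A hedged sketch of such a replacement; CountingIndexFactory is a made-up name, not part of the patch:

    // Hypothetical test helper: delegates to the real factory but records every creation.
    class CountingIndexFactory implements IndexFactory {
        final java.util.concurrent.atomic.AtomicInteger created = new java.util.concurrent.atomic.AtomicInteger();
        @Override
        public Index createIndex(File cacheFolder, Analyzer analyzer) throws IOException {
            created.incrementAndGet();
            return new LuceneIndexFactory().createIndex(cacheFolder, analyzer);
        }
    }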
See the License for the + * specific language governing permissions and limitations under the + * License. When distributing the software, include this License Header + * Notice in each file and include the License file at + * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the GPL Version 2 section of the License file that + * accompanied this code. If applicable, add the following below the + * License Header, with the fields enclosed by brackets [] replaced by + * your own identifying information: + * "Portions Copyrighted [year] [name of copyright owner]" + * + * If you wish your version of this file to be governed by only the CDDL + * or only the GPL Version 2, indicate your decision by adding + * "[Contributor] elects to include this software in this distribution + * under the [CDDL or GPL Version 2] license." If you do not indicate a + * single choice of license, a recipient has the option to distribute + * your version of this file under either the CDDL, the GPL Version 2 or + * to extend the choice of license to its licensees as provided above. + * However, if you add GPL Version 2 code and therefore, elected the GPL + * Version 2 license, then the option applies only if the new code is + * made subject to such option by the copyright holder. + * + * Contributor(s): + * + * Portions Copyrighted 2010 Sun Microsystems, Inc. + */ + +package org.netbeans.modules.parsing.lucene; + +import java.lang.management.ManagementFactory; +import java.lang.management.MemoryMXBean; +import java.lang.management.MemoryUsage; + +/** + * + * @author Tomas Zezula + */ +class LowMemoryWatcher { + + private static float heapLimit = 0.8f; + private static LowMemoryWatcher instance; + private final MemoryMXBean memBean; + + private LowMemoryWatcher () { + this.memBean = ManagementFactory.getMemoryMXBean(); + assert this.memBean != null; + } + + public boolean isLowMemory () { + if (this.memBean != null) { + final MemoryUsage usage = this.memBean.getHeapMemoryUsage(); + if (usage != null) { + long used = usage.getUsed(); + long max = usage.getMax(); + return used > max * heapLimit; + } + } + return false; + } + + static synchronized LowMemoryWatcher getInstance() { + if (instance == null) { + instance = new LowMemoryWatcher(); + } + return instance; + } + + static float getHeapLimit () { + return heapLimit; + } + + static void setHeapLimit(final float limit) { + heapLimit = limit; + } + +} diff --git a/parsing.lucene/src/org/netbeans/modules/parsing/lucene/LuceneIndex.java b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/LuceneIndex.java new file mode 100644 --- /dev/null +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/LuceneIndex.java @@ -0,0 +1,881 @@ +/* + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. + * + * Copyright 2010 Oracle and/or its affiliates. All rights reserved. + * + * Oracle and Java are registered trademarks of Oracle and/or its affiliates. + * Other names may be trademarks of their respective owners. + * + * The contents of this file are subject to the terms of either the GNU + * General Public License Version 2 only ("GPL") or the Common + * Development and Distribution License("CDDL") (collectively, the + * "License"). You may not use this file except in compliance with the + * License. You can obtain a copy of the License at + * http://www.netbeans.org/cddl-gplv2.html + * or nbbuild/licenses/CDDL-GPL-2-CP. 
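LowMemoryWatcher above reports whether heap usage has crossed heapLimit (0.8 of the maximum by default). The typical pattern, which LuceneIndex.storeData() further below follows, is to batch work in memory only while there is headroom; a sketch, where the documents source and the flush() helper are placeholders, not real API:

    // Illustrative: drain the in-memory batch whenever the heap is getting full.
    final LowMemoryWatcher watcher = LowMemoryWatcher.getInstance();
    final List<Document> batch = new ArrayList<Document>();
    for (Document doc : documents) {          // 'documents' is a placeholder source
        batch.add(doc);
        if (watcher.isLowMemory()) {
            flush(batch);                     // placeholder: write the batch out to disk
            batch.clear();
        }
    }
    flush(batch);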
See the License for the + * specific language governing permissions and limitations under the + * License. When distributing the software, include this License Header + * Notice in each file and include the License file at + * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the GPL Version 2 section of the License file that + * accompanied this code. If applicable, add the following below the + * License Header, with the fields enclosed by brackets [] replaced by + * your own identifying information: + * "Portions Copyrighted [year] [name of copyright owner]" + * + * If you wish your version of this file to be governed by only the CDDL + * or only the GPL Version 2, indicate your decision by adding + * "[Contributor] elects to include this software in this distribution + * under the [CDDL or GPL Version 2] license." If you do not indicate a + * single choice of license, a recipient has the option to distribute + * your version of this file under either the CDDL, the GPL Version 2 or + * to extend the choice of license to its licensees as provided above. + * However, if you add GPL Version 2 code and therefore, elected the GPL + * Version 2 license, then the option applies only if the new code is + * made subject to such option by the copyright holder. + * + * Contributor(s): + * + * Portions Copyrighted 2010 Sun Microsystems, Inc. + */ + +package org.netbeans.modules.parsing.lucene; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.lang.ref.SoftReference; +import java.lang.reflect.Field; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.Arrays; +import java.util.BitSet; +import java.util.Collection; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.FieldSelector; +import org.apache.lucene.document.FieldSelectorResult; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.FilterIndexReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.TermEnum; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.DefaultSimilarity; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Searcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FSDirectory; +import org.apache.lucene.store.RAMDirectory; +import org.netbeans.api.annotations.common.NonNull; +import org.netbeans.api.annotations.common.NullAllowed; +import org.netbeans.modules.parsing.lucene.support.Convertor; +import org.netbeans.modules.parsing.lucene.support.Index; +import org.netbeans.modules.parsing.lucene.support.IndexManager; +import org.netbeans.modules.parsing.lucene.support.StoppableConvertor; +import org.openide.util.Exceptions; +import org.openide.util.Parameters; +import org.openide.util.RequestProcessor; +import org.openide.util.Utilities; + +/** + * + * @author Tomas Zezula + */ +//@NotTreadSafe +public 
class LuceneIndex implements Index { + + private static final String PROP_INDEX_POLICY = "java.index.useMemCache"; //NOI18N + private static final String PROP_CACHE_SIZE = "java.index.size"; //NOI18N + private static final boolean debugIndexMerging = Boolean.getBoolean("java.index.debugMerge"); // NOI18N + private static final CachePolicy DEFAULT_CACHE_POLICY = CachePolicy.DYNAMIC; + private static final float DEFAULT_CACHE_SIZE = 0.05f; + private static final CachePolicy cachePolicy = getCachePolicy(); + private static final Logger LOGGER = Logger.getLogger(LuceneIndex.class.getName()); + private static final FieldSelector ALL_FIELDS = new AllFieldsSelector(); + + + private final DirCache dirCache; + + public static LuceneIndex create (final File cacheRoot, final Analyzer analyzer) throws IOException { + assert cacheRoot != null && cacheRoot.exists() && cacheRoot.canRead() && cacheRoot.canWrite(); + return new LuceneIndex (cacheRoot, analyzer); + } + + /** Creates a new instance of LuceneIndex */ + private LuceneIndex (final File refCacheRoot, final Analyzer analyzer) throws IOException { + assert refCacheRoot != null; + assert analyzer != null; + this.dirCache = new DirCache(refCacheRoot,cachePolicy, analyzer); + } + + @Override + public void query ( + final @NonNull Collection result, + final @NonNull Convertor convertor, + @NullAllowed FieldSelector selector, + final @NullAllowed AtomicBoolean cancel, + final @NonNull Query... queries + ) throws IOException, InterruptedException { + Parameters.notNull("queries", queries); //NOI18N + Parameters.notNull("convertor", convertor); //NOI18N + Parameters.notNull("result", result); //NOI18N + final IndexReader in = dirCache.getReader(); + if (in == null) { + LOGGER.fine(String.format("LuceneIndex[%s] is invalid!\n", this.toString())); + return; + } + if (selector == null) { + selector = ALL_FIELDS; + } + final BitSet bs = new BitSet(in.maxDoc()); + final Collector c = new BitSetCollector(bs); + final Searcher searcher = new IndexSearcher(in); + try { + for (Query q : queries) { + if (cancel != null && cancel.get()) { + throw new InterruptedException (); + } + searcher.search(q, c); + } + } finally { + searcher.close(); + } + for (int docNum = bs.nextSetBit(0); docNum >= 0; docNum = bs.nextSetBit(docNum+1)) { + if (cancel != null && cancel.get()) { + throw new InterruptedException (); + } + final Document doc = in.document(docNum, selector); + final T value = convertor.convert(doc); + if (value != null) { + result.add (value); + } + } + } + + @Override + public void queryTerms( + final @NonNull Collection result, + final @NullAllowed Term seekTo, + final @NonNull StoppableConvertor filter, + final @NullAllowed AtomicBoolean cancel) throws IOException, InterruptedException { + + final IndexReader in = dirCache.getReader(); + if (in == null) { + return; + } + + final TermEnum terms = seekTo == null ? 
in.terms () : in.terms (seekTo); + try { + do { + if (cancel.get()) { + throw new InterruptedException (); + } + final Term currentTerm = terms.term(); + if (currentTerm != null) { + final T vote = filter.convert(currentTerm); + if (vote != null) { + result.add(vote); + } + } + } while (terms.next()); + } catch (StoppableConvertor.Stop stop) { + //Stop iteration of TermEnum + } finally { + terms.close(); + } + } + + @Override + public void queryDocTerms( + final @NonNull Map> result, + final @NonNull Convertor convertor, + final @NonNull Convertor termConvertor, + @NullAllowed FieldSelector selector, + final @NullAllowed AtomicBoolean cancel, + final @NonNull Query... queries) throws IOException, InterruptedException { + Parameters.notNull("queries", queries); //NOI18N + Parameters.notNull("slector", selector); //NOI18N + Parameters.notNull("convertor", convertor); //NOI18N + Parameters.notNull("termConvertor", termConvertor); //NOI18N + Parameters.notNull("result", result); //NOI18N + final IndexReader in = dirCache.getReader(); + if (in == null) { + LOGGER.fine(String.format("LuceneIndex[%s] is invalid!\n", this.toString())); //NOI18N + return; + } + if (selector == null) { + selector = ALL_FIELDS; + } + final BitSet bs = new BitSet(in.maxDoc()); + final Collector c = new BitSetCollector(bs); + final Searcher searcher = new IndexSearcher(in); + final TermCollector termCollector = new TermCollector(); + try { + for (Query q : queries) { + if (cancel.get()) { + throw new InterruptedException (); + } + if (q instanceof TermCollector.TermCollecting) { + ((TermCollector.TermCollecting)q).attach(termCollector); + } else { + throw new IllegalArgumentException ( + String.format("Query: %s does not implement TermCollecting", //NOI18N + q.getClass().getName())); + } + searcher.search(q, c); + } + } finally { + searcher.close(); + } + + for (int docNum = bs.nextSetBit(0); docNum >= 0; docNum = bs.nextSetBit(docNum+1)) { + if (cancel.get()) { + throw new InterruptedException (); + } + final Document doc = in.document(docNum, selector); + final T value = convertor.convert(doc); + if (value != null) { + final Set terms = termCollector.get(docNum); + result.put (value, convertTerms(termConvertor, terms)); + } + } + } + + private static Set convertTerms(final Convertor convertor, final Set terms) { + final Set result = new HashSet(terms.size()); + for (Term term : terms) { + result.add(convertor.convert(term)); + } + return result; + } + + @Override + public void store ( + final @NonNull Collection toAdd, + final @NonNull Collection toDelete, + final @NonNull Convertor docConvertor, + final @NonNull Convertor queryConvertor, + final boolean optimize) throws IOException{ + try { + IndexManager.writeAccess(new IndexManager.Action() { + @Override + public Void run() throws IOException, InterruptedException { + _store(toAdd, toDelete, docConvertor, queryConvertor, optimize); + return null; + } + }); + } catch (InterruptedException ie) { + throw new IOException("Interrupted"); //NOI18N + } + } + + private void _store ( + final @NonNull Collection data, + final @NonNull Collection toDelete, + final @NonNull Convertor docConvertor, + final @NonNull Convertor queryConvertor, + final boolean optimize) throws IOException { + assert IndexManager.holdsWriteLock(); + boolean create = !exists(); + final IndexWriter out = dirCache.getWriter(create); + try { + if (!create) { + for (S td : toDelete) { + out.deleteDocuments(queryConvertor.convert(td)); + } + } + storeData(out, data, docConvertor, optimize); + } finally { 
+ try { + out.close(); + } finally { + dirCache.refreshReader(); + } + } + } + + private void storeData ( + final IndexWriter out, + final @NonNull Collection data, + final @NonNull Convertor convertor, + final boolean optimize) throws IOException { + if (debugIndexMerging) { + out.setInfoStream (System.err); + } + final LowMemoryWatcher lmListener = LowMemoryWatcher.getInstance(); + Directory memDir = null; + IndexWriter activeOut = null; + if (lmListener.isLowMemory()) { + activeOut = out; + } + else { + memDir = new RAMDirectory (); + activeOut = new IndexWriter (memDir, dirCache.getAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED); + } + for (Iterator it = data.iterator(); it.hasNext();) { + T entry = it.next(); + it.remove(); + final Document doc = convertor.convert(entry); + activeOut.addDocument(doc); + if (memDir != null && lmListener.isLowMemory()) { + activeOut.close(); + out.addIndexesNoOptimize(new Directory[] {memDir}); + memDir = new RAMDirectory (); + activeOut = new IndexWriter (memDir, dirCache.getAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED); + } + } + if (memDir != null) { + activeOut.close(); + out.addIndexesNoOptimize(new Directory[] {memDir}); + activeOut = null; + memDir = null; + } + if (optimize) { + out.optimize(false); + } + } + + @Override + public boolean isValid (boolean force) throws IOException { + return dirCache.isValid(force); + } + + @Override + public void clear () throws IOException { + try { + IndexManager.writeAccess(new IndexManager.Action() { + @Override + public Void run() throws IOException, InterruptedException { + dirCache.clear(); + return null; + } + }); + } catch (InterruptedException ex) { + throw new IOException(ex); + } + } + + @Override + public boolean exists () { + return this.dirCache.exists(); + } + + @Override + public void close () throws IOException { + if (LOGGER.isLoggable(Level.FINEST)) { + LOGGER.log(Level.FINEST, "Closing index: {0} {1}", //NOI18N + new Object[]{ + this.dirCache.toString(), + Thread.currentThread().getStackTrace()}); + } + dirCache.close(true); + } + + + @Override + public String toString () { + return getClass().getSimpleName()+"["+this.dirCache.toString()+"]"; //NOI18N + } + + private static CachePolicy getCachePolicy() { + final String value = System.getProperty(PROP_INDEX_POLICY); //NOI18N + if (Boolean.TRUE.toString().equals(value) || + CachePolicy.ALL.getSystemName().equals(value)) { + return CachePolicy.ALL; + } + if (Boolean.FALSE.toString().equals(value) || + CachePolicy.NONE.getSystemName().equals(value)) { + return CachePolicy.NONE; + } + if (CachePolicy.DYNAMIC.getSystemName().equals(value)) { + return CachePolicy.DYNAMIC; + } + return DEFAULT_CACHE_POLICY; + } + + + // + + private static class AllFieldsSelector implements FieldSelector { + @Override + public FieldSelectorResult accept(final String fieldName) { + return FieldSelectorResult.LOAD; + } + } + + private static class NoNormsReader extends FilterIndexReader { + + //@GuardedBy (this) + private byte[] norms; + + public NoNormsReader (final IndexReader reader) { + super (reader); + } + + @Override + public byte[] norms(String field) throws IOException { + byte[] _norms = fakeNorms (); + return _norms; + } + + @Override + public void norms(String field, byte[] norm, int offset) throws IOException { + byte[] _norms = fakeNorms (); + System.arraycopy(_norms, 0, norm, offset, _norms.length); + } + + @Override + public boolean hasNorms(String field) throws IOException { + return false; + } + + @Override + protected void doSetNorm(int 
doc, String field, byte norm) throws CorruptIndexException, IOException { + //Ignore + } + + @Override + protected void doClose() throws IOException { + synchronized (this) { + this.norms = null; + } + super.doClose(); + } + + @Override + public IndexReader reopen() throws IOException { + final IndexReader newIn = in.reopen(); + if (newIn == in) { + return this; + } + return new NoNormsReader(newIn); + } + + /** + * Expert: Fakes norms, norms are not needed for Netbeans index. + */ + private synchronized byte[] fakeNorms() { + if (this.norms == null) { + this.norms = new byte[maxDoc()]; + Arrays.fill(this.norms, DefaultSimilarity.encodeNorm(1.0f)); + } + return this.norms; + } + } + + private enum CachePolicy { + + NONE("none", false), //NOI18N + DYNAMIC("dynamic", true), //NOI18N + ALL("all", true); //NOI18N + + private final String sysName; + private final boolean hasMemCache; + + CachePolicy(final String sysName, final boolean hasMemCache) { + assert sysName != null; + this.sysName = sysName; + this.hasMemCache = hasMemCache; + } + + String getSystemName() { + return sysName; + } + + boolean hasMemCache() { + return hasMemCache; + } + } + + private static final class DirCache implements Evictable { + + private static final String CACHE_LOCK_PREFIX = "nb-lock"; //NOI18N + private static final RequestProcessor RP = new RequestProcessor(LuceneIndex.class.getName(), 1); + private static final long maxCacheSize = getCacheSize(); + private static volatile long currentCacheSize; + + private final File folder; + private final CachePolicy cachePolicy; + private final Analyzer analyzer; + private FSDirectory fsDir; + private RAMDirectory memDir; + private CleanReference ref; + private IndexReader reader; + private volatile boolean closed; + private volatile Boolean validCache; + + private DirCache( + final @NonNull File folder, + final @NonNull CachePolicy cachePolicy, + final @NonNull Analyzer analyzer) throws IOException { + assert folder != null; + assert cachePolicy != null; + assert analyzer != null; + this.folder = folder; + this.fsDir = createFSDirectory(folder); + this.cachePolicy = cachePolicy; + this.analyzer = analyzer; + } + + Analyzer getAnalyzer() { + return this.analyzer; + } + + synchronized void clear() throws IOException { + checkPreconditions(); + close (false); + try { + final String[] content = fsDir.listAll(); + boolean dirty = false; + if (content != null) { + for (String file : content) { + try { + fsDir.deleteFile(file); + } catch (IOException e) { + //Some temporary files + if (fsDir.fileExists(file)) { + dirty = true; + } + } + } + } + if (dirty) { + //Try to delete dirty files and log what's wrong + final File cacheDir = fsDir.getFile(); + final File[] children = cacheDir.listFiles(); + if (children != null) { + for (final File child : children) { + if (!child.delete()) { + final Class c = fsDir.getClass(); + int refCount = -1; + try { + final Field field = c.getDeclaredField("refCount"); //NOI18N + field.setAccessible(true); + refCount = field.getInt(fsDir); + } catch (NoSuchFieldException e) {/*Not important*/} + catch (IllegalAccessException e) {/*Not important*/} + final Map sts = Thread.getAllStackTraces(); + throw new IOException("Cannot delete: " + child.getAbsolutePath() + "(" + //NOI18N + child.exists() +","+ //NOI18N + child.canRead() +","+ //NOI18N + child.canWrite() +","+ //NOI18N + cacheDir.canRead() +","+ //NOI18N + cacheDir.canWrite() +","+ //NOI18N + refCount +","+ //NOI18N + sts +")"); //NOI18N + } + } + } + } + } finally { + //Need to recreate 
directory, see issue: #148374 + this.close(true); + this.fsDir = createFSDirectory(this.folder); + closed = false; + } + } + + synchronized void close (final boolean closeFSDir) throws IOException { + try { + try { + if (this.reader != null) { + this.reader.close(); + this.reader = null; + } + } finally { + if (memDir != null) { + assert cachePolicy.hasMemCache(); + if (this.ref != null) { + this.ref.clear(); + } + final Directory tmpDir = this.memDir; + memDir = null; + tmpDir.close(); + } + } + } finally { + if (closeFSDir) { + this.closed = true; + this.fsDir.close(); + } + } + } + + boolean exists() { + try { + return IndexReader.indexExists(this.fsDir); + } catch (IOException e) { + return false; + } catch (RuntimeException e) { + LOGGER.log(Level.INFO, "Broken index: " + folder.getAbsolutePath(), e); + return false; + } + } + + boolean isValid(boolean force) throws IOException { + checkPreconditions(); + Boolean valid = validCache; + if (force || valid == null) { + final Collection locks = getOrphanLock(); + boolean res = false; + if (!locks.isEmpty()) { + LOGGER.log(Level.WARNING, "Broken (locked) index folder: {0}", folder.getAbsolutePath()); //NOI18N + for (String lockName : locks) { + fsDir.deleteFile(lockName); + } + if (force) { + clear(); + } + } else { + res = exists(); + if (res && force) { + try { + getReader(); + } catch (java.io.IOException e) { + res = false; + clear(); + } catch (RuntimeException e) { + res = false; + clear(); + } + } + } + valid = res; + validCache = valid; + } + return valid; + } + + IndexWriter getWriter (final boolean create) throws IOException { + checkPreconditions(); + hit(); + //Issue #149757 - logging + try { + return new IndexWriter (this.fsDir, analyzer, create, IndexWriter.MaxFieldLength.LIMITED); + } catch (IOException ioe) { + throw annotateException (ioe); + } + } + + synchronized IndexReader getReader () throws IOException { + checkPreconditions(); + hit(); + if (this.reader == null) { + if (validCache == Boolean.FALSE) { + return null; + } + //Issue #149757 - logging + try { + Directory source; + if (cachePolicy.hasMemCache()) { + memDir = new RAMDirectory(fsDir); + if (cachePolicy == CachePolicy.DYNAMIC) { + ref = new CleanReference (new RAMDirectory[] {this.memDir}); + } + source = memDir; + } else { + source = fsDir; + } + assert source != null; + this.reader = new NoNormsReader(IndexReader.open(source,true)); + } catch (final FileNotFoundException fnf) { + //pass - returns null + } catch (IOException ioe) { + throw annotateException (ioe); + } + } + return this.reader; + } + + + synchronized void refreshReader() throws IOException { + try { + if (cachePolicy.hasMemCache()) { + close(false); + } else { + if (reader != null) { + final IndexReader newReader = reader.reopen(); + if (newReader != reader) { + reader.close(); + reader = newReader; + } + } + } + } finally { + validCache = true; + } + } + + @Override + public String toString() { + return this.folder.getAbsolutePath(); + } + + @Override + public void evicted() { + //When running from memory cache no need to close the reader, it does not own file handler. 
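For reference, the memory-cache behaviour used throughout DirCache is driven by the two system properties parsed in getCachePolicy() and getCacheSize() above; the accepted values, taken from that code, are:

    -Djava.index.useMemCache=all        keep each index fully in a RAMDirectory ("true" maps here too)
    -Djava.index.useMemCache=dynamic    default: RAM copy held via CleanReference, dropped under memory pressure
    -Djava.index.useMemCache=none       read straight from the FSDirectory ("false" maps here too)
    -Djava.index.size=0.1               RAM-cache budget as a fraction of max heap (default 0.05)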
+ if (!cachePolicy.hasMemCache()) { + //Threading: The called may own the CIM.readAccess, perform by dedicated worker to prevent deadlock + RP.post(new Runnable() { + @Override + public void run () { + try { + IndexManager.writeAccess(new IndexManager.Action() { + @Override + public Void run() throws IOException, InterruptedException { + close(false); + LOGGER.log(Level.FINE, "Evicted index: {0}", folder.getAbsolutePath()); //NOI18N + return null; + } + }); + } catch (IOException ex) { + Exceptions.printStackTrace(ex); + } catch (InterruptedException ie) { + Exceptions.printStackTrace(ie); + } + } + }); + } else if ((ref != null && currentCacheSize > maxCacheSize)) { + ref.clearHRef(); + } + } + + private synchronized void hit() { + if (!cachePolicy.hasMemCache()) { + try { + final URL url = folder.toURI().toURL(); + IndexCacheFactory.getDefault().getCache().put(url, this); + } catch (MalformedURLException e) { + Exceptions.printStackTrace(e); + } + } else if (ref != null) { + ref.get(); + } + } + + private Collection getOrphanLock () { + final List locks = new LinkedList(); + final String[] content = folder.list(); + if (content != null) { + for (String name : content) { + if (name.startsWith(CACHE_LOCK_PREFIX)) { + locks.add(name); + } + } + } + return locks; + } + + private void checkPreconditions () throws IndexClosedException { + if (closed) { + throw new IndexClosedException(); + } + } + + private IOException annotateException (final IOException ioe) { + String message; + File[] children = folder.listFiles(); + if (children == null) { + message = "Non existing index folder"; + } + else { + StringBuilder b = new StringBuilder(); + for (File c : children) { + b.append(c.getName()).append(" f: ").append(c.isFile()). + append(" r: ").append(c.canRead()). + append(" w: ").append(c.canWrite()).append("\n"); //NOI18N + } + message = b.toString(); + } + return Exceptions.attachMessage(ioe, message); + } + + private static FSDirectory createFSDirectory (final File indexFolder) throws IOException { + assert indexFolder != null; + FSDirectory directory = FSDirectory.open(indexFolder); + directory.getLockFactory().setLockPrefix(CACHE_LOCK_PREFIX); + return directory; + } + + private static long getCacheSize() { + float per = -1.0f; + final String propVal = System.getProperty(PROP_CACHE_SIZE); + if (propVal != null) { + try { + per = Float.parseFloat(propVal); + } catch (NumberFormatException nfe) { + //Handled below + } + } + if (per<0) { + per = DEFAULT_CACHE_SIZE; + } + return (long) (per * Runtime.getRuntime().maxMemory()); + } + + private final class CleanReference extends SoftReference implements Runnable { + + @SuppressWarnings("VolatileArrayField") + private volatile Directory[] hardRef; //clearHRef may be called by more concurrently (read lock). + private final AtomicLong size = new AtomicLong(); //clearHRef may be called by more concurrently (read lock). 
+ + private CleanReference(final RAMDirectory[] dir) { + super (dir, Utilities.activeReferenceQueue()); + boolean doHardRef = currentCacheSize < maxCacheSize; + if (doHardRef) { + this.hardRef = dir; + long _size = dir[0].sizeInBytes(); + size.set(_size); + currentCacheSize+=_size; + } + LOGGER.log(Level.FINEST, "Caching index: {0} cache policy: {1}", //NOI18N + new Object[]{ + folder.getAbsolutePath(), + cachePolicy.getSystemName() + }); + } + + @Override + public void run() { + try { + LOGGER.log(Level.FINEST, "Dropping cache index: {0} cache policy: {1}", //NOI18N + new Object[] { + folder.getAbsolutePath(), + cachePolicy.getSystemName() + }); + close(false); + } catch (IOException ex) { + Exceptions.printStackTrace(ex); + } + } + + @Override + public void clear() { + clearHRef(); + super.clear(); + } + + void clearHRef() { + this.hardRef = null; + long mySize = size.getAndSet(0); + currentCacheSize-=mySize; + } + } + } + // + +} diff --git a/parsing.lucene/src/org/netbeans/modules/parsing/lucene/LuceneIndexFactory.java b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/LuceneIndexFactory.java new file mode 100644 --- /dev/null +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/LuceneIndexFactory.java @@ -0,0 +1,61 @@ +/* + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. + * + * Copyright 2010 Oracle and/or its affiliates. All rights reserved. + * + * Oracle and Java are registered trademarks of Oracle and/or its affiliates. + * Other names may be trademarks of their respective owners. + * + * The contents of this file are subject to the terms of either the GNU + * General Public License Version 2 only ("GPL") or the Common + * Development and Distribution License("CDDL") (collectively, the + * "License"). You may not use this file except in compliance with the + * License. You can obtain a copy of the License at + * http://www.netbeans.org/cddl-gplv2.html + * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the + * specific language governing permissions and limitations under the + * License. When distributing the software, include this License Header + * Notice in each file and include the License file at + * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the GPL Version 2 section of the License file that + * accompanied this code. If applicable, add the following below the + * License Header, with the fields enclosed by brackets [] replaced by + * your own identifying information: + * "Portions Copyrighted [year] [name of copyright owner]" + * + * If you wish your version of this file to be governed by only the CDDL + * or only the GPL Version 2, indicate your decision by adding + * "[Contributor] elects to include this software in this distribution + * under the [CDDL or GPL Version 2] license." If you do not indicate a + * single choice of license, a recipient has the option to distribute + * your version of this file under either the CDDL, the GPL Version 2 or + * to extend the choice of license to its licensees as provided above. + * However, if you add GPL Version 2 code and therefore, elected the GPL + * Version 2 license, then the option applies only if the new code is + * made subject to such option by the copyright holder. + * + * Contributor(s): + * + * Portions Copyrighted 2010 Sun Microsystems, Inc. 
+ */ + +package org.netbeans.modules.parsing.lucene; + +import java.io.File; +import java.io.IOException; +import org.apache.lucene.analysis.Analyzer; +import org.netbeans.modules.parsing.lucene.support.Index; + +/** + * + * @author Tomas Zezula + */ +public class LuceneIndexFactory implements IndexFactory { + + @Override + public Index createIndex(File cacheFolder, Analyzer analyzer) throws IOException { + return LuceneIndex.create(cacheFolder, analyzer); + } + +} diff --git a/java.source/src/org/netbeans/modules/java/source/usages/TermCollector.java b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/TermCollector.java copy from java.source/src/org/netbeans/modules/java/source/usages/TermCollector.java copy to parsing.lucene/src/org/netbeans/modules/parsing/lucene/TermCollector.java --- a/java.source/src/org/netbeans/modules/java/source/usages/TermCollector.java +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/TermCollector.java @@ -40,7 +40,7 @@ * Portions Copyrighted 2010 Sun Microsystems, Inc. */ -package org.netbeans.modules.java.source.usages; +package org.netbeans.modules.parsing.lucene; import java.util.HashMap; import java.util.HashSet; @@ -53,7 +53,7 @@ * * @author Tomas Zezula */ -final class TermCollector { +public final class TermCollector { private final Map> doc2Terms; @@ -61,7 +61,7 @@ doc2Terms = new HashMap>(); } - void add (final int docId, final @NonNull Term term) { + public void add (final int docId, final @NonNull Term term) { Set slot = doc2Terms.get(docId); if (slot == null) { slot = new HashSet(); @@ -75,7 +75,7 @@ } - static interface TermCollecting { + public static interface TermCollecting { void attach (TermCollector collector); } diff --git a/parsing.lucene/src/org/netbeans/modules/parsing/lucene/layer.xml b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/layer.xml new file mode 100644 --- /dev/null +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/layer.xml @@ -0,0 +1,3 @@ + + + diff --git a/parsing.lucene/src/org/netbeans/modules/parsing/lucene/support/Convertor.java b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/support/Convertor.java new file mode 100644 --- /dev/null +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/support/Convertor.java @@ -0,0 +1,59 @@ +/* + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. + * + * Copyright 2010 Oracle and/or its affiliates. All rights reserved. + * + * Oracle and Java are registered trademarks of Oracle and/or its affiliates. + * Other names may be trademarks of their respective owners. + * + * The contents of this file are subject to the terms of either the GNU + * General Public License Version 2 only ("GPL") or the Common + * Development and Distribution License("CDDL") (collectively, the + * "License"). You may not use this file except in compliance with the + * License. You can obtain a copy of the License at + * http://www.netbeans.org/cddl-gplv2.html + * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the + * specific language governing permissions and limitations under the + * License. When distributing the software, include this License Header + * Notice in each file and include the License file at + * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the GPL Version 2 section of the License file that + * accompanied this code. 
If applicable, add the following below the + * License Header, with the fields enclosed by brackets [] replaced by + * your own identifying information: + * "Portions Copyrighted [year] [name of copyright owner]" + * + * If you wish your version of this file to be governed by only the CDDL + * or only the GPL Version 2, indicate your decision by adding + * "[Contributor] elects to include this software in this distribution + * under the [CDDL or GPL Version 2] license." If you do not indicate a + * single choice of license, a recipient has the option to distribute + * your version of this file under either the CDDL, the GPL Version 2 or + * to extend the choice of license to its licensees as provided above. + * However, if you add GPL Version 2 code and therefore, elected the GPL + * Version 2 license, then the option applies only if the new code is + * made subject to such option by the copyright holder. + * + * Contributor(s): + * + * Portions Copyrighted 2010 Sun Microsystems, Inc. + */ + +package org.netbeans.modules.parsing.lucene.support; + +/** + * A convertor used by the {@link Index} to convert user types + * into or from lucene Documents, Queries, Terms. + * The interface allows isolation of user code from the lucene + * specific types. + * @author Tomas Zezula + */ +public interface Convertor { + /** + * Converts given object + * @param p the object to be converted + * @return the result of conversion + */ + R convert (P p); +} diff --git a/parsing.lucene/src/org/netbeans/modules/parsing/lucene/support/Index.java b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/support/Index.java new file mode 100644 --- /dev/null +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/support/Index.java @@ -0,0 +1,149 @@ +/* + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. + * + * Copyright 2010 Oracle and/or its affiliates. All rights reserved. + * + * Oracle and Java are registered trademarks of Oracle and/or its affiliates. + * Other names may be trademarks of their respective owners. + * + * The contents of this file are subject to the terms of either the GNU + * General Public License Version 2 only ("GPL") or the Common + * Development and Distribution License("CDDL") (collectively, the + * "License"). You may not use this file except in compliance with the + * License. You can obtain a copy of the License at + * http://www.netbeans.org/cddl-gplv2.html + * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the + * specific language governing permissions and limitations under the + * License. When distributing the software, include this License Header + * Notice in each file and include the License file at + * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the GPL Version 2 section of the License file that + * accompanied this code. If applicable, add the following below the + * License Header, with the fields enclosed by brackets [] replaced by + * your own identifying information: + * "Portions Copyrighted [year] [name of copyright owner]" + * + * If you wish your version of this file to be governed by only the CDDL + * or only the GPL Version 2, indicate your decision by adding + * "[Contributor] elects to include this software in this distribution + * under the [CDDL or GPL Version 2] license." 
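A minimal Convertor sketch, assuming the indexed documents carry a stored field named "name" (the field name is illustrative):

    // Illustrative: turn a matched lucene Document back into the value the caller wants.
    final Convertor<Document, String> nameConvertor = new Convertor<Document, String>() {
        @Override
        public String convert(Document doc) {
            return doc.get("name");   // a null return is simply skipped by Index.query()
        }
    };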
If you do not indicate a + * single choice of license, a recipient has the option to distribute + * your version of this file under either the CDDL, the GPL Version 2 or + * to extend the choice of license to its licensees as provided above. + * However, if you add GPL Version 2 code and therefore, elected the GPL + * Version 2 license, then the option applies only if the new code is + * made subject to such option by the copyright holder. + * + * Contributor(s): + * + * Portions Copyrighted 2010 Sun Microsystems, Inc. + */ + +package org.netbeans.modules.parsing.lucene.support; + +import java.io.IOException; +import java.util.Collection; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.FieldSelector; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.Query; +import org.netbeans.api.annotations.common.NonNull; +import org.netbeans.api.annotations.common.NullAllowed; + +/** + * Lucene based index supporting queries and stores. The index instances + * do resource management (memory cache of used indexes, LRU cache of open + * file handles). The user is responsible for closing the index when it's no + * more used to keep the index consistent. + * @author Tomas Zezula + */ +public interface Index { + + /** + * An exception thrown by {@link Index} when operation is called on + * a closed index. + */ + public static final class IndexClosedException extends IOException { + } + + /** + * Check if the index already exists + * @return true if the index already exists on disk. + */ + boolean exists (); + + /** + * Checks the validity of the index. The index is invalid when it's broken. + * @param tryOpen when true the {@link Index} does exact but more expensive check. + * @return true when {@link Index} is not broken + * @throws IOException in case of IO problem + */ + boolean isValid (boolean tryOpen) throws IOException; + + /** + * Queries the {@link Index} by given queries. + * @param result the {@link Collection} to store query results into + * @param convertor the {@link Convertor} used to convert lucene documents into the user objects added into the result + * @param selector the selector used to select document's fields which should be loaded, if null all fields are loaded + * @param cancel the {@link AtomicBoolean} used to cancel the index iteration by the caller. When set to true the iteration + * is stopped. + * @param queries the queries to be performed on the {@link Index} + * @throws IOException in case of IO problem + * @throws InterruptedException when query was canceled + */ + void query (Collection result, @NonNull Convertor convertor, @NullAllowed FieldSelector selector, @NullAllowed AtomicBoolean cancel, @NonNull Query... queries) throws IOException, InterruptedException; + + /** + * Queries the {@link Index} by given queries. In addition to documents it also collects the terms which matched the queries. + * @param result the {@link Collection} to store query results into + * @param convertor the {@link Convertor} used to convert lucene documents into the user objects added into the result + * @param termConvertor the {@link Convertor} used to convert lucene terms into the user objects added into the result + * @param selector the selector used to select document's fields which should be loaded, if null all fields are loaded + * @param cancel the {@link AtomicBoolean} used to cancel the index iteration by the caller. 
When set to true the iteration + * is stopped. + * @param queries the queries to be performed on the {@link Index} + * @throws IOException in case of IO problem + * @throws InterruptedException when query was canceled + */ + void queryDocTerms(Map> result, @NonNull Convertor convertor, @NonNull Convertor termConvertor,@NullAllowed FieldSelector selector, @NullAllowed AtomicBoolean cancel, @NonNull Query... queries) throws IOException, InterruptedException; + + /** + * Queries the {@link Index}'s b-tree for terms starting by the start term and accepted by the filter. + * @param result the {@link Collection} to store results into + * @param start the first term to start the b-tree iteration with, if null the iteration start on the first term. + * @param filter converting the terms into the user objects which are added into the result or null to skeep them. + * The filter can stop the iteration by throwing the {@link StoppableConvertor.Stop}. + * @param cancel the {@link AtomicBoolean} used to cancel the index iteration by the caller. When set to true the iteration + * is stopped. + * @throws IOException in case of IO problem + * @throws InterruptedException when query was canceled + */ + void queryTerms(@NonNull Collection result, @NullAllowed Term start, @NonNull StoppableConvertor filter, @NullAllowed AtomicBoolean cancel) throws IOException, InterruptedException; + + /** + * Updates the {@link Index} by adding the toAdd objects and deleting toDelete objects. + * @param toAdd the objects to be added into the index + * @param toDelete the objects to be removed from the index + * @param docConvertor the {@link Convertor} used to convert toAdd objects into lucene's Documents which are stored into the {@link Index} + * @param queryConvertor the {@link Convertor} used to convert toDelete objects into lucene's Queries used to delete the documents from {@link Index} + * @param optimize if true the index is optimized. The optimized index is faster for queries but optimize takes significant time. + * @throws IOException in case of IO problem + */ + void store (@NonNull Collection toAdd, @NonNull Collection toDelete, @NonNull Convertor docConvertor, @NonNull Convertor queryConvertor, boolean optimize) throws IOException; + + /** + * Completely deletes the {@link Index} + * @throws IOException in case of IO problem + */ + void clear () throws IOException; + + /** + * Closes the {@link Index} + * @throws IOException in case of IO problem + */ + void close () throws IOException; +} diff --git a/parsing.lucene/src/org/netbeans/modules/parsing/lucene/support/IndexManager.java b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/support/IndexManager.java new file mode 100644 --- /dev/null +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/support/IndexManager.java @@ -0,0 +1,175 @@ +/* + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. + * + * Copyright 2010 Oracle and/or its affiliates. All rights reserved. + * + * Oracle and Java are registered trademarks of Oracle and/or its affiliates. + * Other names may be trademarks of their respective owners. + * + * The contents of this file are subject to the terms of either the GNU + * General Public License Version 2 only ("GPL") or the Common + * Development and Distribution License("CDDL") (collectively, the + * "License"). You may not use this file except in compliance with the + * License. You can obtain a copy of the License at + * http://www.netbeans.org/cddl-gplv2.html + * or nbbuild/licenses/CDDL-GPL-2-CP. 
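Taken together, a hedged end-to-end sketch of the Index contract above; the field names ("name", "ci-name"), the KeywordAnalyzer choice and the inline convertors are assumptions for illustration, cacheFolder stands for any writable directory, and exception handling is omitted:

    // Illustrative round trip: store two entries, then query them back by prefix.
    final Index index = IndexManager.createIndex(cacheFolder, new KeywordAnalyzer());
    final List<String> toAdd = new ArrayList<String>(Arrays.asList("Foo", "FooBar")); // store() drains this list
    index.store(
        toAdd,
        Collections.<String>emptySet(),
        new Convertor<String, Document>() {               // entry -> lucene Document
            @Override public Document convert(String name) {
                final Document doc = new Document();
                doc.add(new Field("name", name, Field.Store.YES, Field.Index.NOT_ANALYZED));
                return doc;
            }
        },
        new Convertor<String, Query>() {                  // entry -> delete Query (unused for a fresh index)
            @Override public Query convert(String name) {
                return new TermQuery(new Term("name", name));
            }
        },
        true);                                            // optimize after the write
    final List<String> names = new ArrayList<String>();
    index.query(
        names,
        new Convertor<Document, String>() {               // lucene Document -> entry
            @Override public String convert(Document doc) { return doc.get("name"); }
        },
        null,                                             // null selector: load all fields
        null,                                             // null cancel flag
        Queries.createQuery("name", "ci-name", "Foo", Queries.QueryKind.PREFIX));
    // names now holds "Foo" and "FooBar"
    index.close();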
See the License for the + * specific language governing permissions and limitations under the + * License. When distributing the software, include this License Header + * Notice in each file and include the License file at + * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the GPL Version 2 section of the License file that + * accompanied this code. If applicable, add the following below the + * License Header, with the fields enclosed by brackets [] replaced by + * your own identifying information: + * "Portions Copyrighted [year] [name of copyright owner]" + * + * If you wish your version of this file to be governed by only the CDDL + * or only the GPL Version 2, indicate your decision by adding + * "[Contributor] elects to include this software in this distribution + * under the [CDDL or GPL Version 2] license." If you do not indicate a + * single choice of license, a recipient has the option to distribute + * your version of this file under either the CDDL, the GPL Version 2 or + * to extend the choice of license to its licensees as provided above. + * However, if you add GPL Version 2 code and therefore, elected the GPL + * Version 2 license, then the option applies only if the new code is + * made subject to such option by the copyright holder. + * + * Contributor(s): + * + * Portions Copyrighted 2010 Sun Microsystems, Inc. + */ + +package org.netbeans.modules.parsing.lucene.support; + +import java.io.File; +import java.io.IOException; +import java.util.concurrent.Callable; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import org.apache.lucene.analysis.Analyzer; +import org.netbeans.api.annotations.common.NonNull; +import org.netbeans.modules.masterfs.providers.ProvidedExtensions; +import org.netbeans.modules.parsing.lucene.IndexFactory; +import org.netbeans.modules.parsing.lucene.LuceneIndexFactory; +import org.openide.util.Parameters; + +/** + * The {@link IndexManager} controls access to {@link Index} instances and acts + * as an {@link Index} factory. + * + * @author Tomas Zezula + */ +public final class IndexManager { + + private static final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); + + static IndexFactory factory = new LuceneIndexFactory(); //Unit tests overrides the factory + + private IndexManager() {} + + + /** + * The action to be performed under the {@link IndexManager}'s lock + */ + public static interface Action { + + /** + * The action + * @return the action result + * @throws IOException + * @throws InterruptedException + */ + public R run () throws IOException, InterruptedException; + } + + + /** + * Runs the given action under {@link IndexManager}'s write lock. + * @param action the action to be performed. 
+ * @return the result of the action + * @throws IOException when the action throws {@link IOException} + * @throws InterruptedException when the action throws {@link InterruptedException} + */ + public static R writeAccess (final Action action) throws IOException, InterruptedException { + assert action != null; + lock.writeLock().lock(); + try { + return ProvidedExtensions.priorityIO(new Callable() { + @Override + public R call() throws Exception { + return action.run(); + } + }); + } catch (IOException ioe) { + //rethrow ioe + throw ioe; + } catch (InterruptedException ie) { + //rethrow ioe + throw ie; + } catch (RuntimeException re) { + //rethrow ioe + throw re; + } catch (Exception e) { + throw new IOException(e); + } finally { + lock.writeLock().unlock(); + } + } + + /** + * Runs the given action under {@link IndexManager}'s read lock. + * @param action the action to be performed. + * @return the result of the action + * @throws IOException when the action throws {@link IOException} + * @throws InterruptedException when the action throws {@link InterruptedException} + */ + public static R readAccess (final Action action) throws IOException, InterruptedException { + assert action != null; + lock.readLock().lock(); + try { + return ProvidedExtensions.priorityIO(new Callable() { + @Override + public R call() throws Exception { + return action.run(); + } + }); + } catch (IOException ioe) { + //rethrow ioe + throw ioe; + } catch (InterruptedException ie) { + //rethrow ioe + throw ie; + } catch (RuntimeException re) { + //rethrow ioe + throw re; + } catch (Exception e) { + throw new IOException(e); + } finally { + lock.readLock().unlock(); + } + } + + /** + * Checks if the caller thread holds the {@link IndexManager}'s write lock + * @return true when the caller holds the lock + */ + public static boolean holdsWriteLock () { + return lock.isWriteLockedByCurrentThread(); + } + + /** + * Creates a new {@link Index} for given folder with given lucene Analyzer. + * The returned {@link Index} is not cached, next call with the same arguments returns a different instance + * of {@link Index}. The caller is responsible to cache the returned {@link Index}. + * @param cacheFolder the folder in which the index is stored + * @param analyzer the lucene Analyzer used to split fields into tokens. + * @return the created {@link Index} + * @throws IOException in case of IO problem. + */ + public static Index createIndex(final @NonNull File cacheFolder, final @NonNull Analyzer analyzer) throws IOException { + Parameters.notNull("cacheFolder", cacheFolder); //NOI18N + Parameters.notNull("analyzer", analyzer); //NOI18N + return factory.createIndex(cacheFolder, analyzer); + } + +} diff --git a/parsing.lucene/src/org/netbeans/modules/parsing/lucene/support/Queries.java b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/support/Queries.java new file mode 100644 --- /dev/null +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/support/Queries.java @@ -0,0 +1,635 @@ +/* + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. + * + * Copyright 2010 Oracle and/or its affiliates. All rights reserved. + * + * Oracle and Java are registered trademarks of Oracle and/or its affiliates. + * Other names may be trademarks of their respective owners. + * + * The contents of this file are subject to the terms of either the GNU + * General Public License Version 2 only ("GPL") or the Common + * Development and Distribution License("CDDL") (collectively, the + * "License"). 
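The locking contract above in one short sketch: callers wrap composite work in readAccess or writeAccess so that queries and stores cannot interleave (LuceneIndex.store() and clear() already take the write side internally). The index, nameConvertor and query variables are assumed to exist, for example as in the earlier sketches:

    // Illustrative: run a query under the shared read lock.
    final List<String> names = IndexManager.readAccess(new IndexManager.Action<List<String>>() {
        @Override
        public List<String> run() throws IOException, InterruptedException {
            final List<String> result = new ArrayList<String>();
            index.query(result, nameConvertor, null, null, query);
            return result;
        }
    });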
You may not use this file except in compliance with the + * License. You can obtain a copy of the License at + * http://www.netbeans.org/cddl-gplv2.html + * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the + * specific language governing permissions and limitations under the + * License. When distributing the software, include this License Header + * Notice in each file and include the License file at + * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the GPL Version 2 section of the License file that + * accompanied this code. If applicable, add the following below the + * License Header, with the fields enclosed by brackets [] replaced by + * your own identifying information: + * "Portions Copyrighted [year] [name of copyright owner]" + * + * If you wish your version of this file to be governed by only the CDDL + * or only the GPL Version 2, indicate your decision by adding + * "[Contributor] elects to include this software in this distribution + * under the [CDDL or GPL Version 2] license." If you do not indicate a + * single choice of license, a recipient has the option to distribute + * your version of this file under either the CDDL, the GPL Version 2 or + * to extend the choice of license to its licensees as provided above. + * However, if you add GPL Version 2 code and therefore, elected the GPL + * Version 2 license, then the option applies only if the new code is + * made subject to such option by the copyright holder. + * + * Contributor(s): + * + * Portions Copyrighted 2010 Sun Microsystems, Inc. + */ + +package org.netbeans.modules.parsing.lucene.support; + +import java.io.IOException; +import java.util.BitSet; +import java.util.regex.Pattern; +import org.apache.lucene.document.FieldSelector; +import org.apache.lucene.document.FieldSelectorResult; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.TermDocs; +import org.apache.lucene.index.TermEnum; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanClause.Occur; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.DocIdSet; +import org.apache.lucene.search.Filter; +import org.apache.lucene.search.FilteredQuery; +import org.apache.lucene.search.FilteredTermEnum; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.PrefixTermEnum; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.util.OpenBitSet; +import org.netbeans.api.annotations.common.NonNull; +import org.netbeans.modules.parsing.lucene.TermCollector; +import org.openide.util.Parameters; + +/** + * A factory class for creating queries and filed selectors + * @author Tomas Zezula + */ +public final class Queries { + + /** + * Encodes a type of the query used by {@link Queries#createQuery} + * and {@link Queries#createTermCollectingQuery} + */ + public enum QueryKind { + /** + * The created query looks for exact match with given text + */ + EXACT, + + /** + * The given text is a prefix of requested value. + */ + PREFIX, + + /** + * The given text is a case insensitive prefix of requested value. + */ + CASE_INSENSITIVE_PREFIX, + + /** + * The given text is treated as camel case pattern used to match the value. 
+ */ + CAMEL_CASE, + + /** + * The given text is treated as case insensitive camel case pattern used to match the value. + */ + CASE_INSENSITIVE_CAMEL_CASE, + + /** + * The given text is a regular expression used to match the value. + */ + REGEXP, + + /** + * The given text is a case insensitive regular expression used to match the value. + */ + CASE_INSENSITIVE_REGEXP; + } + + /** + * Creates a standard lucene query querying the index for + * documents having indexed field containing the given value + * @param fieldName the name of the field + * @param caseInsensitiveFieldName the name of the field containing the case insensitive value + * @param value the value to search for + * @param kind the type of query, {@link Queries.QueryKind} + * @return the created query + */ + public static Query createQuery ( + final @NonNull String fieldName, + final @NonNull String caseInsensitiveFieldName, + final @NonNull String value, + final @NonNull QueryKind kind) { + Parameters.notNull("fieldName", fieldName); //NOI18N + Parameters.notNull("caseInsensitiveFieldName", caseInsensitiveFieldName); //NOI18N + Parameters.notNull("value", value); //NOI18N + Parameters.notNull("kind", kind); //NOI18N + return createQueryImpl(fieldName, caseInsensitiveFieldName, value, kind, new StandardQueryFactory()); + } + + /** + * Creates an extended lucene query querying the index for + * documents having indexed field containing the given value. + * This query is required by the {@link Index#queryDocTerms} method, + * in addition to matching documents the query also collects the matched terms. + * @param fieldName the name of the field + * @param caseInsensitiveFieldName the name of the field containing the case insensitive value + * @param value the value to search for + * @param kind the type of query {@link Queries.QueryKind} + * @return the created query + */ + public static Query createTermCollectingQuery( + final @NonNull String fieldName, + final @NonNull String caseInsensitiveFieldName, + final @NonNull String value, + final @NonNull QueryKind kind) { + Parameters.notNull("fieldName", fieldName); //NOI18N + Parameters.notNull("caseInsensitiveFieldName", caseInsensitiveFieldName); //NOI18N + Parameters.notNull("value", value); //NOI18N + Parameters.notNull("kind", kind); //NOI18N + return createQueryImpl(fieldName, caseInsensitiveFieldName, value, kind, new TCQueryFactory()); + } + + /** + * Creates a FieldSelector loading the given fields. + * @param fieldsToLoad the fields to be loaded into the document. + * @return the created FieldSelector + */ + public static FieldSelector createFieldSelector(final @NonNull String... 
fieldsToLoad) { + return new FieldSelectorImpl(fieldsToLoad); + } + + + // + private static Query createQueryImpl( + final @NonNull String fieldName, + final @NonNull String caseInsensitiveFieldName, + final @NonNull String value, + final @NonNull QueryKind kind, + final @NonNull QueryFactory f) { + switch (kind) { + case EXACT: + return f.createTermQuery(fieldName, value); + case PREFIX: + if (value.length() == 0) { + return f.createAllDocsQuery(fieldName); + } + else { + return f.createPrefixQuery(fieldName, value); + } + case CASE_INSENSITIVE_PREFIX: + if (value.length() == 0) { + return f.createAllDocsQuery(caseInsensitiveFieldName); + } + else { + return f.createPrefixQuery(caseInsensitiveFieldName, value.toLowerCase()); + } + case CAMEL_CASE: + if (value.length() == 0) { + throw new IllegalArgumentException (); + } else { + return f.createRegExpQuery(fieldName,createCamelCaseRegExp(value, true), true); + } + case CASE_INSENSITIVE_REGEXP: + if (value.length() == 0) { + throw new IllegalArgumentException (); + } else { + return f.createRegExpQuery(caseInsensitiveFieldName, value.toLowerCase(), false); + } + case REGEXP: + if (value.length() == 0) { + throw new IllegalArgumentException (); + } else { + return f.createRegExpQuery(fieldName, value, true); + } + case CASE_INSENSITIVE_CAMEL_CASE: + if (value.length() == 0) { + //Special case (all) handle in different way + return f.createAllDocsQuery(caseInsensitiveFieldName); + } + else { + final Query pq = f.createPrefixQuery(caseInsensitiveFieldName, value.toLowerCase()); + final Query fq = f.createRegExpQuery(caseInsensitiveFieldName, createCamelCaseRegExp(value, false), false); + final BooleanQuery result = f.createBooleanQuery(); + result.add(pq, Occur.SHOULD); + result.add(fq, Occur.SHOULD); + return result; + } + default: + throw new UnsupportedOperationException (kind.toString()); + } + } + + private static String createCamelCaseRegExp(final String camel, final boolean caseSensitive) { + final StringBuilder sb = new StringBuilder(); + int lastIndex = 0; + int index; + do { + index = findNextUpper(camel, lastIndex + 1); + String token = camel.substring(lastIndex, index == -1 ? camel.length(): index); + sb.append(Pattern.quote(caseSensitive ? token : token.toLowerCase())); + sb.append( index != -1 ? 
"[\\p{javaLowerCase}\\p{Digit}_\\$]*" : ".*"); // NOI18N + lastIndex = index; + } while(index != -1); + return sb.toString(); + } + + private static int findNextUpper(String text, int offset ) { + for( int i = offset; i < text.length(); i++ ) { + if ( Character.isUpperCase(text.charAt(i)) ) { + return i; + } + } + return -1; + } + + private static abstract class DocumentVisitor { + + public void generate(IndexReader reader, TermEnum enumerator) throws IOException { + final int[] docs = new int[32]; + final int[] freqs = new int[32]; + final TermDocs termDocs = reader.termDocs(); + try { + do { + final Term term = enumerator.term(); + if (term == null) { + break; + } + termDocs.seek(term); + while (true) { + final int count = termDocs.read(docs, freqs); + if (count != 0) { + for (int i = 0; i < count; i++) { + visit(term, docs[i]); + } + } else { + break; + } + } + } while (enumerator.next()); + } finally { + termDocs.close(); + } + } + + abstract public void visit(Term term, int doc); + } + + private static abstract class TCFilter extends Filter { + public abstract void attach (TermCollector collector); + } + + private static abstract class AbstractTCFilter extends TCFilter { + + private TermCollector termCollector; + + @Override + public final BitSet bits(IndexReader reader) throws IOException { + final FilteredTermEnum enumerator = getTermEnum(reader); + try { + final BitSet bitSet = new BitSet(reader.maxDoc()); + new DocumentVisitor() { + @Override + public void visit(Term term, int doc) { + bitSet.set(doc); + if (termCollector != null) { + termCollector.add(doc, term); + } + } + }.generate(reader, enumerator); + return bitSet; + } finally { + enumerator.close(); + } + } + + @Override + public final DocIdSet getDocIdSet(IndexReader reader) throws IOException { + final FilteredTermEnum enumerator = getTermEnum(reader); + try { + // if current term in enum is null, the enum is empty -> shortcut + if (enumerator.term() == null) { + return DocIdSet.EMPTY_DOCIDSET; + } + // else fill into a OpenBitSet + final OpenBitSet bitSet = new OpenBitSet(reader.maxDoc()); + new DocumentVisitor() { + @Override + public void visit(Term term, int doc) { + bitSet.set(doc); + if (termCollector != null) { + termCollector.add(doc, term); + } + } + }.generate(reader, enumerator); + return bitSet; + } finally { + enumerator.close(); + } + } + + @Override + public final void attach(final TermCollector tc) { + this.termCollector = tc; + } + + protected abstract FilteredTermEnum getTermEnum(IndexReader reader) throws IOException; + + } + + private static class RegexpTermEnum extends FilteredTermEnum { + + private final String fieldName; + private final String startPrefix; + private final Pattern pattern; + private boolean endEnum; + + public RegexpTermEnum( + final IndexReader in, + final String fieldName, + final Pattern pattern, + final String startPrefix) throws IOException { + final Term term = new Term(fieldName,startPrefix); + this.fieldName = term.field(); + this.pattern = pattern; + this.startPrefix = startPrefix; + setEnum(in.terms(term)); + } + + @Override + protected boolean termCompare(Term term) { + if (fieldName == term.field()) { + String searchText = term.text(); + if (searchText.startsWith(startPrefix)) { + return pattern.matcher(term.text()).matches(); + } + } + endEnum = true; + return false; + } + + @Override + public float difference() { + return 1.0f; + } + + @Override + protected boolean endEnum() { + return endEnum; + } + } + + private static class RegexpFilter extends AbstractTCFilter { + 
+ private final String fieldName; + private final String startPrefix; + private final Pattern pattern; + + public RegexpFilter(final String fieldName, final String regexp, final boolean caseSensitive) { + this.fieldName = fieldName; + this.pattern = caseSensitive ? Pattern.compile(regexp) : Pattern.compile(regexp, Pattern.CASE_INSENSITIVE); + this.startPrefix = getStartText(regexp); + } + + protected FilteredTermEnum getTermEnum(final @NonNull IndexReader reader) throws IOException { + return new RegexpTermEnum(reader, fieldName, pattern, startPrefix); + } + + private static String getStartText(final String regexp) { + if (!Character.isJavaIdentifierStart(regexp.charAt(0))) { + return ""; //NOI18N + } + final StringBuilder startBuilder = new StringBuilder (); + startBuilder.append(regexp.charAt(0)); + for (int i=1; i + +} diff --git a/parsing.lucene/src/org/netbeans/modules/parsing/lucene/support/StoppableConvertor.java b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/support/StoppableConvertor.java new file mode 100644 --- /dev/null +++ b/parsing.lucene/src/org/netbeans/modules/parsing/lucene/support/StoppableConvertor.java @@ -0,0 +1,67 @@ +/* + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. + * + * Copyright 2010 Oracle and/or its affiliates. All rights reserved. + * + * Oracle and Java are registered trademarks of Oracle and/or its affiliates. + * Other names may be trademarks of their respective owners. + * + * The contents of this file are subject to the terms of either the GNU + * General Public License Version 2 only ("GPL") or the Common + * Development and Distribution License("CDDL") (collectively, the + * "License"). You may not use this file except in compliance with the + * License. You can obtain a copy of the License at + * http://www.netbeans.org/cddl-gplv2.html + * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the + * specific language governing permissions and limitations under the + * License. When distributing the software, include this License Header + * Notice in each file and include the License file at + * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the GPL Version 2 section of the License file that + * accompanied this code. If applicable, add the following below the + * License Header, with the fields enclosed by brackets [] replaced by + * your own identifying information: + * "Portions Copyrighted [year] [name of copyright owner]" + * + * If you wish your version of this file to be governed by only the CDDL + * or only the GPL Version 2, indicate your decision by adding + * "[Contributor] elects to include this software in this distribution + * under the [CDDL or GPL Version 2] license." If you do not indicate a + * single choice of license, a recipient has the option to distribute + * your version of this file under either the CDDL, the GPL Version 2 or + * to extend the choice of license to its licensees as provided above. + * However, if you add GPL Version 2 code and therefore, elected the GPL + * Version 2 license, then the option applies only if the new code is + * made subject to such option by the copyright holder. + * + * Contributor(s): + * + * Portions Copyrighted 2010 Sun Microsystems, Inc. + */ + +package org.netbeans.modules.parsing.lucene.support; + +/** + * A convertor used by the {@link Index#queryTerms} to convert lucene Terms + * into user types. 
+ * The interface allows isolation of user code from the lucene
+ * specific types.
+ * @author Tomas Zezula
+ */
+public interface StoppableConvertor<P,R> {
+
+    /**
+     * The exception used by the convertor to stop the iteration.
+     */
+    public static final class Stop extends Exception {};
+
+    /**
+     * Converts the given object
+     * @param p the object to be converted
+     * @return the result of conversion
+     * @throws Stop to stop the index iteration
+     */
+    R convert (P param) throws Stop;
+
+}
diff --git a/parsing.api/test/unit/src/org/netbeans/modules/parsing/impl/indexing/lucene/util/LRUCacheTest.java b/parsing.lucene/test/unit/src/org/netbeans/modules/parsing/lucene/LRUCacheTest.java
rename from parsing.api/test/unit/src/org/netbeans/modules/parsing/impl/indexing/lucene/util/LRUCacheTest.java
rename to parsing.lucene/test/unit/src/org/netbeans/modules/parsing/lucene/LRUCacheTest.java
--- a/parsing.api/test/unit/src/org/netbeans/modules/parsing/impl/indexing/lucene/util/LRUCacheTest.java
+++ b/parsing.lucene/test/unit/src/org/netbeans/modules/parsing/lucene/LRUCacheTest.java
@@ -40,7 +40,7 @@
  * Portions Copyrighted 2009 Sun Microsystems, Inc.
  */
 
-package org.netbeans.modules.parsing.impl.indexing.lucene.util;
+package org.netbeans.modules.parsing.lucene;
 
 import java.util.HashSet;
 import java.util.Set;
@@ -81,6 +81,7 @@
     }
 
     private static class TestEvictionPolicy implements EvictionPolicy<Integer, Evictable> {
+        @Override
         public boolean shouldEvict(int size, Integer key, Evictable value) {
             return size > 10;
         }
@@ -94,6 +95,7 @@
             this.value = i;
         }
 
+        @Override
         public void evicted() {
             used.remove(value);
         }
diff --git a/parsing.lucene/test/unit/src/org/netbeans/modules/parsing/lucene/LuceneIndexTest.java b/parsing.lucene/test/unit/src/org/netbeans/modules/parsing/lucene/LuceneIndexTest.java
new file mode 100644
--- /dev/null
+++ b/parsing.lucene/test/unit/src/org/netbeans/modules/parsing/lucene/LuceneIndexTest.java
@@ -0,0 +1,202 @@
+/*
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
+ *
+ * Copyright 2010 Oracle and/or its affiliates. All rights reserved.
+ *
+ * Oracle and Java are registered trademarks of Oracle and/or its affiliates.
+ * Other names may be trademarks of their respective owners.
+ *
+ * The contents of this file are subject to the terms of either the GNU
+ * General Public License Version 2 only ("GPL") or the Common
+ * Development and Distribution License("CDDL") (collectively, the
+ * "License"). You may not use this file except in compliance with the
+ * License. You can obtain a copy of the License at
+ * http://www.netbeans.org/cddl-gplv2.html
+ * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the
+ * specific language governing permissions and limitations under the
+ * License. When distributing the software, include this License Header
+ * Notice in each file and include the License file at
+ * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Oracle in the GPL Version 2 section of the License file that
+ * accompanied this code. If applicable, add the following below the
+ * License Header, with the fields enclosed by brackets [] replaced by
+ * your own identifying information:
+ * "Portions Copyrighted [year] [name of copyright owner]"
+ *
+ * If you wish your version of this file to be governed by only the CDDL
+ * or only the GPL Version 2, indicate your decision by adding
+ * "[Contributor] elects to include this software in this distribution
+ * under the [CDDL or GPL Version 2] license." If you do not indicate a
+ * single choice of license, a recipient has the option to distribute
+ * your version of this file under either the CDDL, the GPL Version 2 or
+ * to extend the choice of license to its licensees as provided above.
+ * However, if you add GPL Version 2 code and therefore, elected the GPL
+ * Version 2 license, then the option applies only if the new code is
+ * made subject to such option by the copyright holder.
+ *
+ * Contributor(s):
+ *
+ * Portions Copyrighted 2010 Sun Microsystems, Inc.
+ */
+
+package org.netbeans.modules.parsing.lucene;
+
+/**
+ *
+ * @author Tomas Zezula
+ */
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import org.apache.lucene.analysis.KeywordAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.store.Directory;
+import org.netbeans.junit.NbTestCase;
+import org.netbeans.modules.parsing.lucene.support.Convertor;
+
+/**
+ *
+ * @author Tomas Zezula
+ */
+public class LuceneIndexTest extends NbTestCase {
+
+    public LuceneIndexTest (String testName) {
+        super (testName);
+    }
+
+    @Override
+    protected void setUp() throws Exception {
+        super.setUp();
+        this.clearWorkDir();
+        //Prepare indices
+    }
+
+    public void testIsValid() throws Exception {
+        final File wd = getWorkDir();
+        final File cache = new File(wd,"cache");
+        cache.mkdirs();
+        final LuceneIndex index = LuceneIndex.create(cache, new KeywordAnalyzer());
+        //Empty index => invalid
+        assertFalse(index.isValid(true));
+
+        clearValidityCache(index);
+        List<String> refs = new ArrayList<String>();
+        refs.add("A");
+        Set<String> toDel = new HashSet<String>();
+        index.store(
+                refs,
+                toDel,
+                new StrToDocConvertor("resources"),
+                new StrToQueryCovertor("resource"),
+                true);
+        //Existing index => valid
+        assertTrue(index.isValid(true));
+        assertTrue(cache.listFiles().length>0);
+
+        clearValidityCache(index);
+        createLock(index);
+        //Index with orphan lock => invalid
+        assertFalse(index.isValid(true));
+        assertTrue(cache.listFiles().length==0);
+
+        refs.add("B");
+        clearValidityCache(index);
+        index.store(
+                refs,
+                toDel,
+                new StrToDocConvertor("resources"),
+                new StrToQueryCovertor("resource"),
+                true);
+        assertTrue(index.isValid(true));
+        assertTrue(cache.listFiles().length>0);
+
+        //Broken index => invalid
+        clearValidityCache(index);
+        File bt = null;
+        for (File file : cache.listFiles()) {
+            if (file.getName().endsWith(".cfs")) {
+                bt = file;
+                break;
+            }
+        }
+        assertNotNull(bt);
+        FileOutputStream out = new FileOutputStream(bt);
+        try {
+            out.write(new byte[] {0,0,0,0,0,0,0,0,0,0}, 0, 10);
+        } finally {
+            out.close();
+        }
+        assertFalse(index.isValid(true));
+        assertTrue(cache.listFiles().length==0);
+
+    }
+
+
+    private void createLock(final LuceneIndex index) throws NoSuchFieldException, IllegalArgumentException, IllegalAccessException, IOException {
+        final Class<LuceneIndex> li = LuceneIndex.class;
+        final java.lang.reflect.Field dirCache = li.getDeclaredField("dirCache"); //NOI18N
+        dirCache.setAccessible(true);
+        Object o = dirCache.get(index);
+        final java.lang.reflect.Field directory = o.getClass().getDeclaredField("fsDir"); //NOI18N
+        directory.setAccessible(true);
+        Directory dir = (Directory) directory.get(o);
+        dir.makeLock("test").obtain(); //NOI18N
+    }
+
+
+    private void clearValidityCache(final LuceneIndex index) throws NoSuchFieldException, IllegalArgumentException, IllegalAccessException, IOException {
+        final Class<LuceneIndex> li = LuceneIndex.class;
+        final java.lang.reflect.Field dirCache = li.getDeclaredField("dirCache"); //NOI18N
+        dirCache.setAccessible(true);
+        Object o = dirCache.get(index);
+        final java.lang.reflect.Field reader = o.getClass().getDeclaredField("reader");
+        reader.setAccessible(true);
+        IndexReader r = (IndexReader) reader.get(o);
+        if (r != null) {
+            r.close();
+        }
+        reader.set(o,null);
+    }
+
+    private static class StrToDocConvertor implements Convertor<String, Document> {
+
+        private final String name;
+
+        public StrToDocConvertor(final String name) {
+            this.name = name;
+        }
+
+        @Override
+        public Document convert(final String p) {
+            final Document doc = new Document();
+            doc.add(new Field(name, p, Field.Store.YES, Field.Index.ANALYZED));
+            return doc;
+        }
+    }
+
+    private static class StrToQueryCovertor implements Convertor<String, Query> {
+
+        private final String name;
+
+        public StrToQueryCovertor(final String name) {
+            this.name = name;
+        }
+
+        @Override
+        public Query convert(String p) {
+            return new TermQuery(new Term(name, p));
+        }
+    }
+
+}
+
diff --git a/parsing.lucene/test/unit/src/org/netbeans/modules/parsing/lucene/support/IndexManagerTestUtilities.java b/parsing.lucene/test/unit/src/org/netbeans/modules/parsing/lucene/support/IndexManagerTestUtilities.java
new file mode 100644
--- /dev/null
+++ b/parsing.lucene/test/unit/src/org/netbeans/modules/parsing/lucene/support/IndexManagerTestUtilities.java
@@ -0,0 +1,65 @@
+/*
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
+ *
+ * Copyright 2010 Oracle and/or its affiliates. All rights reserved.
+ *
+ * Oracle and Java are registered trademarks of Oracle and/or its affiliates.
+ * Other names may be trademarks of their respective owners.
+ *
+ * The contents of this file are subject to the terms of either the GNU
+ * General Public License Version 2 only ("GPL") or the Common
+ * Development and Distribution License("CDDL") (collectively, the
+ * "License"). You may not use this file except in compliance with the
+ * License. You can obtain a copy of the License at
+ * http://www.netbeans.org/cddl-gplv2.html
+ * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the
+ * specific language governing permissions and limitations under the
+ * License. When distributing the software, include this License Header
+ * Notice in each file and include the License file at
+ * nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this
+ * particular file as subject to the "Classpath" exception as provided
+ * by Oracle in the GPL Version 2 section of the License file that
+ * accompanied this code. If applicable, add the following below the
+ * License Header, with the fields enclosed by brackets [] replaced by
+ * your own identifying information:
+ * "Portions Copyrighted [year] [name of copyright owner]"
+ *
+ * If you wish your version of this file to be governed by only the CDDL
+ * or only the GPL Version 2, indicate your decision by adding
+ * "[Contributor] elects to include this software in this distribution
+ * under the [CDDL or GPL Version 2] license." If you do not indicate a
+ * single choice of license, a recipient has the option to distribute
+ * your version of this file under either the CDDL, the GPL Version 2 or
+ * to extend the choice of license to its licensees as provided above.
+ * However, if you add GPL Version 2 code and therefore, elected the GPL
+ * Version 2 license, then the option applies only if the new code is
+ * made subject to such option by the copyright holder.
+ *
+ * Contributor(s):
+ *
+ * Portions Copyrighted 2010 Sun Microsystems, Inc.
+ */
+
+package org.netbeans.modules.parsing.lucene.support;
+
+import org.netbeans.api.annotations.common.NonNull;
+import org.netbeans.modules.parsing.lucene.IndexFactory;
+import org.openide.util.Parameters;
+
+/**
+ *
+ * @author Tomas Zezula
+ */
+public final class IndexManagerTestUtilities {
+    private IndexManagerTestUtilities(){}
+
+    public static IndexFactory getIndexFactory() {
+        return IndexManager.factory;
+    }
+
+    public static void setIndexFactory(final @NonNull IndexFactory factory) {
+        Parameters.notNull("factory",factory); //NOI18N
+        IndexManager.factory = factory;
+    }
+
+}
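
A minimal usage sketch of the new IndexManager API introduced in this patch, assuming only the signatures visible above (createIndex(File, Analyzer) and writeAccess taking an IndexManager.Action); the helper class name, the cache-folder parameter and the choice of KeywordAnalyzer are illustrative, not part of the patch:

import java.io.File;
import java.io.IOException;
import org.apache.lucene.analysis.KeywordAnalyzer;
import org.netbeans.modules.parsing.lucene.support.Index;
import org.netbeans.modules.parsing.lucene.support.IndexManager;

class IndexManagerUsageSketch {
    // Creates an Index while holding the write lock; per the createIndex javadoc
    // the caller is responsible for caching the returned Index.
    static Index openIndex(final File cacheFolder) throws IOException, InterruptedException {
        return IndexManager.writeAccess(new IndexManager.Action<Index>() {
            @Override
            public Index run() throws IOException, InterruptedException {
                // hypothetical analyzer choice for illustration only
                return IndexManager.createIndex(cacheFolder, new KeywordAnalyzer());
            }
        });
    }
}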
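A comparable sketch for the Queries factory, using the createQuery and createFieldSelector signatures added above; the field names "name" and "ci-name" are hypothetical placeholders for a pair of fields where the second stores the lower-cased copy of the value:

import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.search.Query;
import org.netbeans.modules.parsing.lucene.support.Queries;

class QueriesUsageSketch {
    // Case insensitive prefix search over the hypothetical "name"/"ci-name" field pair.
    static Query prefixQuery(final String prefix) {
        return Queries.createQuery("name", "ci-name", prefix, Queries.QueryKind.CASE_INSENSITIVE_PREFIX);
    }

    // Field selector that loads only the "name" field when documents are read back.
    static FieldSelector nameOnly() {
        return Queries.createFieldSelector("name");
    }
}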
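Finally, a sketch of a StoppableConvertor implementation, assuming its first type parameter is instantiated with Lucene's Term as the javadoc for Index.queryTerms suggests; the class name and the 100-term limit are purely illustrative:

import org.apache.lucene.index.Term;
import org.netbeans.modules.parsing.lucene.support.StoppableConvertor;

class LimitedTermTextConvertor implements StoppableConvertor<Term, String> {
    private int remaining = 100;

    // Returns the term text until the limit is reached, then stops the iteration
    // by throwing the Stop exception defined on StoppableConvertor.
    @Override
    public String convert(final Term term) throws Stop {
        if (remaining-- <= 0) {
            throw new Stop();
        }
        return term.text();
    }
}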