9 changes: 8 additions & 1 deletion src/net/hillsdon/reviki/search/SearchEngine.java
@@ -104,5 +104,12 @@ public interface SearchEngine {
* @return A quoted version that escapes any characters that have special significance in the search syntax.
*/
String escape(String in);


/**
* Checks whether the search index entry for the given page is up to date.
*
* @param page Page to check
* @return Boolean indicating whether the indexed contents and attributes for the page are current
* @throws IOException On error reading from the search index
* @throws PageStoreException On error rendering the page for comparison
*/
boolean isIndexUpToDate(PageInfo page) throws IOException, PageStoreException;
}
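A minimal sketch of how a caller might use the new check to avoid redundant re-indexing (hypothetical caller: searchEngine and page stand for an existing SearchEngine and PageInfo, and exception handling is omitted):

// Hypothetical caller: re-index only when the recorded hashes no longer match the page.
if (!searchEngine.isIndexUpToDate(page)) {
  searchEngine.index(page, false);
}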
@@ -140,4 +140,8 @@ public Set<String> outgoingLinks(final String page) throws IOException, PageStoreException {
return _delegate.outgoingLinks(page);
}

public boolean isIndexUpToDate(PageInfo page) throws IOException, PageStoreException {
return _delegate.isIndexUpToDate(page);
}

}
41 changes: 37 additions & 4 deletions src/net/hillsdon/reviki/search/impl/LuceneSearcher.java
@@ -53,7 +53,6 @@
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;

import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.queryParser.QueryParser.Operator;
@@ -109,6 +108,8 @@ public NoQueryPerformedException(final QuerySyntaxException cause) {

private static final String[] ALL_SEARCH_FIELDS = new String[] {FIELD_PATH, FIELD_PATH_LOWER, FIELD_TITLE_TOKENIZED, FIELD_CONTENT, FIELD_ATTRIBUTES};

// Per-page caches, keyed by wiki name and path, of the content hash and attribute hash most
// recently written to the index; isIndexUpToDate compares against them to detect stale entries.
private final Map<String, Integer> _contentHashCodes = new LinkedHashMap<String, Integer>();
private final Map<String, Integer> _attrHashCodes = new LinkedHashMap<String, Integer>();
private final String _wikiName;
private final File _dir;
private final List<File> _otherDirs;
@@ -162,8 +163,7 @@ public TokenStream tokenStream(final String fieldName, final Reader reader) {
return perField;
}

private Document createWikiPageDocument(final PageInfo page) throws IOException, PageStoreException {
RenderedPage renderedPage = _renderedPageFactory.create(page, URLOutputFilter.NULL);
private Document createWikiPageDocument(final PageInfo page, final RenderedPage renderedPage) throws IOException, PageStoreException {
final String path = page.getPath();
final String wiki = page.getWiki();
final String content = page.getContent();
@@ -226,6 +226,28 @@ private void replaceDocument(final String keyField, final Document document) thr
writer.close();
}
}

private Integer getPageContentHashCode(final PageInfo page) {
return _contentHashCodes.get(uidFor(page.getWiki(), page.getPath()));
}

private void setPageContentHashCode(final PageInfo page, final Integer hashCode) {
_contentHashCodes.put(uidFor(page.getWiki(), page.getPath()), hashCode);
}

private Integer computePageAttrHashCode(final PageInfo page) {
List<String> attrs = attributesToStringList(page.getAttributes());
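// Sort so the hash does not depend on attribute iteration order.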
Collections.sort(attrs);
return attrs.hashCode();
}

private Integer getPageAttrHashCode(final PageInfo page) {
return _attrHashCodes.get(uidFor(page.getWiki(), page.getPath()));
}

private void setPageAttrHashCode(final PageInfo page, final Integer hashCode) {
_attrHashCodes.put(uidFor(page.getWiki(), page.getPath()), hashCode);
}

// Lucene allows multiple non-deleting readers and at most one writer at a time.
// It maintains a lock file but we never want it to fail to take the lock, so serialize writes.
@@ -235,7 +257,10 @@ public synchronized void index(final PageInfo page, final boolean buildingIndex)
}
if (!isIndexBeingBuilt() || buildingIndex) {
createIndexIfNecessary();
replaceWikiDocument(createWikiPageDocument(page));
RenderedPage renderedPage = _renderedPageFactory.create(page, URLOutputFilter.NULL);
replaceWikiDocument(createWikiPageDocument(page, renderedPage));
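// Remember the hashes just written to the index so isIndexUpToDate can spot later changes.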
setPageContentHashCode(page, renderedPage.getPageHashCode());
setPageAttrHashCode(page, computePageAttrHashCode(page));
}
}

@@ -520,4 +545,12 @@ public void setIndexBeingBuilt(boolean buildingIndex) throws IOException {
public String escape(final String in) {
return QueryParser.escape(in);
}

public boolean isIndexUpToDate(final PageInfo page) throws IOException, PageStoreException {
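// Re-render the page and compare its content and attribute hashes with those recorded at index time.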
RenderedPage renderedPage = _renderedPageFactory.create(page, URLOutputFilter.NULL);
Integer hashForContent = getPageContentHashCode(page);
Integer hashForAttrs = getPageAttrHashCode(page);

return hashForContent != null && hashForAttrs != null && hashForContent.equals(renderedPage.getPageHashCode()) && hashForAttrs.equals(computePageAttrHashCode(page));
}
}
@@ -20,8 +20,10 @@
import net.hillsdon.reviki.search.SearchEngine;
import net.hillsdon.reviki.vc.InterveningCommitException;
import net.hillsdon.reviki.vc.PageInfo;
import net.hillsdon.reviki.vc.PageReference;
import net.hillsdon.reviki.vc.PageStore;
import net.hillsdon.reviki.vc.PageStoreException;
import net.hillsdon.reviki.vc.VersionedPageInfo;
import net.hillsdon.reviki.vc.impl.SimpleDelegatingPageStore;

import org.apache.commons.logging.Log;
@@ -62,5 +64,20 @@ public long set(final PageInfo page, final String lockToken, final long baseRevi
}
return newRevision;
}


@Override
public VersionedPageInfo get(PageReference ref, long revision) throws PageStoreException {
VersionedPageInfo page = super.get(ref, revision);
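// Re-index lazily on read: if the recorded hashes show the index entry is stale, refresh it.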
if (!page.isNewPage()) {
try {
if (!_indexer.isIndexUpToDate(page)) {
_indexer.index(page, false);
}
}
catch (IOException e) {
LOG.error("Error adding to search index, skipping page: " + page, e);
}
}
return page;
}
}
4 changes: 4 additions & 0 deletions src/net/hillsdon/reviki/wiki/RenderedPage.java
@@ -46,6 +46,10 @@ public String getPage() {
return _pageName;
}

/**
* @return hash code of the rendered page output, used to detect whether a page has changed since it was last indexed.
*/
public Integer getPageHashCode() {
return _rendered.hashCode();
}

/**
* @return outgoing links in document order.
* @throws IOException If we fail to parse.
28 changes: 28 additions & 0 deletions webtests/net/hillsdon/reviki/webtests/TestSearchMacro.java
@@ -90,4 +90,32 @@ public void testAttributeKeyWithColon() throws Exception {
assertTrue(searchingOnPageAsText.contains(searchingFor));
assertSearchFindsPageUsingQuery(searchingOnPage, searchingFor, "path:" + searchingFor);
}

public void testBackLinksOnReferencedPage() throws Exception {
String refs = uniqueWikiPageName("Refs");
String findMe = uniqueWikiPageName("FindMe");
editWikiPage(refs, String.format("Macro: <<search:path:%s>>", findMe), "", "Search Macro Test", true);
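// The referenced page does not exist yet, so the search macro initially has nothing to match.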

editWikiPage(findMe, "I'm here", "", "Search Macro Test", true);

HtmlPage refsPage = getWikiPage(refs);
String refsPageAsText = refsPage.asText();
assertTrue(refsPageAsText.contains(findMe));

HtmlPage findMePage = getWikiPage(findMe);
String findMePageAsText = findMePage.asText();
assertTrue(findMePageAsText.contains(refs));
}

public void testMatchingAttributesNoChangeToRenderedPage() throws Exception {
String searchingFor = uniqueWikiPageName("SearchMacroSearchingFor");
editWikiPage(searchingFor, "Should not be found by macro", "", "Search Macro Test", true);
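// The second save below changes only the attributes; the page content, and hence the rendered output, is unchanged.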
editWikiPage(searchingFor, "Should not be found by macro", "status = completed", "Search Macro Test", false);
String searchingOn = uniqueWikiPageName("SearchMacroTest");
HtmlPage searchingOnPage = editWikiPage(searchingOn, "Search Macro Results: <<search:path:" + searchingFor + " AND @status:completed>>", "", "Search Macro Test", true);
String searchingOnPageAsText = searchingOnPage.asText();
assertTrue(searchingOnPageAsText.contains("Search Macro Results:"));
assertTrue(searchingOnPageAsText.contains(searchingFor));
assertSearchFindsPageUsingQuery(searchingOnPage, searchingFor, "path:" + searchingFor);
}
}