Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,13 @@ const VariantTableWidget = observer(props => {
session.hideWidget(widget)
}

// Snap the data grid back to the first page while preserving the
// user's currently selected page size (used after filter/sort changes).
function resetPaginationToFirstPage() {
    setPageSizeModel(({ pageSize }) => ({ page: 0, pageSize }));
}

function handleQuery(passedFilters, pushToHistory, pageQueryModel = pageSizeModel, sortQueryModel = sortModel) {
const { page = pageSizeModel.page, pageSize = pageSizeModel.pageSize } = pageQueryModel;
const { field = "genomicPosition", sort = false } = sortQueryModel[0] ?? {};
Expand Down Expand Up @@ -461,7 +468,7 @@ const VariantTableWidget = observer(props => {
columnVisibilityModel={columnVisibilityModel}
pageSizeOptions={[10,25,50,100]}
paginationModel={ pageSizeModel }
rowCount={ totalHits }
rowCount={ -1 }
paginationMode="server"
onPaginationModelChange = {(newModel) => {
setPageSizeModel(newModel)
Expand All @@ -485,6 +492,7 @@ const VariantTableWidget = observer(props => {
onSortModelChange={(newModel) => {
setSortModel(newModel)
handleQuery(filters, true, { page: 0, pageSize: pageSizeModel.pageSize }, newModel);
resetPaginationToFirstPage()
}}
localeText={{
MuiTablePagination: {
Expand Down Expand Up @@ -515,7 +523,10 @@ const VariantTableWidget = observer(props => {
fieldTypeInfo: fieldTypeInfo,
allowedGroupNames: allowedGroupNames,
promotedFilters: promotedFilters,
handleQuery: (filters) => handleQuery(filters, true, { page: 0, pageSize: pageSizeModel.pageSize}, sortModel)
handleQuery: (filters) => {
handleQuery(filters, true, { page: 0, pageSize: pageSizeModel.pageSize}, sortModel)
resetPaginationToFirstPage()
}
}}
/>
);
Expand Down
3 changes: 2 additions & 1 deletion jbrowse/src/client/JBrowse/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -366,7 +366,8 @@ export async function fetchLuceneQuery(filters, sessionId, trackGUID, offset, pa
successCallback(jsonRes)
},
failure: function(res) {
failureCallback("There was an error: " + res.status + "\n Status Body: " + res.responseText + "\n Session ID:" + sessionId)
console.error("There was an error: " + res.status + "\n Status Body: " + res.responseText + "\n Session ID:" + sessionId)
failureCallback("There was an error: status " + res.status)
},
params: {
"searchString": encoded,
Expand Down
82 changes: 50 additions & 32 deletions jbrowse/src/org/labkey/jbrowse/JBrowseLuceneSearch.java
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LRUQueryCache;
import org.apache.lucene.search.MatchAllDocsQuery;
Expand All @@ -24,6 +25,7 @@
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.UsageTrackingQueryCachingPolicy;
import org.apache.lucene.store.Directory;
Expand Down Expand Up @@ -65,6 +67,8 @@
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

Expand All @@ -75,6 +79,7 @@
public class JBrowseLuceneSearch
{
private static final Logger _log = LogHelper.getLogger(JBrowseLuceneSearch.class, "Logger related to JBrowse/Lucene indexing and queries");
private static final ExecutorService SEARCH_EXECUTOR = Executors.newFixedThreadPool(JBrowseServiceImpl.get().getCoresForLuceneSearches());
private final JBrowseSession _session;
private final JsonFile _jsonFile;
private final User _user;
Expand Down Expand Up @@ -108,6 +113,11 @@ public static JBrowseLuceneSearch create(String sessionId, String trackId, User
private static synchronized CacheEntry getCacheEntryForSession(String trackObjectId, File indexPath) throws IOException {
CacheEntry cacheEntry = _cache.get(trackObjectId);

if (SEARCH_EXECUTOR.isShutdown() || SEARCH_EXECUTOR.isTerminated())
{
throw new IllegalStateException("The server is shutting down!");
}

// Open directory of lucene path, get a directory reader, and create the index search manager
if (cacheEntry == null)
{
Expand All @@ -116,7 +126,7 @@ private static synchronized CacheEntry getCacheEntryForSession(String trackObjec
Directory indexDirectory = FSDirectory.open(indexPath.toPath());
LRUQueryCache queryCache = new LRUQueryCache(maxCachedQueries, maxRamBytesUsed);
IndexReader indexReader = DirectoryReader.open(indexDirectory);
IndexSearcher indexSearcher = new IndexSearcher(indexReader);
IndexSearcher indexSearcher = new IndexSearcher(indexReader, SEARCH_EXECUTOR);
indexSearcher.setQueryCache(queryCache);
indexSearcher.setQueryCachingPolicy(new ForceMatchAllDocsCachingPolicy());
cacheEntry = new CacheEntry(queryCache, indexSearcher, indexPath);
Expand Down Expand Up @@ -252,7 +262,7 @@ private SearchConfig createSearchConfig(User u, String searchString, final int p

if (searchString.equals(ALL_DOCS))
{
booleanQueryBuilder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
booleanQueryBuilder.add(new ConstantScoreQuery(new MatchAllDocsQuery()), BooleanClause.Occur.MUST);
}

// Split input into tokens, 1 token per query separated by &
Expand Down Expand Up @@ -321,41 +331,46 @@ else if (numericQueryParserFields.containsKey(fieldName))
}

private JSONObject paginateJSON(SearchConfig c) throws IOException, ParseException {
// Get chunks of size {pageSize}. Default to 1 chunk -- add to the offset to get more.
// We then iterate over the range of documents we want based on the offset. This does grow in memory
// linearly with the number of documents, but my understanding is that these are just score,id pairs
// rather than full documents, so mem usage *should* still be pretty low.
// Perform the search with sorting
TopFieldDocs topDocs = c.cacheEntry.indexSearcher.search(c.query, c.pageSize * (c.offset + 1), c.sort);
IndexSearcher searcher = c.cacheEntry.indexSearcher;
TopDocs topDocs;

if (c.offset == 0) {
topDocs = searcher.search(c.query, c.pageSize, c.sort);
} else {
TopFieldDocs prev = searcher.search(c.query, c.pageSize * c.offset, c.sort);
long totalHits = prev.totalHits.value;
ScoreDoc[] prevHits = prev.scoreDocs;

if (prevHits.length < c.pageSize * c.offset)
{
JSONObject results = new JSONObject();
results.put("data", Collections.emptyList());
results.put("totalHits", totalHits);
return results;
}

ScoreDoc lastDoc = prevHits[c.pageSize * c.offset - 1];
topDocs = searcher.searchAfter(lastDoc, c.query, c.pageSize, c.sort);
}

JSONObject results = new JSONObject();
List<JSONObject> data = new ArrayList<>(topDocs.scoreDocs.length);

// Iterate over the doc list, (either to the total end or until the page ends) grab the requested docs,
// and add to returned results
List<JSONObject> data = new ArrayList<>();
for (int i = c.pageSize * c.offset; i < Math.min(c.pageSize * (c.offset + 1), topDocs.scoreDocs.length); i++)
for (ScoreDoc sd : topDocs.scoreDocs)
{
Document doc = searcher.storedFields().document(sd.doc);
JSONObject elem = new JSONObject();
Document doc = c.cacheEntry.indexSearcher.storedFields().document(topDocs.scoreDocs[i].doc);

for (IndexableField field : doc.getFields())
for (IndexableField f : doc.getFields())
{
String fieldName = field.name();
String[] fieldValues = doc.getValues(fieldName);
if (fieldValues.length > 1)
{
elem.put(fieldName, fieldValues);
}
else
{
elem.put(fieldName, fieldValues[0]);
}
String name = f.name();
String[] vals = doc.getValues(name);
elem.put(name, vals.length > 1 ? Arrays.asList(vals) : vals[0]);
}
data.add(elem);
}

results.put("data", data);
results.put("totalHits", topDocs.totalHits.value);

return results;
}

Expand Down Expand Up @@ -679,17 +694,20 @@ public String getName()
return "JBrowse-Lucene Shutdown Listener";
}

// No-op: all JBrowse/Lucene teardown happens in shutdownStarted() instead.
@Override
public void shutdownPre()
{

}

// Server shutdown hook: release all cached Lucene readers/searchers, then stop
// the shared search executor so its worker threads can exit.
@Override
public void shutdownStarted()
{
_log.info("Clearing all open JBrowse/Lucene cached readers");
JBrowseLuceneSearch.emptyCache();

// Graceful stop: no new tasks accepted, queued/in-flight searches may finish.
// NOTE(review): shutdown() does not wait for termination -- presumably fine here.
try
{
SEARCH_EXECUTOR.shutdown();
}
catch (Exception e)
{
_log.error("Error shutting down SEARCH_EXECUTOR", e);
}
}
}

Expand Down
4 changes: 4 additions & 0 deletions jbrowse/src/org/labkey/jbrowse/JBrowseServiceImpl.java
Original file line number Diff line number Diff line change
Expand Up @@ -435,4 +435,8 @@ public boolean isAvailable(Container c)
return c.getActiveModules().contains(ModuleLoader.getInstance().getModule(JBrowseModule.class));
}
}

/**
 * Number of worker threads to devote to concurrent Lucene index searches.
 * Currently sized to the JVM's visible processor count; callers sample this
 * once when building the shared search executor.
 *
 * @return the processor count reported by the runtime (always >= 1)
 */
public int getCoresForLuceneSearches() {
    final Runtime runtime = Runtime.getRuntime();
    return runtime.availableProcessors();
}
}