mirror of https://github.com/koniu/recoll-webui.git

commit 86102a205c (parent 807e1fca43)

    add paging of results

6 changed files with 88 additions and 24 deletions
@@ -5,18 +5,35 @@ body { margin: 0 }
     border-bottom: 1px solid #666;
 }
 
-#status { padding: 1.5em; }
-#found { float: left; }
-#downloads { position: fixed; bottom: 2px; right: 7px; margin-left: 1em; }
+#status {
+    margin: 1em;
+    background: #fcfcfc;
+    border: 1px solid #efefef;
+    color: #666;
+    font-size: 9pt;
+    padding: 0.5em;
+}
+#found { float: left }
+#downloads { float: right; }
 #downloads a {
-    border: 1px solid #aaa;
-    padding: 5px;
-    background: #f8f8f8;
+    padding: 5 3 5 3;
     color: #999;
-    font-size: 7pt;
+    text-decoration: underline;
 }
 #downloads a:hover { background: #ccc; color: white }
 
+#pages { clear: both; float: none; width: 100%; text-align: center; }
+.page {
+    font-size: 8pt;
+    color: gray;
+    padding-left: 6px; padding-right: 6px;
+    padding-top: 2px; padding-bottom: 2px;
+    border: 1px solid #ddd;
+    background: #f5f5f5;
+
+}
+.page:hover, .current { background: #ccc; color: white; }
+
 #results { padding: 1em; }
 .search-result {
     margin-left: 15%;

views/pages.tpl (new file, 26 lines)
@@ -0,0 +1,26 @@
+%q = dict(query)
+%def page_href(page):
+%q['page'] = page
+%return '../results?%s' % urllib.urlencode(q)
+%end
+%if nres > 0:
+%import math, urllib
+%npages = int(math.ceil(nres/float(config['perpage'])))
+%if npages > 1:
+<div id="pages">
+<a title="First" class="page" href="{{page_href(1)}}">«</a>
+<a title="Previous" class="page" href="{{page_href(max(1,query['page']-1))}}">‹</a>
+%offset = ((query['page'])/10)*10
+%for p in range(max(1,offset), min(offset+10,npages+1)):
+%if p == query['page']:
+%cls = "page current"
+%else:
+%cls = "page"
+%end
+<a href="{{page_href(p)}}" class="{{cls}}">{{p}}</a>
+%end
+<a title="Next" class="page" href="{{page_href(min(npages, query['page']+1))}}">›</a>
+<a title="Last" class="page" href="{{page_href(npages)}}">»</a>
+</div>
+%end
+%end
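Note (not part of the commit): the new pages.tpl shows a window of at most ten page links around the current page, and page_href() re-encodes the current query dict with only the page number swapped, so each link comes out roughly as ../results?query=...&page=N. A minimal Python sketch of the window arithmetic, assuming Python 2 integer division for the template's offset line (the helper name page_window is illustrative only):

    import math

    def page_window(page, nres, perpage):
        # Same arithmetic as pages.tpl: total page count, then a window of
        # up to ten links anchored at the current page's "decade".
        npages = int(math.ceil(nres / float(perpage)))
        offset = (page // 10) * 10   # the template's "/" is Python 2 integer division
        return list(range(max(1, offset), min(offset + 10, npages + 1)))

    print(page_window(3, 260, 25))    # [1, 2, 3, 4, 5, 6, 7, 8, 9]  (260 hits / 25 per page = 11 pages)
    print(page_window(12, 260, 25))   # [10, 11]  (window clipped at the last page)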

@@ -1,10 +1,10 @@
 %import shlex, unicodedata
 %def strip_accents(s): return ''.join((c for c in unicodedata.normalize('NFD', s) if unicodedata.category(c) != 'Mn'))
-%include header title=": " + query['keywords']+" ("+str(len(res))+")"
+%include header title=": " + query['query']+" ("+str(nres)+")"
 %include search query=query, dirs=dirs, sorts=sorts
 <div id="status">
 <div id="found">
-Found <b>{{len(res)}}</b> matching: <b><i>{{qs}}</i></b>
+Found <b>{{nres}}</b> matching: <b><i>{{qs}}</i></b>
 <small class="gray">({{time.seconds}}.{{time.microseconds/10000}}s)</small>
 </div>
 %if len(res) > 0:
@@ -13,12 +13,15 @@
 <a href="../csv?{{query_string}}">CSV</a>
 </div>
 %end
+<br style="clear: both">
 </div>
+%include pages query=query, config=config, nres=nres
 <div id="results">
 %for i in range(0, len(res)):
 %d = res[i]
 <div class="search-result">
-<div class="search-result-number"><a href="#r{{d['sha']}}">#{{i+1}}</a></div>
+%number = (query['page'] - 1)*config['perpage'] + i + 1
+<div class="search-result-number"><a href="#r{{d['sha']}}">#{{number}}</a></div>
 %url = d['url'].replace('file://', '')
 %for dr, prefix in config['mounts'].items():
 %url = url.replace(dr, prefix)
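Note (not part of the commit): the %number line keeps result numbering continuous across pages instead of restarting at #1. Entry i (zero-based) on page p is numbered (p - 1) * perpage + i + 1, so with the default perpage of 25 the first hit on page 3 is #51.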
@@ -38,7 +41,7 @@
 <a href="{{url.replace('/'+d['filename'],'')}}">{{urllabel}}</a>
 </div>
 <div class="search-result-date">{{d['time']}}</div>
-%for q in shlex.split(query['keywords'].replace("'","\\'")):
+%for q in shlex.split(query['query'].replace("'","\\'")):
 %if not q == "OR":
 % w = strip_accents(q.decode('utf-8').lower()).encode('utf-8')
 % d['snippet'] = d['snippet'].replace(w,'<span class="search-result-highlight">'+w+'</span>')
@@ -48,6 +51,7 @@
 </div>
 %end
 </div>
+%include pages query=query, config=config, nres=nres
 %include footer
 <!-- vim: fdm=marker:tw=80:ts=4:sw=4:sts=4:et:ai
 -->

@@ -5,7 +5,7 @@
 <tr>
 <td width="50%">
 <b>Query</b>
-<input tabindex="0" type="search" name="query" value="{{query['keywords']}}" autofocus><br><br>
+<input tabindex="0" type="search" name="query" value="{{query['query']}}" autofocus><br><br>
 <input type="submit" value="Search">
 <a href=".." tabindex="-1"><input type="button" value="Reset"></a>
 <a href="settings" tabindex="-1"><input type="button" value="Settings"></a>
@@ -48,8 +48,8 @@
 </select>
 </td>
 </tr>
-
 </table>
+<input type="hidden" name="page" value="1" />
 </form>
 </div>
 <!-- vim: fdm=marker:tw=80:ts=4:sw=4:sts=4:et:ai

@@ -6,6 +6,8 @@
 <b>Max results</b> <small class="gray">(maximum number of results to show)</small>
 <input name="maxresults" value={{maxresults}}>
 <hr>
+<b>Results per page</b> <small class="gray">(0 for no pagination)</small>
+<input name="perpage" value={{perpage}}>
 <b>Context words</b> <small class="gray">(number of words shown in search results)</small>
 <input name="context" value={{context}}>
 <b>Context characters</b> <small class="gray">(max characters in a snippet)</small>

webui.py (37 lines changed)
@@ -28,6 +28,7 @@ DEFAULTS = {
     'dirdepth': 3,
     'maxchars': 500,
     'maxresults': 100,
+    'perpage': 25,
 }
 
 # sort fields/labels
@@ -139,18 +140,19 @@ def get_dirs(tops, depth):
 #{{{ get_query
 def get_query():
     query = {
-        'keywords': select([bottle.request.query.get('query'), '']),
+        'query': select([bottle.request.query.get('query'), '']),
         'before': select([bottle.request.query.get('before'), '']),
         'after': select([bottle.request.query.get('after'), '']),
         'dir': select([bottle.request.query.get('dir'), '', '<all>'], [None, '']),
         'sort': select([bottle.request.query.get('sort'), SORTS[0][0]]),
         'ascending': int(select([bottle.request.query.get('ascending'), 0])),
+        'page': int(select([bottle.request.query.get('page'), 1])),
     }
     return query
 #}}}
 #{{{ query_to_recoll_string
 def query_to_recoll_string(q):
-    qs = q['keywords'].decode('utf-8')
+    qs = q['query'].decode('utf-8')
     if len(q['after']) > 0 or len(q['before']) > 0:
         qs += " date:%s/%s" % (q['after'], q['before'])
     if q['dir'] != '<all>':
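Note (illustrative, not part of the commit): with the new 'page' entry in get_query(), a request such as ../results?query=foo&page=2 now arrives as query['page'] == 2, and pages.tpl feeds that same dict back through urllib.urlencode() to build its « ‹ 1 2 3 › » links.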
@@ -158,19 +160,28 @@ def query_to_recoll_string(q):
     return qs
 #}}}
 #{{{ recoll_search
-def recoll_search(q, sort, ascending):
+def recoll_search(q):
     config = get_config()
     tstart = datetime.datetime.now()
     results = []
     db = recoll.connect()
     db.setAbstractParams(config['maxchars'], config['context'])
     query = db.query()
-    query.sortby(sort, ascending)
+    query.sortby(q['sort'], q['ascending'])
     try:
-        nres = query.execute(q, config['stem'])
+        qs = query_to_recoll_string(q)
+        nres = query.execute(qs, config['stem'])
     except:
         nres = 0
-    for i in range(0, min(nres, config['maxresults'])):
+    if config['maxresults'] == 0:
+        config['maxresults'] = nres
+    if nres > config['maxresults']:
+        nres = config['maxresults']
+    if config['perpage'] == 0:
+        config['perpage'] = nres
+    offset = (q['page'] - 1) * config['perpage']
+    query.next = offset
+    while query.next >= 0 and query.next < offset + config['perpage'] and query.next < nres:
         doc = query.fetchone()
         d = {}
         for f in FIELDS:
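Note (sketch only, not code from webui.py): the reworked recoll_search() above caps nres at maxresults, treats perpage == 0 as "show everything on one page" (matching the new settings text), and then walks the query from offset = (page - 1) * perpage for at most perpage documents. The same clamp-and-slice logic applied to a plain Python list, with the helper name page_slice purely illustrative:

    def page_slice(hits, page, perpage, maxresults):
        nres = len(hits)
        if maxresults == 0:          # 0 means no cap on results
            maxresults = nres
        if nres > maxresults:
            nres = maxresults
        if perpage == 0:             # 0 means no pagination, one big page
            perpage = nres
        offset = (page - 1) * perpage
        return hits[offset:min(offset + perpage, nres)], nres

    docs, nres = page_slice(list(range(100)), page=3, perpage=25, maxresults=100)
    # docs -> [50, 51, ..., 74], nres -> 100

The real code drives the recoll query cursor (query.next, query.fetchone()) instead of slicing a list, but the window it visits is the same.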
@@ -181,7 +192,7 @@ def recoll_search(q, sort, ascending):
         d['snippet'] = db.makeDocAbstract(doc, query).encode('utf-8')
         results.append(d)
     tend = datetime.datetime.now()
-    return results, tend - tstart
+    return results, nres, tend - tstart
 #}}}
 #}}}
 #{{{ routes
@@ -205,10 +216,14 @@ def results():
     config = get_config()
     query = get_query()
     qs = query_to_recoll_string(query)
-    res, timer = recoll_search(qs, query['sort'], query['ascending'])
+    res, nres, timer = recoll_search(query)
+    if config['maxresults'] == 0:
+        config['maxresults'] = nres
+    if config['perpage'] == 0:
+        config['perpage'] = nres
     return { 'res': res, 'time': timer, 'query': query, 'dirs':
         get_dirs(config['dirs'], config['dirdepth']),'qs': qs, 'sorts': SORTS, 'config': config,
-        'query_string': bottle.request.query_string }
+        'query_string': bottle.request.query_string, 'nres': nres }
 #}}}
 #{{{ json
 @bottle.route('/json')
@@ -217,7 +232,7 @@ def get_json():
     qs = query_to_recoll_string(query)
     bottle.response.headers['Content-Type'] = 'application/json'
     bottle.response.headers['Content-Disposition'] = 'attachment; filename=recoll-%s.json' % normalise_filename(qs)
-    res, timer = recoll_search(qs, query['sort'], query['ascending'])
+    res, nres, timer = recoll_search(query)
 
     return json.dumps({ 'query': query, 'results': res })
 #}}}
@@ -228,7 +243,7 @@ def get_csv():
     qs = query_to_recoll_string(query)
     bottle.response.headers['Content-Type'] = 'text/csv'
     bottle.response.headers['Content-Disposition'] = 'attachment; filename=recoll-%s.csv' % normalise_filename(qs)
-    res, timer = recoll_search(qs, query['sort'], query['ascending'])
+    res, nres, timer = recoll_search(query)
     si = StringIO.StringIO()
     cw = csv.writer(si)
     cw.writerow(FIELDS)