Improved tag pages (added paginator) and fixed query bugs. Closes #20.
@@ -35,6 +35,7 @@ <script src="js/models.js"> </script>
<script src="js/components/navbar.js"> </script> <script src="js/components/uploader.js"> </script> <script src="js/components/editor.js"> </script> + <script src="js/components/doclist.js"> </script> <script src="js/modules/info.js"> </script> <script src="js/modules/tags.js"> </script> <script src="js/modules/htmldoc.js"> </script>
@@ -10,11 +10,13 @@ app.init = function(info){
app.system = info; m.route(document.body, "/info", { "/info": app.info, - "/tags/:id": app.tags, "/html/:id...": app.htmldoc, "/document/:action/:id...": app.document, "/guide/:id": app.guide, "/new": app.create, + "/tags/:id": app.tags, + "/tags/:id/:page": app.tags, + "/tags/:id/:page/:limit": app.tags, "/search/:q": app.search, "/search/:q/:page": app.search, "/search/:q/:page/:limit": app.search
@@ -17,15 +17,17 @@ Info.get = function(){
var content = m.prop(""); return m.request({ method: "GET", - url: "/v1/info" + url: "/info" }).then(content); }; - Doc.getByTag = function(tag) { + Doc.getByTag = function(tag, offset, limit) { + offset = offset || 0; + limit = limit || 10; var docs = m.prop(""); return m.request({ method: "GET", - url: "/v1/docs?contents=false&tags="+tag + url: "/docs?contents=false&tags="+tag+"&limit="+limit+"&offset="+offset }).then(docs); };@@ -35,7 +37,7 @@ limit = limit || 10;
var docs = m.prop(""); return m.request({ method: "GET", - url: "/v1/docs?contents=false&search="+search+"&limit="+limit+"&offset="+offset, + url: "/docs?contents=false&search="+search+"&limit="+limit+"&offset="+offset, }).then(docs); };@@ -43,14 +45,14 @@ Doc.get = function(id) {
var doc = m.prop(""); return m.request({ method: "GET", - url: "/v1/docs/"+id+"?raw=true" + url: "/docs/"+id+"?raw=true" }).then(doc); }; Doc.delete = function(id){ return m.request({ method: "DELETE", - url: "/v1/docs/"+id + url: "/docs/"+id }); };@@ -59,7 +61,7 @@ xhrcfg = u.setContentType(doc, contentType);
console.log("Doc.put - Saving Document:", doc); return m.request({ method: "PUT", - url: "/v1/docs/"+doc.id, + url: "/docs/"+doc.id, data: doc.data, serialize: function(data){return data;}, config: xhrcfg@@ -70,7 +72,7 @@ Doc.upload = function(doc) {
console.log("Doc.put - Uploading Document:", doc); return m.request({ method: "PUT", - url: "/v1/docs/"+doc.id, + url: "/docs/"+doc.id, data: doc.data, serialize: function(data) {return data} });@@ -102,7 +104,7 @@ }
console.log("Doc.patch - Saving Tags:", ops); return m.request({ method: "PATCH", - url: "/v1/docs/"+id, + url: "/docs/"+id, data: ops }); });
@@ -8,6 +8,7 @@ app.search = {vm: {}};
app.search.vm.init = function(){ var vm = this; vm.query = m.route.param("q"); + vm.baseurl = "/search/" + vm.query + "/"; vm.limit = m.route.param("limit") || 10; vm.page = m.route.param("page") || 1; vm.page -= 1; // pages are 0-based@@ -24,32 +25,15 @@ };
app.search.main = function(){ var vm = app.search.vm; var result = vm.result(); - var title = m("h2.col-md-12", ["You searched for: ", m("em", vm.query)]); - var total = m("p.col-md-12", [m("strong", result.total), " hits ("+vm.execTime+" ms)"]); - var resultPanel = function(res){ - var obj = {}; - var path = (res.id.match(/\.html?$/)) ? "/html/" : "/document/view/"; - obj.title = m("a", {href: path+res.id, config: m.route}, [res.id]); - obj.content = m("div", [ - m("p", [m.trust(res.highlight)]), - m("p", res.tags.map(function(tag){ - return u.taglink(tag); - })) - ] - ); - return m(".row.search-result", m(".col-md-12", [u.panel(obj)])); - }; - var results = m(".row", [m(".col-md-12", result.results.map(resultPanel))]); - - return m("section", [ - m(".row", title), - m(".row", total), - m(".row.text-center", [u.paginator(vm)]), - results, - m(".row.text-center", [u.paginator(vm)]) - ]); + var obj = {}; + obj.title = m("h2.col-md-12", ["You searched for: ", m("em", vm.query)]); + obj.subtitle = m("p.col-md-12", [m("strong", result.total), " results ("+vm.execTime+" ms)"]); + obj.items = result.results; + obj.items.forEach(function(item){ item.content = m.trust(item.highlight) }); + obj.querydata = vm; + return app.doclist.view(obj); }; u.layout(app.search); -}()) +}())
@@ -124,7 +124,7 @@ /**
* - total * - limit * - offset - * - query + * - baseurl */ u.paginator = function(obj) { var max_page = Math.min(14, Math.ceil(obj.total/obj.limit)-1);@@ -142,7 +142,7 @@ var offset = obj.limit * n;
sign = sign || n+1; return m("li", {class: klass}, [m("a", { - href: "/search/"+obj.query+"/"+(n+1), // assuming 10 elements per page //+"/"+obj.limit, + href: obj.baseurl +(n+1), // assuming 10 elements per page //+"/"+obj.limit, config: m.route }, [m.trust(sign)] )]
@@ -236,7 +236,7 @@ let jbody = body.parseJson
if jbody.kind != JArray: return resError(Http400, "Bad request: PATCH request body is not an array.") var options = newQueryOptions() - options.select = @["documents.id AS id", "content_type", "binary", "searchable", "created", "modified"] + options.select = @["documents.id AS id", "created", "modified"] let doc = LS.store.retrieveRawDocument(id, options) if doc == "": return resDocumentNotFound(id)@@ -297,7 +297,7 @@ discard # never happens really.
proc head(req: Request, LS: LiteStore, resource: string, id = ""): Response = var options = newQueryOptions() - options.select = @["documents.id AS id", "content_type", "binary", "searchable", "created", "modified"] + options.select = @["documents.id AS id", "created", "modified"] try: parseQueryOptions(req.url.query, options); if id != "":@@ -315,7 +315,7 @@ case resource:
of "docs": var options = newQueryOptions() if req.url.query.contains("contents=false"): - options.select = @["documents.id AS id", "content_type", "binary", "searchable", "created", "modified"] + options.select = @["documents.id AS id", "created", "modified"] try: parseQueryOptions(req.url.query, options); if id != "":
@@ -309,13 +309,15 @@ LOG.debug("Done")
except: eWarn() -proc vacuum*(store: Datastore) = +proc vacuum*(file: string) = + let data = db.open(file, "", "", "") try: - db.close(store.db) - let data = db.open(store.path, "", "", "") data.exec(SQL_VACUUM) + db.close(data) except: eWarn() + quit(203) + quit(0) proc importDir*(store: Datastore, dir: string) = # TODO: Only allow directory names (not paths)?
@@ -66,7 +66,8 @@ if options.search.len > 0:
result = result & "AND searchdata MATCH '" & options.search.replace("'", "''") & "' " if options.orderby.len > 0 and options.select[0] != "COUNT(docid)": result = result & "ORDER BY " & options.orderby & " " - if options.limit > 0: + if options.limit > 0 and options.search.len == 0: + # If searching, do not add limit to the outer select, it's already in the nested select (ranktable) result = result & "LIMIT " & $options.limit & " " if options.offset > 0: result = result & "OFFSET " & $options.offset & " "@@ -97,9 +98,15 @@ var count = 0
for s in cols: var key = s if s.contains(" "): + # documents.id AS id... let chunks = s.split(" ") key = chunks[chunks.len-1] - res.add((key, %doc[count])) + var value:JsonNode + if doc[count] == "": + value = newJNull() + else: + value = %doc[count] + res.add((key, value)) count.inc res.add(("tags", %tags)) return %res
@@ -15,7 +15,6 @@ lib/logger,
lib/utils, lib/core, lib/cli, - lib/queries, lib/server from asyncdispatch import runForever@@ -36,14 +35,7 @@ fail(200, "Unable to create datastore '$1'" % [LS.file])
# Manage vacuum operation separately if LS.operation == opVacuum: - let data = db.open(LS.file, "", "", "") - try: - data.exec(SQL_VACUUM) - db.close(data) - except: - eWarn() - quit(203) - quit(0) + vacuum LS.file # Open Datastore and execute operation try: