all repos — litestore @ a1fb4ca0dce34f19a3c754442aeec8defc5b3364

A minimalist nosql document store.

Implemented support for index creation/deletion (no retrieval for now)
h3rald h3rald@h3rald.com
Sun, 12 Jan 2020 19:46:53 +0100
commit

a1fb4ca0dce34f19a3c754442aeec8defc5b3364

parent

6063d66f8eaa094c31caa313990f088de4fc4ea3

A .vscode/settings.json

@@ -0,0 +1,10 @@

+{ + "sqltools.connections": [ + { + "database": "${workspaceFolder}/data.db", + "dialect": "SQLite", + "name": "data" + } + ], + "sqltools.useNodeRuntime": true +}
A data Session.sql

@@ -0,0 +1,1 @@

+CREATE INDEX json_document_field_test ON documents(json_extract(documents.data, '$.id') COLLATE NOCASE);
M src/litestorepkg/lib/api_v5.nim

@@ -473,6 +473,29 @@ setOrigin(LS, req, result.headers)

result.content = content.pretty result.code = Http200 +proc putIndex*(LS: LiteStore, id, field: string, req: LSRequest): LSResponse = + try: + LS.store.createIndex(id, field) + result.headers = ctJsonHeader() + setOrigin(LS, req, result.headers) + result.content = "{\"index\": \"$1\"}" % id + result.code = Http200 + except: + eWarn() + result = resError(Http500, "Unable to create index.") + +proc deleteIndex*(LS: LiteStore, id: string, req: LSRequest): LSResponse = + try: + LS.store.dropIndex(id) + result.headers = newHttpHeaders(TAB_HEADERS) + setOrigin(LS, req, result.headers) + result.headers["Content-Length"] = "0" + result.content = "" + result.code = Http204 + except: + eWarn() + result = resError(Http500, "Unable to delete index.") + proc postDocument*(LS: LiteStore, body: string, ct: string, folder="", req: LSRequest): LSResponse = if not folder.isFolder: return resError(Http400, "Invalid folder specified when creating document: $1" % folder)

@@ -712,16 +735,26 @@ return LS.postDocument(req.body.strip, ct, id, req)

proc put*(req: LSRequest, LS: LiteStore, resource: string, id = ""): LSResponse = if id != "": - var ct = "text/plain" - if req.headers.hasKey("Content-Type"): - ct = req.headers["Content-Type"] - return LS.putDocument(id, req.body.strip, ct, req) + if resource == "indexes": + try: + let field = parseJson(req.body.strip)["field"].getStr + return LS.putIndex(id, field, req) + except: + return resError(Http400, "Bad Request - Invalid JSON body - $1" % getCurrentExceptionMsg()) + else: # Assume docs + var ct = "text/plain" + if req.headers.hasKey("Content-Type"): + ct = req.headers["Content-Type"] + return LS.putDocument(id, req.body.strip, ct, req) else: return resError(Http400, "Bad request: document ID must be specified in PUT requests.") proc delete*(req: LSRequest, LS: LiteStore, resource: string, id = ""): LSResponse = if id != "": - return LS.deleteDocument(id, req) + if resource == "indexes": + return LS.deleteIndex(id, req) + else: # Assume docs + return LS.deleteDocument(id, req) else: return resError(Http400, "Bad request: document ID must be specified in DELETE requests.")
M src/litestorepkg/lib/core.nim

@@ -33,7 +33,7 @@ db.exec SQL_DROP_INDEX_DOCUMENTS_ID

db.exec SQL_DROP_INDEX_TAGS_TAG_ID db.exec SQL_DROP_INDEX_TAGS_DOCUMENT_ID -proc createDatastore*(file:string) = +proc createDatastore*(file: string) = if file.fileExists(): raise newException(EDatastoreExists, "Datastore '$1' already exists." % file) LOG.debug("Creating datastore '$1'", file)

@@ -52,24 +52,28 @@ proc closeDatastore*(store: Datastore) =

try: db.close(store.db) except: - raise newException(EDatastoreUnavailable, "Datastore '$1' cannot be closed." % store.path) + raise newException(EDatastoreUnavailable, + "Datastore '$1' cannot be closed." % store.path) -proc destroyDatastore*(store:Datastore) = +proc destroyDatastore*(store: Datastore) = try: if store.path.fileExists(): store.closeDataStore() store.path.removeFile() except: - raise newException(EDatastoreUnavailable, "Datastore '$1' cannot destroyed." % store.path) + raise newException(EDatastoreUnavailable, + "Datastore '$1' cannot destroyed." % store.path) -proc openDatastore*(file:string): Datastore = +proc openDatastore*(file: string): Datastore = if not file.fileExists: - raise newException(EDatastoreDoesNotExist, "Datastore '$1' does not exists." % file) + raise newException(EDatastoreDoesNotExist, + "Datastore '$1' does not exists." % file) try: result.db = db.open(file, "", "", "") # Register custom function & PRAGMAs LOG.debug("Registering custom functions...") - discard create_function(cast[PSqlite3](result.db), "rank", -1, SQLITE_ANY, cast[pointer](SQLITE_DETERMINISTIC), okapi_bm25f_kb, nil, nil) + discard create_function(cast[PSqlite3](result.db), "rank", -1, SQLITE_ANY, + cast[pointer](SQLITE_DETERMINISTIC), okapi_bm25f_kb, nil, nil) LOG.debug("Executing PRAGMAs...") discard result.db.tryExec("PRAGMA locking_mode = exclusive".sql) discard result.db.tryExec("PRAGMA page_size = 4096".sql)

@@ -79,7 +83,8 @@ LOG.debug("Done.")

result.path = file result.mount = "" except: - raise newException(EDatastoreUnavailable, "Datastore '$1' cannot be opened." % file) + raise newException(EDatastoreUnavailable, + "Datastore '$1' cannot be opened." % file) proc retrieveInfo*(store: Datastore): array[0..1, int] = var data = store.db.getRow(SQL_SELECT_INFO)

@@ -108,9 +113,23 @@ LS_TRANSACTION = false

store.db.exec("ROLLBACK".sql) LOG.debug("Rolled back.") +# Manage Indexes + +proc createIndex*(store: Datastore, indexId, field: string) = + let query = sql("CREATE INDEX json_index_$1 ON documents(json_extract(data, ?) COLLATE NOCASE)" % [indexId]) + store.begin() + store.db.exec(query, field) + store.commit() + +proc dropIndex*(store: Datastore, indexId: string) = + let query = sql("DROP INDEX json_index_" & indexId); + store.begin() + store.db.exec(query) + store.commit() + # Manage Tags -proc createTag*(store: Datastore, tagid, documentid: string, system=false) = +proc createTag*(store: Datastore, tagid, documentid: string, system = false) = if tagid.match(PEG_USER_TAG) or system and tagid.match(PEG_TAG): store.begin() store.db.exec(SQL_INSERT_TAG, tagid, documentid)

@@ -119,7 +138,8 @@ else:

store.rollback() raise newException(EInvalidTag, "Invalid Tag: $1" % tagid) -proc destroyTag*(store: Datastore, tagid, documentid: string, system=false): int64 = +proc destroyTag*(store: Datastore, tagid, documentid: string, + system = false): int64 = if tagid.match(PEG_USER_TAG) or system and tagid.match(PEG_TAG): store.begin() result = store.db.execAffectedRows(SQL_DELETE_TAG, tagid, documentid)

@@ -128,7 +148,8 @@ else:

store.rollback() raise newException(EInvalidTag, "Invalid Tag: $1" % tagid) -proc retrieveTag*(store: Datastore, id: string, options: QueryOptions = newQueryOptions()): JsonNode = +proc retrieveTag*(store: Datastore, id: string, + options: QueryOptions = newQueryOptions()): JsonNode = var options = options options.single = true var query = prepareSelectTagsQuery(options)

@@ -174,17 +195,19 @@ return %tag_array

# Manage Documents -proc retrieveRawDocument*(store: Datastore, id: string, options: QueryOptions = newQueryOptions()): string = +proc retrieveRawDocument*(store: Datastore, id: string, + options: QueryOptions = newQueryOptions()): string = var options = options options.single = true var select = prepareSelectDocumentsQuery(options) var raw_document = store.db.getRow(select.sql, id) - if raw_document[0] == "": + if raw_document[0] == "": return "" else: return $store.prepareJsonDocument(raw_document, options) -proc createDocument*(store: Datastore, id="", rawdata = "", contenttype = "text/plain", binary = -1, searchable = 1): string = +proc createDocument*(store: Datastore, id = "", rawdata = "", + contenttype = "text/plain", binary = -1, searchable = 1): string = let singleOp = not LS_TRANSACTION var id = id var contenttype = contenttype.replace(peg"""\;(.+)$""", "") # Strip charset for now

@@ -198,7 +221,8 @@ # Validate JSON data

try: discard data.parseJson except: - raise newException(JsonParsingError, "Invalid JSON content - " & getCurrentExceptionMsg()) + raise newException(JsonParsingError, "Invalid JSON content - " & + getCurrentExceptionMsg()) if id == "": id = $genOid() elif id.isFolder:

@@ -207,7 +231,8 @@ # Store document

try: LOG.debug("Creating document '$1'" % id) store.begin() - var res = store.db.insertID(SQL_INSERT_DOCUMENT, id, data, contenttype, binary, searchable, currentTime()) + var res = store.db.insertID(SQL_INSERT_DOCUMENT, id, data, contenttype, + binary, searchable, currentTime()) if res > 0: store.db.exec(SQL_INCREMENT_DOCS) if binary <= 0 and searchable >= 0:

@@ -231,7 +256,8 @@ store.rollback()

eWarn() raise -proc updateDocument*(store: Datastore, id: string, rawdata: string, contenttype = "text/plain", binary = -1, searchable = 1): string = +proc updateDocument*(store: Datastore, id: string, rawdata: string, + contenttype = "text/plain", binary = -1, searchable = 1): string = let singleOp = not LS_TRANSACTION var contenttype = contenttype.replace(peg"""\;(.+)$""", "") # Strip charset for now var binary = checkIfBinary(binary, contenttype)

@@ -241,14 +267,16 @@ # Validate JSON data

try: discard data.parseJson except: - raise newException(JsonParsingError, "Invalid JSON content - " & getCurrentExceptionMsg()) + raise newException(JsonParsingError, "Invalid JSON content - " & + getCurrentExceptionMsg()) var searchable = searchable if binary == 1: searchable = 0 try: LOG.debug("Updating document '$1'" % id) store.begin() - var res = store.db.execAffectedRows(SQL_UPDATE_DOCUMENT, data, contenttype, binary, searchable, currentTime(), id) + var res = store.db.execAffectedRows(SQL_UPDATE_DOCUMENT, data, contenttype, + binary, searchable, currentTime(), id) if res > 0: if binary <= 0 and searchable >= 0: store.db.exec(SQL_UPDATE_SEARCHCONTENT, data.toPlainText, id)

@@ -294,7 +322,9 @@ except:

eWarn() store.rollback() -proc retrieveDocument*(store: Datastore, id: string, options: QueryOptions = newQueryOptions()): tuple[data: string, contenttype: string] = +proc retrieveDocument*(store: Datastore, id: string, + options: QueryOptions = newQueryOptions()): tuple[data: string, + contenttype: string] = var options = options options.single = true var select = prepareSelectDocumentsQuery(options)

@@ -310,11 +340,13 @@ return (data: raw_document[1].decode, contenttype: raw_document[2])

else: return (data: raw_document[1], contenttype: raw_document[2]) -proc retrieveRawDocuments*(store: Datastore, options: var QueryOptions = newQueryOptions()): JsonNode = +proc retrieveRawDocuments*(store: Datastore, + options: var QueryOptions = newQueryOptions()): JsonNode = var select = prepareSelectDocumentsQuery(options) var raw_documents: seq[Row] if options.folder != "": - raw_documents = store.db.getAllRows(select.sql, options.folder, options.folder & "{") + raw_documents = store.db.getAllRows(select.sql, options.folder, + options.folder & "{") else: raw_documents = store.db.getAllRows(select.sql) var documents = newSeq[JsonNode](0)

@@ -415,7 +447,7 @@ store.db.createIndexes()

store.commit() LOG.info("Imported $1/$2 files", cFiles, files.len) -proc exportDir*(store: Datastore, dir: string) = +proc exportDir*(store: Datastore, dir: string) = let docs = store.db.getAllRows(SQL_SELECT_DOCUMENTS_BY_TAG, "$dir:"&dir) LOG.info("Exporting $1 files...", docs.len) for doc in docs:

@@ -430,14 +462,14 @@ file.parentDir.createDir

file.writeFile(data) LOG.info("Done."); -proc deleteDir*(store: Datastore, dir: string) = - store.db.exec(SQL_DELETE_SEARCHDATA_BY_TAG, "$dir:"&dir) - store.db.exec(SQL_DELETE_DOCUMENTS_BY_TAG, "$dir:"&dir) - store.db.exec(SQL_DELETE_TAGS_BY_TAG, "$dir:"&dir) - let total = store.db.getRow(SQL_COUNT_DOCUMENTS)[0].parseInt - store.db.exec(SQL_SET_TOTAL_DOCS, total) +proc deleteDir*(store: Datastore, dir: string) = + store.db.exec(SQL_DELETE_SEARCHDATA_BY_TAG, "$dir:"&dir) + store.db.exec(SQL_DELETE_DOCUMENTS_BY_TAG, "$dir:"&dir) + store.db.exec(SQL_DELETE_TAGS_BY_TAG, "$dir:"&dir) + let total = store.db.getRow(SQL_COUNT_DOCUMENTS)[0].parseInt + store.db.exec(SQL_SET_TOTAL_DOCS, total) -proc mountDir*(store: var Datastore, dir:string) = +proc mountDir*(store: var Datastore, dir: string) = if not dir.dirExists: raise newException(EDirectoryNotFound, "Directory '$1' not found." % dir) store.mount = dir
M src/litestorepkg/lib/queries.nim

@@ -29,6 +29,8 @@ SQL_REINDEX* = sql"REINDEX"

SQL_OPTIMIZE* = sql"INSERT INTO searchdata(searchdata) VALUES('optimize')" SQL_REBUILD* = sql"INSERT INTO searchdata(searchdata) VALUES('rebuild')" + SQL_LIST_DOCUMENTS_INDEXES* = sql"select name, sql from sqlite_master where type = 'index' and tbl_name = 'documents' and name LIKE 'json_index_%'" + SQL_VACUUM* = sql"VACUUM" const SQL_CREATE_SEARCHDATA_TABLE* = sql"""
M src/litestorepkg/lib/server.nim

@@ -112,7 +112,7 @@ if access.hasKey(uri):

auth(uri) break if info.version == "v5": - if info.resource.match(peg"^docs / info / tags$"): + if info.resource.match(peg"^docs / info / tags / indexes$"): return api_v5.route(req, LS, info.resource, info.id) elif info.resource.match(peg"^dir$"): if LS.directory.len > 0:
M src/litestorepkg/lib/types.nim

@@ -97,7 +97,7 @@ PEG_URL* {.threadvar.}: Peg

PEG_TAG = peg"""^\$? [a-zA-Z0-9_\-?~:.@#^!+]+$""" PEG_USER_TAG = peg"""^[a-zA-Z0-9_\-?~:.@#^!+]+$""" -PEG_DEFAULT_URL = peg"""^\/{(docs / info / dir / tags)} (\/ {(.+)} / \/?)$""" +PEG_DEFAULT_URL = peg"""^\/{(docs / info / dir / tags / indexes)} (\/ {(.+)} / \/?)$""" PEG_URL = peg"""^\/({(v\d+)} \/) {([^\/]+)} (\/ {(.+)} / \/?)$""" # Initialize LiteStore