Implemented a simple thread-safe logger.

* Closes #21.
@@ -2,9 +2,9 @@ import
   parseopt2,
   parsecfg,
   streams,
-  #logging,
   strutils
 
 import
+  logger,
   types,
   utils
@@ -72,7 +72,7 @@ Options:
   -a, --address     Specify server address (default: 127.0.0.1).
   -d, --directory   Specify a directory to import, export, delete, or mount.
   -h, --help        Display this message.
-  -l, --log         Specify the log level: debug, info, warn, error, fatal, none (default: info)
+  -l, --log         Specify the log level: debug, info, warn, error, none (default: info)
   -m, --mount       Mirror database changes to the specified directory on the filesystem.
   -p, --port        Specify server port number (default: 9500).
   -r, --readonly    Allow only data retrieval operations.
@@ -113,12 +113,19 @@ file = val
       of "log", "l":
         if val == "":
           fail(102, "Log level not specified.")
-        try:
-          discard
-          #logLevelLabel = val.toUpper
-          #logLevel = logging.LevelNames.find(logLevelLabel).Level
-        except:
-          fail(103, "Invalid log level '$1'" % val)
+        case val:
+          of "info":
+            LOG.level = lvInfo
+          of "warn":
+            LOG.level = lvWarn
+          of "debug":
+            LOG.level = lvDebug
+          of "error":
+            LOG.level = lvError
+          of "none":
+            LOG.level = lvNone
+          else:
+            fail(103, "Invalid log level '$1'" % val)
       of "directory", "d":
         if val == "":
           fail(104, "Directory not specified.")
@@ -143,6 +150,7 @@
 if directory == nil and (operation in [opDelete, opImport, opExport] or mount):
   fail(105, "Directory option not specified.")
 
+# Initialize LiteStore
 var LS* {.threadvar.}: LiteStore
 
 LS.port = port
@@ -154,12 +162,5 @@ LS.appversion = version
 LS.readonly = readonly
 LS.appname = appname
 LS.favicon = favicon
-LS.loglevel = logLevelLabel
 LS.mount = mount
 LS.reset = reset
-
-# Initialize loggers
-
-#logging.level = logLevel
-#logging.handlers.add(newConsoleLogger(logLevel, "$date $time - "))
-#logging.handlers.add(newFileLogger("litestore.log.txt", fmAppend, logLevel, fmtStr = "$date $time - "))
@@ -15,6 +15,7 @@ import
   types,
   contenttypes,
   queries,
+  logger,
   utils
 
 # Manage Datastores
@@ -36,15 +37,15 @@
 proc createDatastore*(file:string) =
   if file.fileExists():
     raise newException(EDatastoreExists, "Datastore '$1' already exists." % file)
-  debug("Creating datastore '$1'", file)
+  LOG.debug("Creating datastore '$1'", file)
   let data = db.open(file, "", "", "")
-  debug("Creating tables")
+  LOG.debug("Creating tables")
   data.exec(SQL_CREATE_DOCUMENTS_TABLE)
   data.exec(SQL_CREATE_SEARCHDATA_TABLE)
   data.exec(SQL_CREATE_TAGS_TABLE)
-  debug("Creating indexes")
+  LOG.debug("Creating indexes")
   data.createIndexes()
-  debug("Database created")
+  LOG.debug("Database created")
 
 proc closeDatastore*(store:Datastore) =
   try:
@@ -66,13 +67,13 @@ raise newException(EDatastoreDoesNotExist, "Datastore '$1' does not exists." % file)
   try:
     result.db = db.open(file, "", "", "")
     # Register custom function & PRAGMAs
-    debug("Registering custom functions...")
+    LOG.debug("Registering custom functions...")
     discard result.db.create_function("rank", -1, SQLITE_ANY, cast[pointer](SQLITE_DETERMINISTIC), okapi_bm25f_kb, nil, nil)
-    debug("Executing PRAGMAs...")
+    LOG.debug("Executing PRAGMAs...")
     discard result.db.tryExec("PRAGMA locking_mode = exclusive".sql)
     discard result.db.tryExec("PRAGMA page_size = 4096".sql)
     discard result.db.tryExec("PRAGMA cache_size = 10000".sql)
-    debug("Done.")
+    LOG.debug("Done.")
     result.path = file
     result.mount = ""
   except:
@@ -83,19 +84,19 @@ return store.mount.len > 0
 
 proc begin(store: Datastore) =
   if not LS_TRANSACTION:
-    debug("Beginning transaction")
+    LOG.debug("Beginning transaction")
     LS_TRANSACTION = true
     store.db.exec("BEGIN".sql)
 
 proc commit(store: Datastore) =
   if LS_TRANSACTION:
-    debug("Committing transaction")
+    LOG.debug("Committing transaction")
     LS_TRANSACTION = false
     store.db.exec("COMMIT".sql)
 
 proc rollback(store: Datastore) =
   if LS_TRANSACTION:
-    debug("Rolling back transaction")
+    LOG.debug("Rolling back transaction")
     LS_TRANSACTION = false
     store.db.exec("ROLLBACK".sql)
 
@@ -297,14 +298,14 @@
 proc optimize*(store: Datastore) =
   try:
     store.begin()
-    debug("Reindexing columns...")
+    LOG.debug("Reindexing columns...")
     store.db.exec(SQL_REINDEX)
-    debug("Rebuilding full-text index...")
+    LOG.debug("Rebuilding full-text index...")
     store.db.exec(SQL_REBUILD)
-    debug("Optimixing full-text index...")
+    LOG.debug("Optimizing full-text index...")
     store.db.exec(SQL_OPTIMIZE)
     store.commit()
-    debug("Done")
+    LOG.debug("Done")
   except:
     eWarn()
 
@@ -334,8 +335,8 @@ let nBatches = ceil(files.len/batchSize).toInt
   var cFiles = 0
   var cBatches = 0
   store.begin()
-  info("Importing $1 files in $2 batches", files.len, nBatches)
-  debug("Dropping column indexes...")
+  LOG.info("Importing $1 files in $2 batches", files.len, nBatches)
+  LOG.debug("Dropping column indexes...")
   store.db.dropIndexes()
   for f in files:
     try:
@@ -344,16 +345,16 @@ cFiles.inc
       if (cFiles-1) mod batchSize == 0:
         cBatches.inc
         store.commit()
-        info("Importing batch $1/$2...", cBatches, nBatches)
+        LOG.info("Importing batch $1/$2...", cBatches, nBatches)
         store.begin()
     except:
-      warn("Unable to import file: $1", f)
+      LOG.warn("Unable to import file: $1", f)
       eWarn()
       store.rollback()
-  debug("Recreating column indexes...")
+  LOG.debug("Recreating column indexes...")
   store.db.createIndexes()
   store.commit()
-  info("Imported $1/$2 files", cFiles, files.len)
+  LOG.info("Imported $1/$2 files", cFiles, files.len)
 
 proc exportDir*(store: Datastore, dir: string) =
   let docs = store.db.getAllRows(SQL_SELECT_DOCUMENTS_BY_TAG, "$dir:"&dir)
@@ -0,0 +1,39 @@
+import
+  strutils,
+  times
+
+import
+  types
+
+proc currentTime*(plain = false): string =
+  if plain:
+    return getTime().getGMTime().format("yyyy-MM-dd' @ 'hh:mm:ss")
+  else:
+    return getTime().getGMTime().format("yyyy-MM-dd'T'hh:mm:ss'Z'")
+
+proc msg(logger: Logger, kind, message: string, params: varargs[string, `$`]) =
+  let s = format(message, params)
+  if kind == "WARNING":
+    stderr.writeln(currentTime(true) & " " & kind & ": " & s)
+  else:
+    echo currentTime(true), " ", kind, ": ", s
+
+proc error*(logger: Logger, message: string, params: varargs[string, `$`]) =
+  if logger.level <= lvError:
+    logger.msg(" ERROR", message, params)
+
+proc warn*(logger: Logger, message: string, params: varargs[string, `$`]) =
+  if logger.level <= lvWarn:
+    logger.msg("WARNING", message, params)
+
+proc info*(logger: Logger, message: string, params: varargs[string, `$`]) =
+  if logger.level <= lvInfo:
+    logger.msg(" INFO", message, params)
+
+proc debug*(logger: Logger, message: string, params: varargs[string, `$`]) =
+  if logger.level <= lvDebug:
+    logger.msg(" DEBUG", message, params)
+
+var LOG* {.threadvar.}: Logger
+
+LOG.level = lvInfo
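Below is a minimal usage sketch of the new logging API; it is not part of the patch and assumes the `types` and `logger` modules are importable from wherever the snippet lives (in the repository they sit under lib/). Note that `LOG` is a `{.threadvar.}`, so the module-level `LOG.level = lvInfo` above only affects the thread that runs module initialization; a freshly spawned thread starts with a zero-initialized Logger unless it sets the level itself.

    # Hypothetical usage sketch (not in the patch): exercises the Logger API above.
    import types, logger        # adjust to lib/types, lib/logger outside lib/

    LOG.level = lvDebug
    LOG.info("Listening on $1:$2", "127.0.0.1", 9500)   # printed: lvDebug <= lvInfo
    LOG.debug("Opening datastore '$1'", "data.db")      # printed: lvDebug <= lvDebug

    LOG.level = lvNone
    LOG.error("suppressed")                             # not printed: lvNone > lvError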
@@ -5,7 +5,7 @@ times,
   strutils,
   pegs,
   strtabs,
-  #logging,
+  logger,
   cgi
 import
   types,
@@ -22,7 +22,7 @@ return req.hostname & " " & req.reqMethod & " " & url
 
 proc handleCtrlC() {.noconv.} =
   echo ""
-  info("Exiting...")
+  LOG.info("Exiting...")
   quit()
 
 proc processApiUrl(req: Request, LS: LiteStore, info: ResourceInfo): Response =
@@ -79,11 +79,11 @@
 proc serve*(LS: LiteStore) =
   var server = newAsyncHttpServer()
   proc handleHttpRequest(req: Request): Future[void] {.async, gcsafe, closure.} =
-    info(getReqInfo(req).replace("$", "$$"))
+    LOG.info(getReqInfo(req).replace("$", "$$"))
     let res = req.process(LS)
     await req.respond(res.code, res.content, res.headers)
 
-  info(LS.appname & " v" & LS.appversion & " started on " & LS.address & ":" & $LS.port & ".")
+  LOG.info(LS.appname & " v" & LS.appversion & " started on " & LS.address & ":" & $LS.port & ".")
   if LS.mount:
-    info("Mirroring datastore changes to: " & LS.directory)
+    LOG.info("Mirroring datastore changes to: " & LS.directory)
   asyncCheck server.serve(LS.port.Port, handleHttpRequest, LS.address)
@@ -34,6 +34,14 @@ opExport,
     opDelete,
     opVacuum,
     opOptimize
+  LogLevel* = enum
+    lvDebug
+    lvInfo
+    lvWarn
+    lvError
+    lvNone
+  Logger* = object
+    level*: LogLevel
   LiteStore* = object
     store*: Datastore
     address*: string
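As a side note on the hunk above: the filtering in logger.nim (`if logger.level <= lvWarn` and so on) relies purely on the declaration order of `LogLevel`. A small sanity sketch, assuming only the `types` module defined here:

    # Sketch (not part of the patch): LogLevel's declaration order is what the
    # `logger.level <= lv...` checks in logger.nim compare against.
    import types

    assert lvDebug < lvInfo
    assert lvWarn  < lvError
    assert lvError < lvNone    # lvNone is highest, so it suppresses every message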
@@ -1,5 +1,18 @@
-import json, db_sqlite, strutils, pegs, asyncdispatch, asynchttpserver2, times, math, sqlite3, strutils
-import types, queries, contenttypes
+import
+  json,
+  db_sqlite,
+  strutils,
+  pegs,
+  asyncdispatch,
+  asynchttpserver2,
+  math,
+  sqlite3
+
+import
+  types,
+  queries,
+  contenttypes,
+  logger
 
 proc dbQuote*(s: string): string =
   result = "'"
@@ -8,25 +21,6 @@ if c == '\'': add(result, "''")
     else: add(result, c)
   add(result, '\'')
 
-proc currentTime*(plain = false): string =
-  if plain:
-    return getTime().getGMTime().format("yyyy-MM-dd' @ 'hh:mm:ss")
-  else:
-    return getTime().getGMTime().format("yyyy-MM-dd'T'hh:mm:ss'Z'")
-
-proc msg(kind, message: string, params: varargs[string, `$`]) =
-  let s = format(message, params)
-  echo currentTime(true), " ", kind, ": ", s
-
-proc info*(message: string, params: varargs[string, `$`]) =
-  msg(" INFO", message, params)
-
-proc warn*(message: string, params: varargs[string, `$`]) =
-  msg("WARNING", message, params)
-
-proc debug*(message: string, params: varargs[string, `$`]) =
-  msg(" DEBUG", message, params)
-
 proc selectDocumentsByTags(tags: string): string =
   var select_tagged = "SELECT document_id FROM tags WHERE tag_id = '"
   result = ""
@@ -76,7 +70,7 @@ if options.limit > 0:
     result = result & "LIMIT " & $options.limit & " "
   if options.offset > 0:
     result = result & "OFFSET " & $options.offset & " "
-  debug(result.replace("$", "$$"))
+  LOG.debug(result.replace("$", "$$"))
 
 proc prepareSelectTagsQuery*(options: QueryOptions): string =
   result = "SELECT tag_id, COUNT(document_id) "
@@ -88,7 +82,7 @@ if options.orderby.len > 0:
     result = result & "ORDER BY " & options.orderby&" "
   if options.limit > 0:
     result = result & "LIMIT " & $options.limit & " "
-  debug(result.replace("$", "$$"))
+  LOG.debug(result.replace("$", "$$"))
 
 proc prepareJsonDocument*(store:Datastore, doc: TRow, cols:seq[string]): JsonNode =
   var raw_tags = store.db.getAllRows(SQL_SELECT_DOCUMENT_TAGS, doc[0])
@@ -138,13 +132,13 @@ proc destroyDocumentSystemTags*(store: Datastore, docid) =
   store.db.exec(SQL_DELETE_DOCUMENT_SYSTEM_TAGS, docid)
 
 proc fail*(code, msg) =
-  stderr.writeln(msg)
+  LOG.error(msg)
   quit(code)
 
 proc resError*(code: HttpCode, message: string, trace = ""): Response =
-  warn(message.replace("$", "$$"))
+  LOG.warn(message.replace("$", "$$"))
   if trace.len > 0:
-    debug(trace.replace("$", "$$"))
+    LOG.debug(trace.replace("$", "$$"))
   result.code = code
   result.content = """{"error":"$1"}""" % message
   result.headers = ctJsonHeader()
@@ -154,8 +148,8 @@ resError(Http404, "Document '$1' not found." % id)
 
 proc eWarn*() =
   var e = getCurrentException()
-  warn(e.msg)
-  debug(getStackTrace(e))
+  LOG.warn(e.msg)
+  LOG.debug(getStackTrace(e))
 
 # Created by Joshua Wilson on 27/05/14.
 # Copyright (c) 2014 Joshua Wilson. All rights reserved.
@@ -11,6 +11,7 @@ strtabs,
   base64
 import
   lib/types,
+  lib/logger,
   lib/utils,
   lib/core,
   lib/cli,
@@ -23,13 +24,16 @@ {.compile: "vendor/sqlite/libsqlite3.c".}
 {.passC: "-DSQLITE_ENABLE_FTS4=1 -DSQLITE_ENABLE_LOCKING_STYLE=1".}
 
 when isMainModule:
 
+  # Initialize Datastore
   if not LS.file.fileExists:
     try:
-      info("Creating datastore: ", LS.file)
+      LOG.debug("Creating datastore: $1", LS.file)
       LS.file.createDatastore()
     except:
+      eWarn()
       fail(200, "Unable to create datastore '$1'" % [LS.file])
+  # Manage vacuum operation separately
   if LS.operation == opVacuum:
     let data = db.open(LS.file, "", "", "")
@@ -40,6 +44,7 @@ except:
       eWarn()
       quit(203)
     quit(0)
 
+  # Open Datastore and execute operation
   try:
     LS.store = LS.file.openDatastore()
@@ -47,7 +52,7 @@ if LS.mount:
       try:
         LS.store.mountDir(LS.directory, LS.reset)
       except:
-        echo(getCurrentExceptionMsg())
+        eWarn()
         fail(202, "Unable to mount directory '$1'" % [LS.directory])
   except:
     fail(201, "Unable to open datastore '$1'" % [LS.file])
@@ -1,11 +1,16 @@
-#define:release
+define:release
 dynlibOverride:sqlite3
 threads:on
 
-# http://crossgcc.rts-software.org/doku.php?id=compiling_for_win32
-i386.windows.gcc.path = "/usr/local/gcc-4.8.0-qt-4.8.4-for-mingw32/win32-gcc/bin"
-i386.windows.gcc.exe = "i586-mingw32-gcc"
-i386.windows.gcc.linkerexe = "i586-mingw32-gcc"
+# https://gist.github.com/Drakulix/9881160
+amd64.windows.gcc.path = "/usr/local/mingw/bin"
+amd64.windows.gcc.exe = "x86_64-w64-mingw32-gcc"
+amd64.windows.gcc.linkerexe = "x86_64-w64-mingw32-gcc"
+
+# https://gist.github.com/Drakulix/9881160
+i386.windows.gcc.path = "/usr/local/mingw/bin"
+i386.windows.gcc.exe = "i686-w64-mingw32-gcc"
+i386.windows.gcc.linkerexe = "i686-w64-mingw32-gcc"
 
 # http://crossgcc.rts-software.org/doku.php?id=compiling_for_linux
 i386.linux.gcc.path = "/usr/local/gcc-4.8.1-for-linux32/bin"
@@ -13,4 +13,4 @@ address = "127.0.0.1"
 port = 9500
 
 [Deps]
-requires: "nimrod >= 0.10.2"
+requires: "nimrod >= 0.11.0"