wip: rejig colors, logging
parent 50fac496ea
commit 806279edaa

4 changed files with 54 additions and 20 deletions
@@ -1,4 +1,7 @@
-def highlight(text):
-    GREEN = "\033[92m"
-    ENDC = "\033[0m"
-    return f"{GREEN}{text}{ENDC}"
+COLORS = {"green": "\033[92m", "yellow": "\033[93m", "red": "\033[91m"}
+ENDC = "\033[0m"
+
+
+def highlight(text, color="green"):
+    color_code = COLORS[color]
+    return f"{color_code}{text}{ENDC}"
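Usage sketch (not part of the diff): the reworked highlight() takes a color name and defaults to green. The import path assumes the module is named colors, matching the from colors import highlight added further down.

from colors import highlight

print(highlight("all good"))            # defaults to green
print(highlight("careful", "yellow"))
print(highlight("broken", "red"))

# An unknown name now raises KeyError, since highlight() indexes COLORS
# directly instead of falling back to a default:
# highlight("oops", "blue")  # -> KeyError: 'blue'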
@@ -52,20 +52,23 @@ class Indexer(IndexerBase):
         for path in paths:
             discovered.extend(self._discover(path))
 
-        logger.info(f"[Discovery] Discovered {len(discovered)} files.")
+        logger.info(f"Discovered {len(discovered)} files.", prefix="Discovery")
 
         self._preload(discovered)
         self._populate_indices(self.corpus.collect_unprocessed_documents())
         end_time = perf_counter()
 
         logger.info(
-            f"[Index status] {self.corpus.document_count} total files indexed in {end_time - start_time} seconds."
+            f"{self.corpus.document_count} total files indexed in {end_time - start_time} seconds.",
+            prefix="Index status",
         )
 
     def query(self, query: str):
         start_time = perf_counter()
         leads = self._trigram_index.query(query)
-        logger.info(f"Narrowed down to {len(leads)} files via trigram search")
+        logger.info(
+            f"Narrowed down to {len(leads)} files via trigram search", prefix="Query"
+        )
         confirmed = []
         uniques = 0
         for lead in leads:
@@ -96,7 +99,8 @@ class Indexer(IndexerBase):
                 uniques += 1
         end_time = perf_counter()
         logger.info(
-            f"{len(confirmed)} hits in {uniques} files ({end_time - start_time} seconds elapsed)."
+            f"{len(confirmed)} hits in {uniques} files ({end_time - start_time} seconds elapsed).",
+            prefix="Query",
         )
         return [r.to_dict() for r in confirmed]
 
@@ -106,7 +110,7 @@ class Indexer(IndexerBase):
 
         # Avoid any excluded paths
         if any([current.match(x) for x in settings.EXCLUDES]):
-            logger.info(f"[Discovery] {path_root} excluded.")
+            logger.info(f"{path_root} excluded.", prefix="Discovery")
             return []
 
         if current.is_dir():
@@ -118,7 +122,7 @@ class Indexer(IndexerBase):
         if current.suffix not in settings.FILE_TYPES:
             return []
 
-        logger.info(f"Collected {path_root}")
+        logger.info(f"Collected {path_root}", prefix="Discovery")
         return [path_root]
 
     def _preload(self, discovered: List[str]):
@@ -127,10 +131,11 @@ class Indexer(IndexerBase):
                 with open(discovered_file, "r") as infile:
                     content = infile.read()
                     self.corpus.add_document(key=discovered_file, content=content)
-                    logger.info(f"[Preloading] Loaded {discovered_file} in memory")
+                    logger.info(f"Loaded {discovered_file} in memory", prefix="Preloading")
             except Exception as e:
-                logger.exception(e)
-                logger.error(f"Could not read {discovered_file}, skipping.")
+                logger.warning(
+                    f"Could not read {discovered_file}, skipping.", prefix="Preloading"
+                )
 
     def _populate_indices(self, uids):
         processes = settings.INDEXING_PROCESSES
@@ -154,7 +159,7 @@ class Indexer(IndexerBase):
             content = document.content
             trigrams[uid] = TrigramIndex.trigramize(content)
             self._line_index.index(path, content)
-            logger.info(f"[Indexing] Processed {path}")
+            logger.info(f"Processed {path}", prefix="Indexing")
 
         return (trigrams, self._line_index._lines)
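The query path narrows candidates via self._trigram_index.query() before confirming hits line by line. TrigramIndex.trigramize itself is outside this diff; as a hypothetical mental model, it can be read as a 3-character sliding window over the file content:

# Hypothetical sketch only -- the real TrigramIndex.trigramize is not
# shown in this diff. Assumes a simple 3-character sliding window.
def trigramize(content: str) -> set:
    return {content[i:i + 3] for i in range(len(content) - 2)}

# trigramize("search") -> {'sea', 'ear', 'arc', 'rch'}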
@@ -1,5 +1,27 @@
 import logging
 import sys
+import attr
 
+from colors import highlight
+
+
+@attr.s
+class Logger:
+    logger = attr.ib()
+
+    def info(self, message, prefix=None):
+        prefix_str = ""
+        if prefix:
+            prefix_str = highlight(f"[{prefix}]", "green")
+
+        self.logger.info(f"{prefix_str} {message}")
+
+    def warning(self, message, prefix=None):
+        prefix_str = ""
+        if prefix:
+            prefix_str = highlight(f"[{prefix}]", "yellow")
+
+        self.logger.warning(f"{prefix_str} {message}")
+
 
 def get_logger(name):

@@ -8,4 +30,6 @@ def get_logger(name):
     handler = logging.StreamHandler(sys.stdout)
     logger.addHandler(handler)
 
-    return logger
+    logger_obj = Logger(logger=logger)
+
+    return logger_obj
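With both hunks applied, get_logger() returns the attrs-based wrapper, so every call site gains the prefix keyword. A minimal usage sketch (the module name logger is assumed from context):

from logger import get_logger  # module name assumed

logger = get_logger(__name__)
logger.info("Discovered 42 files.", prefix="Discovery")             # green [Discovery]
logger.warning("Could not read x, skipping.", prefix="Preloading")  # yellow [Preloading]

# Note: the wrapper defines only info() and warning(); any remaining
# logger.exception()/logger.error() call sites would raise AttributeError
# unless those methods are forwarded as well.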
@@ -25,12 +25,15 @@ class Server:
         socket.bind((settings.SOCKET_HOST, settings.SOCKET_PORT))
         socket.listen()
 
-        logger.info(f"Listening on {settings.SOCKET_HOST}:{settings.SOCKET_PORT}")
+        logger.info(
+            f"Listening on {settings.SOCKET_HOST}:{settings.SOCKET_PORT}",
+            prefix="Server",
+        )
 
         while True:
             conn, _ = socket.accept()
             query_string = conn.recv(QUERY_STRING_LENGTH).decode()
-            logger.info(f"Query: {query_string}")
+            logger.info(f"Query string: {query_string}", prefix="Query")
             if query_string:
                 try:
                     query_results = self.indexer.query(query_string)
@@ -41,7 +44,7 @@ class Server:
             except KeyboardInterrupt:
                 raise e
             except Exception as e:
-                logger.exception(e)
+                pass
 
     def _start_socket(self):
         try:
@@ -49,14 +52,13 @@ class Server:
             self._socket = socket_obj
             self._handle_socket(socket=socket_obj)
         except Exception as e:
-            logger.exception(e)
             raise e
 
     def _start_watch(self):
         watch_manager = pyinotify.WatchManager()
 
         for path in self.watched:
-            logger.info(f"Watching {path}")
+            logger.info(f"Watching {path}", prefix="Server")
             watch_manager.add_watch(path, pyinotify.ALL_EVENTS, rec=True)
 
         event_handler = WatchHandler(indexer=self.indexer)
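For manual testing, a query can be pushed to the listening socket with a few lines of client code. A sketch, assuming the same settings module the server reads; the response path is not shown in this diff:

import socket

import settings  # same SOCKET_HOST / SOCKET_PORT the server binds

with socket.create_connection((settings.SOCKET_HOST, settings.SOCKET_PORT)) as conn:
    # The server reads at most QUERY_STRING_LENGTH bytes and decodes them.
    conn.sendall(b"def highlight")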