From 89afee0085a03a1dc48b34fb0c50db69f875cc0f Mon Sep 17 00:00:00 2001 From: Matthew Martin Date: Mon, 1 Jan 2024 23:01:05 -0500 Subject: [PATCH] bug_trail --- .gitignore | 2 + bug_trail/README.md | 35 +++++++++ bug_trail/__init__.py | 6 ++ bug_trail/__main__.py | 37 +++++++++ bug_trail/data_code.py | 123 ++++++++++++++++++++++++++++++ bug_trail/example.py | 29 +++++++ bug_trail/fs_utils.py | 83 ++++++++++++++++++++ bug_trail/handlers.py | 82 ++++++++++++++++++++ bug_trail/log_detail.html | 42 +++++++++++ bug_trail/log_overview.html | 55 ++++++++++++++ bug_trail/tests.py | 0 bug_trail/views.py | 147 ++++++++++++++++++++++++++++++++++++ docs/Manual.md | 12 +++ 13 files changed, 653 insertions(+) create mode 100644 bug_trail/README.md create mode 100644 bug_trail/__init__.py create mode 100644 bug_trail/__main__.py create mode 100644 bug_trail/data_code.py create mode 100644 bug_trail/example.py create mode 100644 bug_trail/fs_utils.py create mode 100644 bug_trail/handlers.py create mode 100644 bug_trail/log_detail.html create mode 100644 bug_trail/log_overview.html create mode 100644 bug_trail/tests.py create mode 100644 bug_trail/views.py create mode 100644 docs/Manual.md diff --git a/.gitignore b/.gitignore index 8795a92..e9dca31 100644 --- a/.gitignore +++ b/.gitignore @@ -170,3 +170,5 @@ ai_shell/dialog_log/ /api_logs/ ai_shell.toml +/bug_trail/logs/ +/bug_trail/error_log.db diff --git a/bug_trail/README.md b/bug_trail/README.md new file mode 100644 index 0000000..a1bd5eb --- /dev/null +++ b/bug_trail/README.md @@ -0,0 +1,35 @@ +# Bug Trail + +This is a workstation logger to capture bugs encountered while you are writing code. + +## Installation + +```bash +pip install bug-trail +``` + +## Usage + +```python +import bug_trail +import logging + +db_path = "error_log.db" +handler = bug_trail.ErrorLogSQLiteHandler(db_path) +logging.basicConfig(handlers=[handler], level=logging.ERROR) + +logger = logging.getLogger(__name__) +logger.error("This is an error message") +``` + +To generate to the log folder relative to the current working directory: + +```bash +bug_trail --output logs --db error_log.db +``` + +## Security +None. Do not publish your error log to the internet. Add the log folder to your .gitignore file. + +## Prior Art +Inspired by elmah. Much less ambitious, as this is just a browsable, static HTML report. \ No newline at end of file diff --git a/bug_trail/__init__.py b/bug_trail/__init__.py new file mode 100644 index 0000000..a71ba68 --- /dev/null +++ b/bug_trail/__init__.py @@ -0,0 +1,6 @@ +""" +Captures error logs to sqlite. Use CLI or +""" +__all__ = ["ErrorLogSQLiteHandler"] + +from bug_trail.handlers import ErrorLogSQLiteHandler diff --git a/bug_trail/__main__.py b/bug_trail/__main__.py new file mode 100644 index 0000000..174b708 --- /dev/null +++ b/bug_trail/__main__.py @@ -0,0 +1,37 @@ +import argparse +import sys + +from bug_trail.fs_utils import clear_data, prompt_and_update_gitignore +from bug_trail.views import render_all + + +def main() -> int: + """ + Main entry point for the CLI. 
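+
+    Example (illustrative, mirroring the README):
+        bug_trail --output logs --db error_log.db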
+
+    Returns:
+        int: 0 if successful, 1 if not
+    """
+    parser = argparse.ArgumentParser(description="Tool for local logging and error reporting.")
+    parser.add_argument("--clear", action="store_true", help="Clear the database and log files")
+
+    parser.add_argument("--output", default="logs", help="Where to output the logs")
+    parser.add_argument("--db", default="error_log.db", help="Where to store the database")
+
+    parser.add_argument("--version", action="version", version="%(prog)s 1.0")
+
+    args = parser.parse_args()
+    db_path = args.db
+    log_folder = args.output
+    if args.clear:
+        clear_data(log_folder, db_path)
+        return 0
+
+    prompt_and_update_gitignore(".")
+    # Default action: render the HTML report
+    render_all(db_path, log_folder)
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/bug_trail/data_code.py b/bug_trail/data_code.py
new file mode 100644
index 0000000..6b5cecf
--- /dev/null
+++ b/bug_trail/data_code.py
@@ -0,0 +1,123 @@
+import datetime
+import sqlite3
+from typing import Any
+
+
+def serialize_to_sqlite_supported(value: Any) -> Any:
+    """
+    SQLite supports None, int, float, str, and bytes by default, and also knows how to adapt
+    datetime.date and datetime.datetime; everything else is converted with str(value).
+    """
+    if value is None:
+        return value
+    if isinstance(value, (int, float, str, bytes)):
+        return value
+    if isinstance(value, (datetime.date, datetime.datetime)):
+        return value
+    return str(value)
+
+
+def fetch_log_data(db_path: str) -> list[dict[str, Any]]:
+    """
+    Fetch all log records from the database.
+
+    Args:
+        db_path (str): Path to the SQLite database
+
+    Returns:
+        list[dict[str, Any]]: A list of dictionaries containing all log records
+    """
+    # Connect to the SQLite database
+    conn = sqlite3.connect(db_path)
+    cursor = conn.cursor()
+
+    # Query to fetch all rows from the logs table
+    query = "SELECT * FROM logs"
+    cursor.execute(query)
+
+    # Fetching column names from the cursor
+    columns = [description[0] for description in cursor.description]
+
+    # Fetch all rows, and convert each row to a dictionary
+    rows = cursor.fetchall()
+    log_data = []
+    for row in rows:
+        log_record = dict(zip(columns, row, strict=True))
+        log_data.append(log_record)
+
+    # Close the connection
+    conn.close()
+    return log_data
+
+
+def fetch_log_data_grouped(db_path: str) -> Any:
+    """
+    Fetch all log records from the database, and group them into a nested dictionary.
+ + Args: + db_path (str): Path to the SQLite database + + Returns: + Any: A nested dictionary containing all log records + """ + # Connect to the SQLite database + conn = sqlite3.connect(db_path) + cursor = conn.cursor() + + # Query to fetch all rows from the logs table + query = "SELECT * FROM logs" + cursor.execute(query) + + # Fetching column names from the cursor + columns = [description[0] for description in cursor.description] + + # Fetch all rows, and convert each row to a grouped dictionary + rows = cursor.fetchall() + log_data = [] + for row in rows: + log_record = dict(zip(columns, row, strict=True)) + + # Grouping the log record + grouped_record = { + "MessageDetails": {key: log_record[key] for key in ["msg", "args", "levelname", "levelno"]}, + "SourceContext": { + key: log_record[key] for key in ["name", "pathname", "filename", "module", "funcName", "lineno"] + }, + "TemporalDetails": {key: log_record[key] for key in ["created", "msecs", "relativeCreated"]}, + "ProcessThreadContext": { + key: log_record[key] for key in ["process", "processName", "thread", "threadName"] + }, + "ExceptionDetails": {key: log_record[key] for key in ["exc_info", "exc_text"]}, + "StackDetails": {key: log_record[key] for key in ["stack_info"]}, + "UserData": { + key: log_record[key] + for key in log_record.keys() + - { + "msg", + "args", + "levelname", + "levelno", + "name", + "pathname", + "filename", + "module", + "funcName", + "lineno", + "created", + "msecs", + "relativeCreated", + "process", + "processName", + "thread", + "threadName", + "exc_info", + "exc_text", + "stack_info", + } + }, + } + log_data.append(grouped_record) + + # Close the connection + conn.close() + return log_data diff --git a/bug_trail/example.py b/bug_trail/example.py new file mode 100644 index 0000000..1a71d77 --- /dev/null +++ b/bug_trail/example.py @@ -0,0 +1,29 @@ +"""Example usage""" + +# Set up logging +import logging + +from bug_trail.handlers import ErrorLogSQLiteHandler + +db_path = "error_log.db" +handler = ErrorLogSQLiteHandler(db_path) +logging.basicConfig(handlers=[handler], level=logging.ERROR) + +# Example usage +logger = logging.getLogger(__name__) +logger.error("This is an error message") + + +def run(): + # Example usage + logger2 = logging.getLogger("adhoc") + logger2.error("This is an ad hoc error message") + + logger.error("This is an error message") + try: + _ = 1 / 0 + except ZeroDivisionError as e: + logger.exception(e) + + +run() diff --git a/bug_trail/fs_utils.py b/bug_trail/fs_utils.py new file mode 100644 index 0000000..6fa3eb2 --- /dev/null +++ b/bug_trail/fs_utils.py @@ -0,0 +1,83 @@ +""" +This module contains functions related to file system operations. +""" +import os +import shutil + + +def empty_folder(folder_path: str) -> None: + """ + Empty the folder at the given path + + Args: + folder_path (str): Path to the folder to be emptied + """ + if os.path.exists(folder_path) and os.path.isdir(folder_path): + shutil.rmtree(folder_path) + os.makedirs(folder_path, exist_ok=True) + + +def clear_data(log_folder: str, db_path: str) -> None: + """ + Clear the database and log files + """ + # Code to clear the database and log files + empty_folder(log_folder) + os.remove(db_path) + + +def get_containing_folder_path(file_path: str) -> str: + """ + Get the absolute path of the folder containing the given file. 
+ + Args: + file_path (str): Path to the file (__file__) + + Returns: + str: Absolute path of the containing folder + """ + return os.path.abspath(os.path.dirname(file_path)) + + +def is_git_repo(path: str) -> bool: + """ + Check if the path is inside a git repository by looking for a .git directory. + + Args: + path (str): The directory path to check. + + Returns: + bool: True if inside a git repo, False otherwise. + """ + current_path = path + while current_path != os.path.dirname(current_path): + if os.path.isdir(os.path.join(current_path, ".git")): + return True + current_path = os.path.dirname(current_path) + return False + + +def prompt_and_update_gitignore(repo_path: str) -> None: + """Prompt the user to ignore logs and update .gitignore accordingly.""" + if not is_git_repo(repo_path): + return + + gitignore_path = os.path.join(repo_path, ".gitignore") + + # Check if .gitignore exists and 'logs' is already listed + if os.path.exists(gitignore_path): + with open(gitignore_path, encoding="utf-8") as file: + if "logs" in file.read(): + print("'logs' directory is already ignored in .gitignore.") + return + + # Prompt user for action + response = ( + input("This directory is a Git repository. Do you want to ignore 'logs' directory? (y/n): ").strip().lower() + ) + if (response.lower() + "xxx")[0] == "y": + with open(gitignore_path, "a", encoding="utf-8") as file: + file.write("\nlogs/") + print("'logs' directory is now ignored in .gitignore.") + else: + print("No changes made to .gitignore.") diff --git a/bug_trail/handlers.py b/bug_trail/handlers.py new file mode 100644 index 0000000..d5bed37 --- /dev/null +++ b/bug_trail/handlers.py @@ -0,0 +1,82 @@ +""" +This module contains custom logging handlers. +""" + +import logging +import sqlite3 +import traceback + +from bug_trail.data_code import serialize_to_sqlite_supported + + +class ErrorLogSQLiteHandler(logging.Handler): + """ + A custom logging handler that logs to a SQLite database. 
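+
+    Example (illustrative, mirroring the README):
+        handler = ErrorLogSQLiteHandler("error_log.db")
+        logging.basicConfig(handlers=[handler], level=logging.ERROR)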
+ """ + + def __init__(self, db_path: str) -> None: + """ + Initialize the handler + Args: + db_path (str): Path to the SQLite database + """ + super().__init__() + self.db_path = db_path + self.conn = sqlite3.connect(self.db_path) + self.create_table() + + def create_table(self) -> None: + """ + Create the logs table if it doesn't exist + """ + # Create a dummy LogRecord to introspect its attributes + dummy_record = logging.LogRecord( + name="", level=logging.ERROR, pathname="", lineno=0, msg="", args=(), exc_info=None + ) + fields = [ + attr + for attr in dir(dummy_record) + if not callable(getattr(dummy_record, attr)) and not attr.startswith("__") + ] + columns = ", ".join([f"{field} TEXT" for field in fields]) + columns = columns + ", traceback TEXT" + create_table_sql = f"CREATE TABLE IF NOT EXISTS logs ({columns})" + self.conn.execute(create_table_sql) + self.conn.commit() + + def emit(self, record: logging.LogRecord) -> None: + """ + Insert a log record into the database + + Args: + record (logging.LogRecord): The log record to be inserted + """ + if record.levelno < logging.ERROR: + return + # Check if there is exception information + if record.exc_info: + # Format the traceback + traceback_str = "".join(traceback.format_exception(*record.exc_info)) + record.traceback = traceback_str + else: + record.traceback = None + + insert_sql = "INSERT INTO logs ({fields}) VALUES ({values})" + field_names = ", ".join( + [attr for attr in dir(record) if not attr.startswith("__") and not attr == "getMessage"] + ) + field_names = field_names + ", traceback" + field_values = ", ".join(["?" for _ in field_names.split(", ")]) + formatted_sql = insert_sql.format(fields=field_names, values=field_values) + args = [getattr(record, field, "") for field in field_names.split(", ")] + args = [serialize_to_sqlite_supported(arg) for arg in args] + self.conn.execute(formatted_sql, args) + self.conn.commit() + + def close(self) -> None: + """ + Close the connection to the database + """ + if self.conn: + self.conn.close() + super().close() diff --git a/bug_trail/log_detail.html b/bug_trail/log_detail.html new file mode 100644 index 0000000..88ebb28 --- /dev/null +++ b/bug_trail/log_detail.html @@ -0,0 +1,42 @@ + + + + + Error Log Detail + + + +
+<h1>Error Log Detail</h1>
+
+{% for group_name, fields in log.items() %}
+<h2>{{ group_name }}</h2>
+<dl>
+    {% for key, value in fields.items() %}
+    <dt>{{ key|pretty }}:</dt>
+    <dd>{{ value }}</dd>
+    {% endfor %}
+</dl>
+{% endfor %}
+ + diff --git a/bug_trail/log_overview.html b/bug_trail/log_overview.html new file mode 100644 index 0000000..414257d --- /dev/null +++ b/bug_trail/log_overview.html @@ -0,0 +1,55 @@ + + + + + Error Logs Overview + + + +

+<h1>Error Logs Overview</h1>
+<table>
+    <thead>
+    <tr>
+        <th>Details</th>
+        <th>Timestamp</th>
+        <th>Level</th>
+        <th>Message</th>
+        <th>Filename</th>
+        <th>Function Name</th>
+        <th>Line No</th>
+        <th>Module</th>
+        <th>Process ID</th>
+        <th>Thread Name</th>
+    </tr>
+    </thead>
+    <tbody>
+    {% for log in logs %}
+    <tr>
+        <td><a href="{{ log.detailed_filename }}">View Details</a></td>
+        <td>{{ log.created }}</td>
+        <td>{{ log.levelname }}</td>
+        <td>{{ log.msg }}</td>
+        <td>{{ log.filename }}</td>
+        <td>{{ log.funcName }}</td>
+        <td>{{ log.lineno }}</td>
+        <td>{{ log.module }}</td>
+        <td>{{ log.process }}</td>
+        <td>{{ log.threadName }}</td>
+    </tr>
+    {% endfor %}
+    </tbody>
+</table>
+</body>
+</html>
diff --git a/bug_trail/tests.py b/bug_trail/tests.py
new file mode 100644
index 0000000..e69de29
diff --git a/bug_trail/views.py b/bug_trail/views.py
new file mode 100644
index 0000000..6a6c317
--- /dev/null
+++ b/bug_trail/views.py
@@ -0,0 +1,147 @@
+"""
+This module contains the functions for rendering the HTML templates
+"""
+import os
+
+from jinja2 import Environment, FileSystemLoader
+
+from bug_trail.data_code import fetch_log_data, fetch_log_data_grouped
+from bug_trail.fs_utils import empty_folder, get_containing_folder_path
+
+
+def pretty_column_name(column_name: str) -> str:
+    """
+    Transform a column name into a pretty name for display
+
+    Args:
+        column_name (str): The column name to be transformed
+
+    Returns:
+        str: The transformed column name
+    """
+    # Dictionary for special cases
+    special_cases = {
+        "lineno": "Line Number",
+        "funcName": "Function Name",
+        "exc_info": "Exception Info",
+        # Add more special cases here
+    }
+
+    # Check if the column name is a special case
+    if column_name in special_cases:
+        return special_cases[column_name]
+
+    # Rule-based transformation: snake_case to Title Case
+    pretty_name = column_name.replace("_", " ").title()
+    return pretty_name
+
+
+def detail_file_name(selected_log: dict[str, str]) -> str:
+    """
+    Generate a filename for the detail page of a log entry
+
+    Args:
+        selected_log (dict[str, str]): The selected log entry
+
+    Returns:
+        str: The filename for the detail page
+    """
+    key = (
+        f"{selected_log['created'].replace('.','_')}_"
+        f"{selected_log['filename'].replace('.','_')}_"
+        f"{selected_log['lineno']}"
+    )
+    return f"detail_{key}.html"
+
+
+def detail_file_name_grouped(selected_log: dict[str, dict[str, str]]) -> str:
+    """
+    Generate a filename for the detail page of a log entry with grouped data
+
+    Args:
+        selected_log (dict[str, dict[str, str]]): The selected log entry
+
+    Returns:
+        str: The filename for the detail page
+    """
+    key = (
+        f"{selected_log['TemporalDetails']['created'].replace('.','_')}_"
+        f"{selected_log['SourceContext']['filename'].replace('.','_')}_"
+        f"{selected_log['SourceContext']['lineno']}"
+    )
+    return f"detail_{key}.html"
+
+
+def render_main(db_path: str, log_folder: str) -> None:
+    """
+    Render the main page of the log viewer
+
+    Args:
+        db_path (str): Path to the SQLite database
+        log_folder (str): Path to the folder where the report is written
+    """
+    # Set up Jinja2 environment
+    current = get_containing_folder_path(__file__)
+    env = Environment(loader=FileSystemLoader(current))
+    env.filters["pretty"] = pretty_column_name
+    template = env.get_template("log_overview.html")
+
+    log_data = fetch_log_data(db_path)
+
+    for log_entry in log_data:
+        log_entry["detailed_filename"] = detail_file_name(log_entry)
+
+    # Render the template with log data
+    html_output = template.render(logs=log_data)
+
+    index = f"{log_folder}/index.html"
+    os.makedirs(index.rsplit("/", 1)[0], exist_ok=True)
+    with open(index, "w", encoding="utf-8") as f:
+        f.write(html_output)
+
+
+def render_detail(db_path: str, log_folder: str) -> None:
+    """
+    Render the detail pages for all log entries
+
+    Args:
+        db_path (str): Path to the SQLite database
+        log_folder (str): Path to the folder where the report is written
+    """
+    # Set up Jinja2 environment
+    current = get_containing_folder_path(__file__)
+    env = Environment(loader=FileSystemLoader(current))
+    env.filters["pretty"] = pretty_column_name
+    log_data = fetch_log_data_grouped(db_path)
+
+    # Render the template with the selected log data
+    template = env.get_template("log_detail.html")
+
+    for log_entry in log_data:
+        # Selected log record for display
+        selected_log = log_entry
+        html_output = template.render(log=selected_log)
+
+        # Using a unique key for each log entry, like a timestamp or a combination of fields
+        key = detail_file_name_grouped(selected_log)
+
+        # Write `html_output` to a file
+        location = f"{log_folder}/{key}"
+        os.makedirs(location.rsplit("/", 1)[0], exist_ok=True)
+        with open(location, "w", encoding="utf-8") as f:
+            f.write(html_output)
+
+
+def render_all(db_path: str, logs_folder: str) -> None:
+    """
+    Render all the pages
+
+    Args:
+        db_path (str): Path to the SQLite database
+        logs_folder (str): Path to the folder where the report is written
+    """
+    empty_folder(logs_folder)
+    render_main(db_path, logs_folder)
+    render_detail(db_path, logs_folder)
+
+
+if __name__ == "__main__":
+    render_all(db_path="error_log.db", logs_folder="logs")
diff --git a/docs/Manual.md b/docs/Manual.md
new file mode 100644
index 0000000..ae20e86
--- /dev/null
+++ b/docs/Manual.md
@@ -0,0 +1,12 @@
+# Programmer's Manual
+
+## Possible goals
+
+- Write a single-shot bot that uses tools but does not loop.
+- Write a tool-using bot. It uses tools, loops, and has a goal function.
+- Use the tools in the toolkit with your own bot framework.
+
+## Extension Points
+- Goal checking functions
+- New tools with plugins
+- External tools (with merge request)
\ No newline at end of file
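
Below is a minimal end-to-end sketch of how the pieces added in this patch fit together: the handler captures ERROR-level records into SQLite, and `render_all` turns them into a static HTML report. It is illustrative only; the `error_log.db` and `logs` paths are assumptions that simply mirror the CLI defaults above.

```python
# Illustrative sketch; wiring mirrors bug_trail/example.py and bug_trail/__main__.py.
import logging

from bug_trail.handlers import ErrorLogSQLiteHandler
from bug_trail.views import render_all

db_path = "error_log.db"  # assumed path, matches the CLI default
log_folder = "logs"  # assumed output folder, matches the CLI default

# Route ERROR-and-above records into the SQLite database.
handler = ErrorLogSQLiteHandler(db_path)
logging.basicConfig(handlers=[handler], level=logging.ERROR)

logger = logging.getLogger(__name__)
try:
    _ = 1 / 0
except ZeroDivisionError:
    logger.exception("Division failed")  # stored along with its formatted traceback

# Generate logs/index.html plus one detail page per captured record.
render_all(db_path, log_folder)
```

Opening `logs/index.html` in a browser then shows the overview table, with each row linking to its detail page.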