If some sites have long-pending old data that doesn't get cleared automatically, this patch attempts to discard the old data using the "big-delete" code instead of a typical DELETE query. (version-14)
@@ -9,6 +9,7 @@ import click | |||
# imports - module imports | |||
import frappe | |||
from frappe.commands import get_site, pass_context | |||
from frappe.core.doctype.log_settings.log_settings import LOG_DOCTYPES | |||
from frappe.exceptions import SiteNotSpecifiedError | |||
@@ -1088,16 +1089,6 @@ def build_search_index(context): | |||
frappe.destroy() | |||
LOG_DOCTYPES = [ | |||
"Scheduled Job Log", | |||
"Activity Log", | |||
"Route History", | |||
"Email Queue", | |||
"Error Snapshot", | |||
"Error Log", | |||
] | |||
@click.command("clear-log-table") | |||
@click.option("--doctype", default="text", type=click.Choice(LOG_DOCTYPES), help="Log DocType") | |||
@click.option("--days", type=int, help="Keep records for days") | |||
@@ -1111,7 +1102,7 @@ def clear_log_table(context, doctype, days, no_backup): | |||
ref: https://mariadb.com/kb/en/big-deletes/#deleting-more-than-half-a-table | |||
""" | |||
from frappe.utils import get_table_name | |||
from frappe.core.doctype.log_settings.log_settings import clear_log_table as clear_logs | |||
from frappe.utils.backups import scheduled_backup | |||
if not context.sites: | |||
@@ -1124,10 +1115,6 @@ def clear_log_table(context, doctype, days, no_backup): | |||
frappe.init(site=site) | |||
frappe.connect() | |||
if frappe.db.db_type != "mariadb": | |||
click.echo("Postgres database isn't supported by this command") | |||
sys.exit(1) | |||
if not no_backup: | |||
scheduled_backup( | |||
ignore_conf=False, | |||
@@ -1137,29 +1124,13 @@ def clear_log_table(context, doctype, days, no_backup): | |||
) | |||
click.echo(f"Backed up {doctype}") | |||
original = get_table_name(doctype) | |||
temporary = f"{original} temp_table" | |||
backup = f"{original} backup_table" | |||
try: | |||
frappe.db.sql_ddl(f"CREATE TABLE `{temporary}` LIKE `{original}`") | |||
click.echo(f"Copying {doctype} records from last {days} days to temporary table.") | |||
# Copy all recent data to new table | |||
frappe.db.sql( | |||
f"""INSERT INTO `{temporary}` | |||
SELECT * FROM `{original}` | |||
WHERE `{original}`.`modified` > NOW() - INTERVAL '{days}' DAY""" | |||
) | |||
frappe.db.sql_ddl(f"RENAME TABLE `{original}` TO `{backup}`, `{temporary}` TO `{original}`") | |||
clear_logs(doctype, days=days) | |||
except Exception as e: | |||
frappe.db.rollback() | |||
frappe.db.sql_list(f"DROP TABLE IF EXISTS `{temporary}`") | |||
click.echo(f"Log cleanup for {doctype} failed:\n{e}") | |||
sys.exit(1) | |||
else: | |||
frappe.db.commit() | |||
frappe.db.sql_ddl(f"DROP TABLE `{backup}`") | |||
click.secho(f"Cleared {doctype} records older than {days} days", fg="green") | |||
@@ -67,9 +67,9 @@ class LogSettings(Document): | |||
def add_default_logtypes(self): | |||
existing_logtypes = {d.ref_doctype for d in self.logs_to_clear} | |||
added_logtypes = set() | |||
for logtype, frequency in DEFAULT_LOGTYPES_RETENTION.items(): | |||
for logtype, retention in DEFAULT_LOGTYPES_RETENTION.items(): | |||
if logtype not in existing_logtypes and _supports_log_clearing(logtype): | |||
self.append("logs_to_clear", {"ref_doctype": logtype, "days": cint(frequency)}) | |||
self.append("logs_to_clear", {"ref_doctype": logtype, "days": cint(retention)}) | |||
added_logtypes.add(logtype) | |||
if added_logtypes: | |||
@@ -142,3 +142,48 @@ def get_log_doctypes(doctype, txt, searchfield, start, page_len, filters): | |||
supported_doctypes = [(d,) for d in doctypes if _supports_log_clearing(d)] | |||
return supported_doctypes[start:page_len] | |||
# Log DocTypes eligible for the copy-and-swap ("big delete") cleanup in
# clear_log_table(); order is preserved as it is surfaced in the CLI choices.
LOG_DOCTYPES = [
	"Scheduled Job Log",
	"Activity Log",
	"Route History",
	"Email Queue",
	"Email Queue Recipient",
	"Error Snapshot",
	"Error Log",
]
def clear_log_table(doctype, days=90):
	"""If any logtype table grows too large then clearing it with DELETE query
	is not feasible in reasonable time. This command copies recent data to new
	table and replaces current table with new smaller table.

	ref: https://mariadb.com/kb/en/big-deletes/#deleting-more-than-half-a-table
	"""
	from frappe.utils import get_table_name

	if doctype not in LOG_DOCTYPES:
		raise frappe.ValidationError(f"Unsupported logging DocType: {doctype}")

	live = get_table_name(doctype)
	# Backticked identifiers, so the embedded space is a legal MariaDB table name.
	staging = f"{live} temp_table"
	retired = f"{live} backup_table"

	try:
		# Clone the schema, keep only rows newer than the retention window,
		# then atomically swap the smaller table into place.
		frappe.db.sql_ddl(f"CREATE TABLE `{staging}` LIKE `{live}`")
		# Copy all recent data to new table
		frappe.db.sql(
			f"""INSERT INTO `{staging}`
				SELECT * FROM `{live}`
				WHERE `{live}`.`modified` > NOW() - INTERVAL '{days}' DAY"""
		)
		frappe.db.sql_ddl(f"RENAME TABLE `{live}` TO `{retired}`, `{staging}` TO `{live}`")
	except Exception:
		# Undo any partial work and remove the half-built clone before re-raising.
		frappe.db.rollback()
		frappe.db.sql_ddl(f"DROP TABLE IF EXISTS `{staging}`")
		raise
	else:
		# Swap succeeded — the old (bloated) table is no longer needed.
		frappe.db.sql_ddl(f"DROP TABLE `{retired}`")
@@ -191,6 +191,7 @@ frappe.patches.v14_0.remove_post_and_post_comment | |||
frappe.patches.v14_0.reset_creation_datetime | |||
frappe.patches.v14_0.remove_is_first_startup | |||
frappe.patches.v14_0.reload_workspace_child_tables | |||
frappe.patches.v14_0.clear_long_pending_stale_logs | |||
frappe.patches.v14_0.log_settings_migration | |||
[post_model_sync] | |||
@@ -202,4 +203,4 @@ frappe.patches.v14_0.update_color_names_in_kanban_board_column | |||
frappe.patches.v14_0.update_is_system_generated_flag | |||
frappe.patches.v14_0.update_auto_account_deletion_duration | |||
frappe.patches.v14_0.set_document_expiry_default | |||
frappe.patches.v14_0.delete_data_migration_tool | |||
frappe.patches.v14_0.delete_data_migration_tool |
@@ -0,0 +1,42 @@ | |||
import frappe | |||
from frappe.core.doctype.log_settings.log_settings import clear_log_table | |||
from frappe.utils import add_to_date, today | |||
def execute():
	"""Due to large size of log tables on old sites some table cleanups never finished during daily log clean up. This patch discards such data by using "big delete" code.

	ref: https://github.com/frappe/frappe/issues/16971
	"""
	retention_by_doctype = {
		"Error Log": get_current_setting("clear_error_log_after") or 90,
		"Activity Log": get_current_setting("clear_activity_log_after") or 90,
		"Email Queue": get_current_setting("clear_email_queue_after") or 30,
		# child table on email queue
		"Email Queue Recipient": get_current_setting("clear_email_queue_after") or 30,
		"Error Snapshot": get_current_setting("clear_error_log_after") or 90,
		# newly added
		"Scheduled Job Log": 90,
		"Route History": 90,
	}

	for doctype, retention in retention_by_doctype.items():
		# Only rebuild tables whose daily cleanup evidently never caught up.
		if not is_log_cleanup_stuck(doctype, retention):
			continue
		print(f"Clearing old {doctype} records")
		clear_log_table(doctype, retention)
def is_log_cleanup_stuck(doctype: str, retention: int) -> bool:
	"""Check if doctype has data significantly older than configured cleanup period"""
	# Anything older than twice the retention window means the daily job
	# never managed to finish clearing this table.
	cutoff = add_to_date(today(), days=-2 * retention)
	return bool(frappe.db.exists(doctype, {"modified": ("<", cutoff)}))
def get_current_setting(fieldname):
	"""Best-effort read of *fieldname* from Log Settings; None when unavailable."""
	value = None
	try:
		value = frappe.db.get_single_value("Log Settings", fieldname)
	except Exception:
		# Field might be gone if patch is reattempted
		pass
	return value
@@ -14,9 +14,9 @@ def execute(): | |||
log_settings = frappe.get_doc("Log Settings") | |||
log_settings.add_default_logtypes() | |||
for doctype, days in old_settings.items(): | |||
if days: | |||
log_settings.register_doctype(doctype, days) | |||
for doctype, retention in old_settings.items(): | |||
if retention: | |||
log_settings.register_doctype(doctype, retention) | |||
log_settings.save() | |||
@@ -25,4 +25,5 @@ def get_current_setting(fieldname): | |||
try: | |||
return frappe.db.get_single_value("Log Settings", fieldname) | |||
except Exception: | |||
# Field might be gone if patch is reattempted | |||
pass |