@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
+# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
 # MIT License. See license.txt
 
 from __future__ import unicode_literals
@@ -10,21 +10,21 @@ import traceback
 
 import frappe
 import os
 import uuid
+import rq
 
 MONITOR_REDIS_KEY = "monitor-transactions"
+MONITOR_MAX_ENTRIES = 1000000
 
 
 def start(transaction_type="request", method=None, kwargs=None):
 	if frappe.conf.monitor:
-		frappe.local.monitor = Monitor(
-			transaction_type=transaction_type, method=method, kwargs=kwargs
-		)
+		frappe.local.monitor = Monitor(transaction_type, method, kwargs)
 
 
-def stop():
+def stop(response=None):
 	if frappe.conf.monitor and hasattr(frappe.local, "monitor"):
-		frappe.local.monitor.dump()
+		frappe.local.monitor.dump(response)
 
 
 def log_file():
@@ -32,75 +32,76 @@ def log_file():
 
 
 class Monitor:
-	def __init__(self, transaction_type=None, method=None, kwargs=None):
+	def __init__(self, transaction_type, method, kwargs):
 		try:
-			self.site = frappe.local.site
-			self.timestamp = datetime.utcnow()
-			self.transaction_type = transaction_type
-			self.uuid = uuid.uuid4()
-
-			if self.transaction_type == "request":
-				self.data = frappe.form_dict
-				self.headers = dict(frappe.request.headers)
-				self.ip = frappe.local.request_ip
-				self.method = frappe.request.method
-				self.path = frappe.request.path
+			self.data = frappe._dict(
+				{
+					"site": frappe.local.site,
+					"timestamp": datetime.utcnow(),
+					"transaction_type": transaction_type,
+					"uuid": str(uuid.uuid4()),
+				}
+			)
+
+			if transaction_type == "request":
+				self.collect_request_meta()
 			else:
-				self.kwargs = kwargs
-				self.method = method
+				self.collect_job_meta(method, kwargs)
 		except Exception:
 			traceback.print_exc()
 
-	def dump(self):
+	def collect_request_meta(self):
+		self.data.request = frappe._dict(
+			{
+				"ip": frappe.local.request_ip,
+				"method": frappe.request.method,
+				"path": frappe.request.path,
+			}
+		)
+
+	def collect_job_meta(self, method, kwargs):
+		self.data.job = frappe._dict({"method": method, "scheduled": False, "wait": 0})
+		if "run_scheduled_job" in method:
+			self.data.job.method = kwargs["job_type"]
+			self.data.job.scheduled = True
+
+		job = rq.get_current_job()
+		if job:
+			self.data.uuid = job.id
+			waitdiff = self.data.timestamp - job.enqueued_at
+			self.data.job.wait = int(waitdiff.total_seconds() * 1000000)
+
+	def dump(self, response=None):
 		try:
-			timediff = datetime.utcnow() - self.timestamp
+			timediff = datetime.utcnow() - self.data.timestamp
 			# Obtain duration in microseconds
-			self.duration = int(timediff.total_seconds() * 1000000)
-			data = {
-				"uuid": self.uuid,
-				"duration": self.duration,
-				"site": self.site,
-				"timestamp": self.timestamp.isoformat(sep=" "),
-				"transaction_type": self.transaction_type,
-			}
+			self.data.duration = int(timediff.total_seconds() * 1000000)
 
-			if self.transaction_type == "request":
-				update = {
-					"data": self.data,
-					"headers": self.headers,
-					"ip": self.ip,
-					"method": self.method,
-					"path": self.path,
-				}
-			else:
-				update = {
-					"kwargs": self.kwargs,
-					"method": self.method,
-				}
-			data.update(update)
-			json_data = json.dumps(data, sort_keys=True, default=str)
-			store(json_data)
+			if self.data.transaction_type == "request":
+				self.data.request.status_code = response.status_code
+				self.data.request.response_length = int(response.headers["Content-Length"])
+
+			self.store()
 		except Exception:
 			traceback.print_exc()
 
-
-def store(json_data):
-	MAX_LOGS = 1000000
-	if frappe.cache().llen(MONITOR_REDIS_KEY) > MAX_LOGS:
-		frappe.cache().ltrim(MONITOR_REDIS_KEY, 1, -1)
-	frappe.cache().rpush(MONITOR_REDIS_KEY, json_data)
+	def store(self):
+		if frappe.cache().llen(MONITOR_REDIS_KEY) > MONITOR_MAX_ENTRIES:
+			frappe.cache().ltrim(MONITOR_REDIS_KEY, 1, -1)
+		serialized = json.dumps(self.data, sort_keys=True, default=str)
+		frappe.cache().rpush(MONITOR_REDIS_KEY, serialized)
 
 
 def flush():
 	try:
 		# Fetch all the logs without removing from cache
 		logs = frappe.cache().lrange(MONITOR_REDIS_KEY, 0, -1)
-		logs = list(map(frappe.safe_decode, logs))
-		with open(log_file(), "a", os.O_NONBLOCK) as f:
-			f.write("\n".join(logs))
-			f.write("\n")
-
-		# Remove fetched entries from cache
-		frappe.cache().ltrim(MONITOR_REDIS_KEY, len(logs) - 1, -1)
+		if logs:
+			logs = list(map(frappe.safe_decode, logs))
+			with open(log_file(), "a", os.O_NONBLOCK) as f:
+				f.write("\n".join(logs))
+				f.write("\n")
+			# Remove fetched entries from cache
+			frappe.cache().ltrim(MONITOR_REDIS_KEY, len(logs) - 1, -1)
 	except Exception:
 		traceback.print_exc()
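
Note: for reference, a rough sketch of the record shape that the refactored Monitor.store() would serialize for a "request" transaction. Field names follow the diff above; the concrete values (site name, path, sizes) are illustrative assumptions, not taken from the change itself.

# Illustrative only -- approximate shape of self.data after dump(response),
# assuming a 200 response that carries a Content-Length header.
{
	"uuid": "1a2b3c4d-...",                      # str(uuid.uuid4()) set in __init__
	"site": "example.localhost",                 # frappe.local.site (assumed site name)
	"timestamp": "2020-01-01 00:00:00.000000",   # datetime.utcnow(), serialized via default=str
	"transaction_type": "request",
	"duration": 4521,                            # microseconds, computed in dump()
	"request": {
		"ip": "127.0.0.1",
		"method": "GET",
		"path": "/api/method/ping",              # assumed example path
		"status_code": 200,                      # copied from the response in dump()
		"response_length": 17,                   # int(response.headers["Content-Length"])
	},
}

Background jobs would instead carry a "job" sub-dict with "method", "scheduled", and "wait", and reuse the rq job id as "uuid" when collect_job_meta() runs inside a worker.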