@@ -105,7 +105,7 @@ def download_frappe_assets(verbose=True): | |||
if frappe_head: | |||
try: | |||
url = get_assets_link(frappe_head) | |||
click.secho("Retreiving assets...", fg="yellow") | |||
click.secho("Retrieving assets...", fg="yellow") | |||
prefix = mkdtemp(prefix="frappe-assets-", suffix=frappe_head) | |||
assets_archive = download_file(url, prefix) | |||
print("\n{0} Downloaded Frappe assets from {1}".format(green('✔'), url)) | |||
@@ -9,7 +9,7 @@ import click | |||
import frappe | |||
from frappe.commands import get_site, pass_context | |||
from frappe.exceptions import SiteNotSpecifiedError | |||
from frappe.utils import get_site_path, touch_file | |||
from frappe.installer import _new_site | |||
@click.command('new-site') | |||
@@ -42,57 +42,6 @@ def new_site(site, mariadb_root_username=None, mariadb_root_password=None, admin | |||
if len(frappe.utils.get_sites()) == 1: | |||
use(site) | |||
def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=None, | |||
admin_password=None, verbose=False, install_apps=None, source_sql=None, force=False, | |||
no_mariadb_socket=False, reinstall=False, db_password=None, db_type=None, db_host=None, | |||
db_port=None, new_site=False): | |||
"""Install a new Frappe site""" | |||
if not force and os.path.exists(site): | |||
print('Site {0} already exists'.format(site)) | |||
sys.exit(1) | |||
if no_mariadb_socket and not db_type == "mariadb": | |||
print('--no-mariadb-socket requires db_type to be set to mariadb.') | |||
sys.exit(1) | |||
if not db_name: | |||
import hashlib | |||
db_name = '_' + hashlib.sha1(site.encode()).hexdigest()[:16] | |||
from frappe.commands.scheduler import _is_scheduler_enabled | |||
from frappe.installer import install_db, make_site_dirs | |||
from frappe.installer import install_app as _install_app | |||
import frappe.utils.scheduler | |||
frappe.init(site=site) | |||
try: | |||
# enable scheduler post install? | |||
enable_scheduler = _is_scheduler_enabled() | |||
except Exception: | |||
enable_scheduler = False | |||
make_site_dirs() | |||
installing = touch_file(get_site_path('locks', 'installing.lock')) | |||
install_db(root_login=mariadb_root_username, root_password=mariadb_root_password, db_name=db_name, | |||
admin_password=admin_password, verbose=verbose, source_sql=source_sql, force=force, reinstall=reinstall, | |||
db_password=db_password, db_type=db_type, db_host=db_host, db_port=db_port, no_mariadb_socket=no_mariadb_socket) | |||
apps_to_install = ['frappe'] + (frappe.conf.get("install_apps") or []) + (list(install_apps) or []) | |||
for app in apps_to_install: | |||
_install_app(app, verbose=verbose, set_as_patched=not source_sql) | |||
os.remove(installing) | |||
frappe.utils.scheduler.toggle_scheduler(enable_scheduler) | |||
frappe.db.commit() | |||
scheduler_status = "disabled" if frappe.utils.scheduler.is_scheduler_disabled() else "enabled" | |||
print("*** Scheduler is", scheduler_status, "***") | |||
@click.command('restore') | |||
@click.argument('sql-file-path') | |||
@@ -107,33 +56,41 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N | |||
@pass_context | |||
def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_password=None, db_name=None, verbose=None, install_app=None, admin_password=None, force=None, with_public_files=None, with_private_files=None): | |||
"Restore site database from an sql file" | |||
from frappe.installer import extract_sql_gzip, extract_files, is_downgrade, validate_database_sql | |||
force = context.force or force | |||
from frappe.installer import ( | |||
extract_sql_from_archive, | |||
extract_files, | |||
is_downgrade, | |||
is_partial, | |||
validate_database_sql | |||
) | |||
# Extract the gzip file if user has passed *.sql.gz file instead of *.sql file | |||
if not os.path.exists(sql_file_path): | |||
base_path = '..' | |||
sql_file_path = os.path.join(base_path, sql_file_path) | |||
if not os.path.exists(sql_file_path): | |||
print('Invalid path {0}'.format(sql_file_path[3:])) | |||
sys.exit(1) | |||
elif sql_file_path.startswith(os.sep): | |||
base_path = os.sep | |||
else: | |||
base_path = '.' | |||
force = context.force or force | |||
decompressed_file_name = extract_sql_from_archive(sql_file_path) | |||
if sql_file_path.endswith('sql.gz'): | |||
decompressed_file_name = extract_sql_gzip(os.path.abspath(sql_file_path)) | |||
else: | |||
decompressed_file_name = sql_file_path | |||
# check if partial backup | |||
if is_partial(decompressed_file_name): | |||
click.secho( | |||
"Partial Backup file detected. You cannot use a partial file to restore a Frappe Site.", | |||
fg="red" | |||
) | |||
click.secho( | |||
"Use `bench partial-restore` to restore a partial backup to an existing site.", | |||
fg="yellow" | |||
) | |||
sys.exit(1) | |||
# check if valid SQL file | |||
validate_database_sql(decompressed_file_name, _raise=not force) | |||
site = get_site(context) | |||
frappe.init(site=site) | |||
# don't allow downgrading to older versions of frappe without force
if not force and is_downgrade(decompressed_file_name, verbose=True): | |||
warn_message = "This is not recommended and may lead to unexpected behaviour. Do you want to continue anyway?" | |||
warn_message = ( | |||
"This is not recommended and may lead to unexpected behaviour. " | |||
"Do you want to continue anyway?" | |||
) | |||
click.confirm(warn_message, abort=True) | |||
_new_site(frappe.conf.db_name, site, mariadb_root_username=mariadb_root_username, | |||
@@ -156,9 +113,28 @@ def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_pas | |||
if decompressed_file_name != sql_file_path: | |||
os.remove(decompressed_file_name) | |||
success_message = "Site {0} has been restored{1}".format(site, " with files" if (with_public_files or with_private_files) else "") | |||
success_message = "Site {0} has been restored{1}".format( | |||
site, | |||
" with files" if (with_public_files or with_private_files) else "" | |||
) | |||
click.secho(success_message, fg="green") | |||
@click.command('partial-restore') | |||
@click.argument('sql-file-path') | |||
@click.option("--verbose", "-v", is_flag=True) | |||
@pass_context | |||
def partial_restore(context, sql_file_path, verbose): | |||
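# restores a partial database dump (e.g. one created with `bench backup --only/--exclude`) into an existing site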
from frappe.installer import partial_restore | |||
verbose = context.verbose or verbose | |||
site = get_site(context) | |||
frappe.init(site=site) | |||
frappe.connect(site=site) | |||
partial_restore(sql_file_path, verbose) | |||
frappe.destroy() | |||
@click.command('reinstall') | |||
@click.option('--admin-password', help='Administrator Password for reinstalled site') | |||
@click.option('--mariadb-root-username', help='Root username for MariaDB') | |||
@@ -416,16 +392,20 @@ def use(site, sites_path='.'): | |||
@click.command('backup') | |||
@click.option('--with-files', default=False, is_flag=True, help="Take backup with files") | |||
@click.option('--include', '--only', '-i', default="", type=str, help="Specify the DocTypes to backup, separated by commas")
@click.option('--exclude', '-e', default="", type=str, help="Specify the DocTypes to exclude from backup, separated by commas")
@click.option('--backup-path', default=None, help="Set path for saving all the files in this operation") | |||
@click.option('--backup-path-db', default=None, help="Set path for saving database file") | |||
@click.option('--backup-path-files', default=None, help="Set path for saving public file") | |||
@click.option('--backup-path-private-files', default=None, help="Set path for saving private file") | |||
@click.option('--backup-path-conf', default=None, help="Set path for saving config file") | |||
@click.option('--ignore-backup-conf', default=False, is_flag=True, help="Ignore excludes/includes set in config") | |||
@click.option('--verbose', default=False, is_flag=True, help="Add verbosity") | |||
@click.option('--compress', default=False, is_flag=True, help="Compress private and public files") | |||
@pass_context | |||
def backup(context, with_files=False, backup_path=None, backup_path_db=None, backup_path_files=None, | |||
backup_path_private_files=None, backup_path_conf=None, verbose=False, compress=False): | |||
backup_path_private_files=None, backup_path_conf=None, ignore_backup_conf=False, verbose=False, | |||
compress=False, include="", exclude=""): | |||
"Backup" | |||
from frappe.utils.backups import scheduled_backup | |||
verbose = verbose or context.verbose | |||
@@ -435,11 +415,27 @@ def backup(context, with_files=False, backup_path=None, backup_path_db=None, bac | |||
try: | |||
frappe.init(site=site) | |||
frappe.connect() | |||
odb = scheduled_backup(ignore_files=not with_files, backup_path=backup_path, backup_path_db=backup_path_db, backup_path_files=backup_path_files, backup_path_private_files=backup_path_private_files, backup_path_conf=backup_path_conf, force=True, verbose=verbose, compress=compress) | |||
odb = scheduled_backup( | |||
ignore_files=not with_files, | |||
backup_path=backup_path, | |||
backup_path_db=backup_path_db, | |||
backup_path_files=backup_path_files, | |||
backup_path_private_files=backup_path_private_files, | |||
backup_path_conf=backup_path_conf, | |||
ignore_conf=ignore_backup_conf, | |||
include_doctypes=include, | |||
exclude_doctypes=exclude, | |||
compress=compress, | |||
verbose=verbose, | |||
force=True | |||
) | |||
except Exception: | |||
click.secho("Backup failed for Site {0}. Database or site_config.json may be corrupted".format(site), fg="red") | |||
if verbose: | |||
print(frappe.get_traceback()) | |||
exit_code = 1 | |||
continue | |||
odb.print_summary() | |||
click.secho("Backup for Site {0} has been successfully completed{1}".format(site, " with files" if with_files else ""), fg="green") | |||
frappe.destroy() | |||
@@ -512,13 +508,14 @@ def _drop_site(site, root_login='root', root_password=None, archived_sites_path= | |||
if force: | |||
pass | |||
else: | |||
click.echo("="*80) | |||
click.echo("Error: The operation has stopped because backup of {s}'s database failed.".format(s=site)) | |||
click.echo("Reason: {reason}{sep}".format(reason=str(err), sep="\n")) | |||
click.echo("Fix the issue and try again.") | |||
click.echo( | |||
"Hint: Use 'bench drop-site {s} --force' to force the removal of {s}".format(sep="\n", tab="\t", s=site) | |||
) | |||
messages = [ | |||
"=" * 80, | |||
"Error: The operation has stopped because backup of {0}'s database failed.".format(site), | |||
"Reason: {0}\n".format(str(err)), | |||
"Fix the issue and try again.", | |||
"Hint: Use 'bench drop-site {0} --force' to force the removal of {0}".format(site) | |||
] | |||
click.echo("\n".join(messages)) | |||
sys.exit(1) | |||
drop_user_and_database(frappe.conf.db_name, root_login, root_password) | |||
@@ -734,5 +731,6 @@ commands = [ | |||
stop_recording, | |||
add_to_hosts, | |||
start_ngrok, | |||
build_search_index | |||
build_search_index, | |||
partial_restore | |||
] |
@@ -3,7 +3,6 @@ import frappe | |||
class DbManager: | |||
def __init__(self, db): | |||
""" | |||
Pass root_conn here for access to all databases. | |||
@@ -66,10 +65,10 @@ class DbManager: | |||
esc = make_esc('$ ') | |||
from distutils.spawn import find_executable | |||
pipe = find_executable('pv') | |||
if pipe: | |||
pipe = '{pipe} {source} |'.format( | |||
pipe=pipe, | |||
pv = find_executable('pv') | |||
if pv: | |||
pipe = '{pv} {source} |'.format( | |||
pv=pv, | |||
source=source | |||
) | |||
source = '' | |||
@@ -78,7 +77,7 @@ class DbManager: | |||
source = '< {source}'.format(source=source) | |||
if pipe: | |||
print('Creating Database...') | |||
print('Restoring Database file...') | |||
command = '{pipe} mysql -u {user} -p{password} -h{host} ' + ('-P{port}' if frappe.db.port else '') + ' {target} {source}' | |||
command = command.format( | |||
@@ -1,7 +1,7 @@ | |||
from __future__ import unicode_literals | |||
import frappe | |||
import os, sys | |||
import os | |||
from frappe.database.db_manager import DbManager | |||
expected_settings_10_2_earlier = { | |||
@@ -86,6 +86,8 @@ def drop_user_and_database(db_name, root_login, root_password): | |||
dbman.drop_database(db_name) | |||
def bootstrap_database(db_name, verbose, source_sql=None): | |||
import sys | |||
frappe.connect(db_name=db_name) | |||
if not check_database_settings(): | |||
print('Database settings do not match expected values; stopping database setup.') | |||
@@ -94,9 +96,17 @@ def bootstrap_database(db_name, verbose, source_sql=None): | |||
import_db_from_sql(source_sql, verbose) | |||
frappe.connect(db_name=db_name) | |||
if not 'tabDefaultValue' in frappe.db.get_tables(): | |||
print('''Database not installed, this can due to lack of permission, or that the database name exists. | |||
Check your mysql root password, or use --force to reinstall''') | |||
if 'tabDefaultValue' not in frappe.db.get_tables(): | |||
from click import secho | |||
secho( | |||
"Table 'tabDefaultValue' missing in the restored site. " | |||
"Database not installed correctly, this can due to lack of " | |||
"permission, or that the database name exists. Check your mysql" | |||
" root password, validity of the backup file or use --force to" | |||
" reinstall", | |||
fg="red" | |||
) | |||
sys.exit(1) | |||
def import_db_from_sql(source_sql=None, verbose=False): | |||
@@ -1,5 +1,7 @@ | |||
import frappe, subprocess, os | |||
from six.moves import input | |||
import os | |||
import frappe | |||
def setup_database(force, source_sql=None, verbose=False): | |||
root_conn = get_root_connection() | |||
@@ -10,24 +12,62 @@ def setup_database(force, source_sql=None, verbose=False): | |||
root_conn.sql("CREATE user {0} password '{1}'".format(frappe.conf.db_name, | |||
frappe.conf.db_password)) | |||
root_conn.sql("GRANT ALL PRIVILEGES ON DATABASE `{0}` TO {0}".format(frappe.conf.db_name)) | |||
root_conn.close() | |||
bootstrap_database(frappe.conf.db_name, verbose, source_sql=source_sql) | |||
frappe.connect() | |||
def bootstrap_database(db_name, verbose, source_sql=None): | |||
frappe.connect(db_name=db_name) | |||
import_db_from_sql(source_sql, verbose) | |||
frappe.connect(db_name=db_name) | |||
if 'tabDefaultValue' not in frappe.db.get_tables(): | |||
import sys | |||
from click import secho | |||
secho( | |||
"Table 'tabDefaultValue' missing in the restored site. " | |||
"This may be due to incorrect permissions or the result of a restore from a bad backup file. " | |||
"Database not installed correctly.", | |||
fg="red" | |||
) | |||
sys.exit(1) | |||
def import_db_from_sql(source_sql=None, verbose=False): | |||
from shutil import which | |||
from subprocess import run, PIPE | |||
# unlike mysql, psql does not accept the password as a command line
# argument, so pass it via the PGPASSWORD environment variable instead
subprocess_env = os.environ.copy() | |||
subprocess_env['PGPASSWORD'] = str(frappe.conf.db_password) | |||
# bootstrap db | |||
if not source_sql: | |||
source_sql = os.path.join(os.path.dirname(__file__), 'framework_postgres.sql') | |||
subprocess.check_output([ | |||
'psql', frappe.conf.db_name, | |||
'-h', frappe.conf.db_host or 'localhost', | |||
'-p', str(frappe.conf.db_port or '5432'), | |||
'-U', frappe.conf.db_name, | |||
'-f', source_sql | |||
], env=subprocess_env) | |||
pv = which('pv') | |||
frappe.connect() | |||
_command = ( | |||
f"psql {frappe.conf.db_name} " | |||
f"-h {frappe.conf.db_host or 'localhost'} -p {str(frappe.conf.db_port or '5432')} " | |||
f"-U {frappe.conf.db_name}" | |||
) | |||
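# if pv (pipe viewer) is available, stream the file through it to show restore
# progress; otherwise fall back to letting psql read the file directly via -f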
if pv: | |||
command = f"{pv} {source_sql} | " + _command | |||
else: | |||
command = _command + f" -f {source_sql}" | |||
print("Restoring Database file...") | |||
if verbose: | |||
print(command) | |||
restore_proc = run(command, env=subprocess_env, shell=True, stdout=PIPE) | |||
if verbose: | |||
print(f"\nSTDOUT by psql:\n{restore_proc.stdout.decode()}\nImported from Database File: {source_sql}") | |||
def setup_help_database(help_db_name): | |||
root_conn = get_root_connection() | |||
@@ -38,19 +78,20 @@ def setup_help_database(help_db_name): | |||
root_conn.sql("GRANT ALL PRIVILEGES ON DATABASE `{0}` TO {0}".format(help_db_name)) | |||
def get_root_connection(root_login=None, root_password=None): | |||
import getpass | |||
if not frappe.local.flags.root_connection: | |||
if not root_login: | |||
root_login = frappe.conf.get("root_login") or None | |||
if not root_login: | |||
from six.moves import input | |||
root_login = input("Enter postgres super user: ") | |||
if not root_password: | |||
root_password = frappe.conf.get("root_password") or None | |||
if not root_password: | |||
root_password = getpass.getpass("Postgres super user password: ") | |||
from getpass import getpass | |||
root_password = getpass("Postgres super user password: ") | |||
frappe.local.flags.root_connection = frappe.database.get_db(user=root_login, password=root_password) | |||
@@ -111,4 +111,5 @@ class AttachmentLimitReached(Exception): pass | |||
class InvalidAuthorizationHeader(CSRFTokenError): pass | |||
class InvalidAuthorizationPrefix(CSRFTokenError): pass | |||
class InvalidAuthorizationToken(CSRFTokenError): pass | |||
class InvalidDatabaseFile(ValidationError): pass | |||
class InvalidDatabaseFile(ValidationError): pass | |||
class ExecutableNotFound(FileNotFoundError): pass |
@@ -3,8 +3,90 @@ | |||
import json | |||
import os | |||
from frappe.defaults import _clear_cache | |||
import sys | |||
import frappe | |||
from frappe.defaults import _clear_cache | |||
def _new_site( | |||
db_name, | |||
site, | |||
mariadb_root_username=None, | |||
mariadb_root_password=None, | |||
admin_password=None, | |||
verbose=False, | |||
install_apps=None, | |||
source_sql=None, | |||
force=False, | |||
no_mariadb_socket=False, | |||
reinstall=False, | |||
db_password=None, | |||
db_type=None, | |||
db_host=None, | |||
db_port=None, | |||
new_site=False, | |||
): | |||
"""Install a new Frappe site""" | |||
if not force and os.path.exists(site): | |||
print("Site {0} already exists".format(site)) | |||
sys.exit(1) | |||
if no_mariadb_socket and not db_type == "mariadb": | |||
print("--no-mariadb-socket requires db_type to be set to mariadb.") | |||
sys.exit(1) | |||
if not db_name: | |||
import hashlib | |||
db_name = "_" + hashlib.sha1(site.encode()).hexdigest()[:16] | |||
frappe.init(site=site) | |||
from frappe.commands.scheduler import _is_scheduler_enabled | |||
from frappe.utils import get_site_path, scheduler, touch_file | |||
try: | |||
# enable scheduler post install? | |||
enable_scheduler = _is_scheduler_enabled() | |||
except Exception: | |||
enable_scheduler = False | |||
make_site_dirs() | |||
installing = touch_file(get_site_path("locks", "installing.lock")) | |||
install_db( | |||
root_login=mariadb_root_username, | |||
root_password=mariadb_root_password, | |||
db_name=db_name, | |||
admin_password=admin_password, | |||
verbose=verbose, | |||
source_sql=source_sql, | |||
force=force, | |||
reinstall=reinstall, | |||
db_password=db_password, | |||
db_type=db_type, | |||
db_host=db_host, | |||
db_port=db_port, | |||
no_mariadb_socket=no_mariadb_socket, | |||
) | |||
apps_to_install = ( | |||
["frappe"] + (frappe.conf.get("install_apps") or []) + (list(install_apps) or []) | |||
) | |||
for app in apps_to_install: | |||
install_app(app, verbose=verbose, set_as_patched=not source_sql) | |||
os.remove(installing) | |||
scheduler.toggle_scheduler(enable_scheduler) | |||
frappe.db.commit() | |||
scheduler_status = ( | |||
"disabled" if frappe.utils.scheduler.is_scheduler_disabled() else "enabled" | |||
) | |||
print("*** Scheduler is", scheduler_status, "***") | |||
def install_db(root_login="root", root_password=None, db_name=None, source_sql=None, | |||
@@ -36,9 +118,9 @@ def install_db(root_login="root", root_password=None, db_name=None, source_sql=N | |||
def install_app(name, verbose=False, set_as_patched=True): | |||
from frappe.core.doctype.scheduled_job_type.scheduled_job_type import sync_jobs | |||
from frappe.utils.fixtures import sync_fixtures | |||
from frappe.model.sync import sync_for | |||
from frappe.modules.utils import sync_customizations | |||
from frappe.utils.fixtures import sync_fixtures | |||
frappe.flags.in_install = name | |||
frappe.flags.ignore_in_install = False | |||
@@ -347,6 +429,37 @@ def remove_missing_apps(): | |||
frappe.db.set_global("installed_apps", json.dumps(installed_apps)) | |||
def extract_sql_from_archive(sql_file_path): | |||
"""Return the path of an SQL file if the passed argument is the path of a gzipped | |||
SQL file or an SQL file path. The path may be absolute or relative from the bench | |||
root directory or the sites sub-directory. | |||
Args: | |||
sql_file_path (str): Path of the SQL file | |||
Returns: | |||
str: Path of the decompressed SQL file | |||
""" | |||
# Extract the gzip file if user has passed *.sql.gz file instead of *.sql file | |||
if not os.path.exists(sql_file_path): | |||
base_path = '..' | |||
sql_file_path = os.path.join(base_path, sql_file_path) | |||
if not os.path.exists(sql_file_path): | |||
print('Invalid path {0}'.format(sql_file_path[3:])) | |||
sys.exit(1) | |||
elif sql_file_path.startswith(os.sep): | |||
base_path = os.sep | |||
else: | |||
base_path = '.' | |||
if sql_file_path.endswith('sql.gz'): | |||
decompressed_file_name = extract_sql_gzip(os.path.abspath(sql_file_path)) | |||
else: | |||
decompressed_file_name = sql_file_path | |||
return decompressed_file_name | |||
def extract_sql_gzip(sql_gz_path): | |||
import subprocess | |||
@@ -361,9 +474,10 @@ def extract_sql_gzip(sql_gz_path): | |||
return decompressed_file | |||
def extract_files(site_name, file_path, folder_name): | |||
import subprocess | |||
import shutil | |||
import subprocess | |||
# Need to do frappe.init to maintain the site locals | |||
frappe.init(site=site_name) | |||
@@ -391,6 +505,12 @@ def extract_files(site_name, file_path, folder_name): | |||
def is_downgrade(sql_file_path, verbose=False): | |||
"""checks if input db backup will get downgraded on current bench""" | |||
# This function is only tested with mariadb | |||
# TODO: Add postgres support | |||
if frappe.conf.db_type not in (None, "mariadb"): | |||
return False | |||
from semantic_version import Version | |||
head = "INSERT INTO `tabInstalled Application` VALUES" | |||
@@ -424,6 +544,37 @@ def is_downgrade(sql_file_path, verbose=False): | |||
return downgrade | |||
def is_partial(sql_file_path): | |||
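# BackupGenerator.take_dump writes a "-- Partial Backup of Frappe Site <site>" header
# at the top of partial dumps, so scanning the first few lines is enough to detect one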
with open(sql_file_path) as f: | |||
header = " ".join([f.readline() for _ in range(5)]) | |||
if "Partial Backup" in header: | |||
return True | |||
return False | |||
def partial_restore(sql_file_path, verbose=False): | |||
sql_file = extract_sql_from_archive(sql_file_path) | |||
if frappe.conf.db_type in (None, "mariadb"): | |||
from frappe.database.mariadb.setup_db import import_db_from_sql | |||
elif frappe.conf.db_type == "postgres": | |||
from frappe.database.postgres.setup_db import import_db_from_sql | |||
import warnings | |||
from click import style | |||
warn = style( | |||
"Delete the tables you want to restore manually before attempting" | |||
" partial restore operation for PostreSQL databases", | |||
fg="yellow" | |||
) | |||
warnings.warn(warn) | |||
import_db_from_sql(source_sql=sql_file, verbose=verbose) | |||
# Removing temporarily created file | |||
if sql_file != sql_file_path: | |||
os.remove(sql_file) | |||
def validate_database_sql(path, _raise=True): | |||
"""Check if file has contents and if DefaultValue table exists | |||
@@ -6,7 +6,7 @@ import frappe | |||
def frappecloud_migrator(local_site): | |||
print("Retreiving Site Migrator...") | |||
print("Retrieving Site Migrator...") | |||
remote_site = frappe.conf.frappecloud_url or "frappecloud.com" | |||
request_url = "https://{}/api/method/press.api.script".format(remote_site) | |||
request = requests.get(request_url) | |||
@@ -1,24 +1,88 @@ | |||
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors | |||
# imports - standard imports | |||
import gzip | |||
import json | |||
import os | |||
import shlex | |||
import subprocess | |||
import sys | |||
import unittest | |||
from glob import glob | |||
import glob | |||
# imports - module imports | |||
import frappe | |||
from frappe.utils.backups import fetch_latest_backups | |||
import frappe.recorder | |||
from frappe.installer import add_to_installed_apps | |||
from frappe.utils import add_to_date, now | |||
from frappe.utils.backups import fetch_latest_backups | |||
# TODO: check frappe.cli.coloured_output to set coloured output! | |||
def supports_color(): | |||
""" | |||
Returns True if the running system's terminal supports color, and False | |||
otherwise. | |||
""" | |||
plat = sys.platform | |||
supported_platform = plat != 'Pocket PC' and (plat != 'win32' or 'ANSICON' in os.environ) | |||
# isatty is not always implemented, #6223. | |||
is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty() | |||
return supported_platform and is_a_tty | |||
class color(dict): | |||
nc = "\033[0m" | |||
blue = "\033[94m" | |||
green = "\033[92m" | |||
yellow = "\033[93m" | |||
red = "\033[91m" | |||
silver = "\033[90m" | |||
def __getattr__(self, key): | |||
if supports_color(): | |||
ret = self.get(key) | |||
else: | |||
ret = "" | |||
return ret | |||
def clean(value): | |||
if isinstance(value, (bytes, str)): | |||
value = value.decode().strip() | |||
"""Strips and converts bytes to str | |||
Args: | |||
value ([type]): [description] | |||
Returns: | |||
[type]: [description] | |||
""" | |||
if isinstance(value, bytes): | |||
value = value.decode() | |||
if isinstance(value, str): | |||
value = value.strip() | |||
return value | |||
def exists_in_backup(doctypes, file): | |||
"""Checks if the list of doctypes exist in the database.sql.gz file supplied | |||
Args: | |||
doctypes (list): List of DocTypes to be checked | |||
file (str): Path of the database file | |||
Returns: | |||
bool: True if all tables exist | |||
""" | |||
predicate = ( | |||
'COPY public."tab{}"' | |||
if frappe.conf.db_type == "postgres" | |||
else "CREATE TABLE `tab{}`" | |||
) | |||
with gzip.open(file, "rb") as f: | |||
content = f.read().decode("utf8") | |||
return all([predicate.format(doctype).lower() in content.lower() for doctype in doctypes]) | |||
class BaseTestCommands(unittest.TestCase): | |||
def execute(self, command, kwargs=None): | |||
site = {"site": frappe.local.site} | |||
@@ -26,13 +90,26 @@ class BaseTestCommands(unittest.TestCase): | |||
kwargs.update(site) | |||
else: | |||
kwargs = site | |||
command = command.replace("\n", " ").format(**kwargs) | |||
command = shlex.split(command) | |||
self.command = " ".join(command.split()).format(**kwargs) | |||
print("{0}$ {1}{2}".format(color.silver, self.command, color.nc)) | |||
command = shlex.split(self.command) | |||
self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) | |||
self.stdout = clean(self._proc.stdout) | |||
self.stderr = clean(self._proc.stderr) | |||
self.returncode = clean(self._proc.returncode) | |||
def _formatMessage(self, msg, standardMsg): | |||
output = super(BaseTestCommands, self)._formatMessage(msg, standardMsg) | |||
cmd_execution_summary = "\n".join([ | |||
"-" * 70, | |||
"Last Command Execution Summary:", | |||
"Command: {}".format(self.command) if self.command else "", | |||
"Standard Output: {}".format(self.stdout) if self.stdout else "", | |||
"Standard Error: {}".format(self.stderr) if self.stderr else "", | |||
"Return Code: {}".format(self.returncode) if self.returncode else "", | |||
]).strip() | |||
return "{}\n\n{}".format(output, cmd_execution_summary) | |||
class TestCommands(BaseTestCommands): | |||
def test_execute(self): | |||
@@ -52,9 +129,24 @@ class TestCommands(BaseTestCommands): | |||
# The returned value has quotes which have been trimmed for the test | |||
self.execute("""bench --site {site} execute frappe.bold --kwargs '{{"text": "DocType"}}'""") | |||
self.assertEquals(self.returncode, 0) | |||
self.assertEquals(self.stdout[1:-1], frappe.bold(text='DocType')) | |||
self.assertEquals(self.stdout[1:-1], frappe.bold(text="DocType")) | |||
def test_backup(self): | |||
backup = { | |||
"includes": { | |||
"includes": [ | |||
"ToDo", | |||
"Note", | |||
] | |||
}, | |||
"excludes": { | |||
"excludes": [ | |||
"Activity Log", | |||
"Access Log", | |||
"Error Log" | |||
] | |||
} | |||
} | |||
home = os.path.expanduser("~") | |||
site_backup_path = frappe.utils.get_site_path("private", "backups") | |||
@@ -94,16 +186,19 @@ class TestCommands(BaseTestCommands): | |||
"db_path": "database.sql.gz", | |||
"files_path": "public.tar", | |||
"private_path": "private.tar", | |||
"conf_path": "config.json" | |||
"conf_path": "config.json", | |||
}.items() | |||
} | |||
self.execute("""bench | |||
self.execute( | |||
"""bench | |||
--site {site} backup --with-files | |||
--backup-path-db {db_path} | |||
--backup-path-files {files_path} | |||
--backup-path-private-files {private_path} | |||
--backup-path-conf {conf_path}""", kwargs) | |||
--backup-path-conf {conf_path}""", | |||
kwargs, | |||
) | |||
self.assertEquals(self.returncode, 0) | |||
for path in kwargs.values(): | |||
@@ -111,16 +206,122 @@ class TestCommands(BaseTestCommands): | |||
# test 5: take a backup with --compress | |||
self.execute("bench --site {site} backup --with-files --compress") | |||
self.assertEquals(self.returncode, 0) | |||
compressed_files = glob(site_backup_path + "/*.tgz") | |||
compressed_files = glob.glob(site_backup_path + "/*.tgz") | |||
self.assertGreater(len(compressed_files), 0) | |||
# test 6: take a backup with --verbose | |||
self.execute("bench --site {site} backup --verbose") | |||
self.assertEquals(self.returncode, 0) | |||
# test 7: take a backup with frappe.conf.backup.includes | |||
self.execute( | |||
"bench --site {site} set-config backup '{includes}' --as-dict", | |||
{"includes": json.dumps(backup["includes"])}, | |||
) | |||
self.execute("bench --site {site} backup --verbose") | |||
self.assertEquals(self.returncode, 0) | |||
database = fetch_latest_backups(partial=True)["database"] | |||
self.assertTrue(exists_in_backup(backup["includes"]["includes"], database)) | |||
# test 8: take a backup with frappe.conf.backup.excludes | |||
self.execute( | |||
"bench --site {site} set-config backup '{excludes}' --as-dict", | |||
{"excludes": json.dumps(backup["excludes"])}, | |||
) | |||
self.execute("bench --site {site} backup --verbose") | |||
self.assertEquals(self.returncode, 0) | |||
database = fetch_latest_backups(partial=True)["database"] | |||
self.assertFalse(exists_in_backup(backup["excludes"]["excludes"], database)) | |||
self.assertTrue(exists_in_backup(backup["includes"]["includes"], database)) | |||
# test 9: take a backup with --include (with frappe.conf.excludes still set) | |||
self.execute( | |||
"bench --site {site} backup --include '{include}'", | |||
{"include": ",".join(backup["includes"]["includes"])}, | |||
) | |||
self.assertEquals(self.returncode, 0) | |||
database = fetch_latest_backups(partial=True)["database"] | |||
self.assertTrue(exists_in_backup(backup["includes"]["includes"], database)) | |||
# test 10: take a backup with --exclude | |||
self.execute( | |||
"bench --site {site} backup --exclude '{exclude}'", | |||
{"exclude": ",".join(backup["excludes"]["excludes"])}, | |||
) | |||
self.assertEquals(self.returncode, 0) | |||
database = fetch_latest_backups(partial=True)["database"] | |||
self.assertFalse(exists_in_backup(backup["excludes"]["excludes"], database)) | |||
# test 11: take a backup with --ignore-backup-conf | |||
self.execute("bench --site {site} backup --ignore-backup-conf") | |||
self.assertEquals(self.returncode, 0) | |||
database = fetch_latest_backups()["database"] | |||
self.assertTrue(exists_in_backup(backup["excludes"]["excludes"], database)) | |||
def test_restore(self): | |||
# step 0: create a site to run the test on | |||
global_config = { | |||
"admin_password": frappe.conf.admin_password, | |||
"root_login": frappe.conf.root_login, | |||
"root_password": frappe.conf.root_password, | |||
"db_type": frappe.conf.db_type, | |||
} | |||
site_data = {"another_site": f"{frappe.local.site}-restore.test", **global_config} | |||
for key, value in global_config.items(): | |||
if value: | |||
self.execute(f"bench set-config {key} {value} -g") | |||
self.execute( | |||
"bench new-site {another_site} --admin-password {admin_password} --db-type" | |||
" {db_type}", | |||
site_data, | |||
) | |||
# test 1: bench restore from full backup | |||
self.execute("bench --site {another_site} backup --ignore-backup-conf", site_data) | |||
self.execute( | |||
"bench --site {another_site} execute frappe.utils.backups.fetch_latest_backups", | |||
site_data, | |||
) | |||
site_data.update({"database": json.loads(self.stdout)["database"]}) | |||
self.execute("bench --site {another_site} restore {database}", site_data) | |||
# test 2: restore from partial backup | |||
self.execute("bench --site {another_site} backup --exclude 'ToDo'", site_data) | |||
site_data.update({"kw": "\"{'partial':True}\""}) | |||
self.execute( | |||
"bench --site {another_site} execute" | |||
" frappe.utils.backups.fetch_latest_backups --kwargs {kw}", | |||
site_data, | |||
) | |||
site_data.update({"database": json.loads(self.stdout)["database"]}) | |||
self.execute("bench --site {another_site} restore {database}", site_data) | |||
self.assertEquals(self.returncode, 1) | |||
def test_partial_restore(self): | |||
_now = now() | |||
for num in range(10): | |||
frappe.get_doc({ | |||
"doctype": "ToDo", | |||
"date": add_to_date(_now, days=num), | |||
"description": frappe.mock("paragraph") | |||
}).insert() | |||
frappe.db.commit() | |||
todo_count = frappe.db.count("ToDo") | |||
# check if todos exist, create a partial backup and see if the state is the same after restore | |||
self.assertIsNot(todo_count, 0) | |||
self.execute("bench --site {site} backup --only 'ToDo'") | |||
db_path = fetch_latest_backups(partial=True)["database"] | |||
self.assertTrue("partial" in db_path) | |||
frappe.db.sql_ddl("DROP TABLE IF EXISTS `tabToDo`") | |||
frappe.db.commit() | |||
self.execute("bench --site {site} partial-restore {path}", {"path": db_path}) | |||
self.assertEquals(self.returncode, 0) | |||
self.assertEquals(frappe.db.count("ToDo"), todo_count) | |||
def test_recorder(self): | |||
frappe.recorder.stop() | |||
@@ -133,7 +334,6 @@ class TestCommands(BaseTestCommands): | |||
self.assertEqual(frappe.recorder.status(), False) | |||
def test_remove_from_installed_apps(self): | |||
from frappe.installer import add_to_installed_apps | |||
app = "test_remove_app" | |||
add_to_installed_apps(app) | |||
@@ -2,11 +2,12 @@ | |||
# MIT License. See license.txt | |||
# imports - standard imports | |||
import json | |||
import gzip | |||
import os | |||
from calendar import timegm | |||
from datetime import datetime | |||
from glob import glob | |||
from shutil import which | |||
# imports - third party imports | |||
import click | |||
@@ -14,24 +15,42 @@ import click | |||
# imports - module imports | |||
import frappe | |||
from frappe import _, conf | |||
from frappe.utils import get_url, now, now_datetime, get_file_size | |||
from frappe.utils import get_file_size, get_url, now, now_datetime | |||
# backup variable for backwards compatibility | |||
verbose = False | |||
compress = False | |||
_verbose = verbose | |||
base_tables = ["__Auth", "__global_search", "__UserSettings"] | |||
class BackupGenerator: | |||
""" | |||
This class contains methods to perform On Demand Backup | |||
This class contains methods to perform On Demand Backup | |||
To initialize, specify (db_name, user, password, db_file_name=None, db_host="localhost") | |||
If specifying db_file_name, also append ".sql.gz" | |||
To initialize, specify (db_name, user, password, db_file_name=None, db_host="localhost") | |||
If specifying db_file_name, also append ".sql.gz" | |||
""" | |||
def __init__(self, db_name, user, password, backup_path=None, backup_path_db=None, | |||
backup_path_files=None, backup_path_private_files=None, db_host="localhost", db_port=None, | |||
verbose=False, db_type='mariadb', backup_path_conf=None, compress_files=False): | |||
def __init__( | |||
self, | |||
db_name, | |||
user, | |||
password, | |||
backup_path=None, | |||
backup_path_db=None, | |||
backup_path_files=None, | |||
backup_path_private_files=None, | |||
db_host="localhost", | |||
db_port=None, | |||
db_type="mariadb", | |||
backup_path_conf=None, | |||
ignore_conf=False, | |||
compress_files=False, | |||
include_doctypes="", | |||
exclude_doctypes="", | |||
verbose=False, | |||
): | |||
global _verbose | |||
self.compress_files = compress_files or compress | |||
self.db_host = db_host | |||
@@ -45,23 +64,35 @@ class BackupGenerator: | |||
self.backup_path_db = backup_path_db | |||
self.backup_path_files = backup_path_files | |||
self.backup_path_private_files = backup_path_private_files | |||
self.ignore_conf = ignore_conf | |||
self.include_doctypes = include_doctypes | |||
self.exclude_doctypes = exclude_doctypes | |||
self.partial = False | |||
if not self.db_type: | |||
self.db_type = 'mariadb' | |||
self.db_type = "mariadb" | |||
if not self.db_port and self.db_type == 'mariadb': | |||
self.db_port = 3306 | |||
elif not self.db_port and self.db_type == 'postgres': | |||
self.db_port = 5432 | |||
if not self.db_port: | |||
if self.db_type == "mariadb": | |||
self.db_port = 3306 | |||
if self.db_type == "postgres": | |||
self.db_port = 5432 | |||
site = frappe.local.site or frappe.generate_hash(length=8) | |||
self.site_slug = site.replace('.', '_') | |||
self.site_slug = site.replace(".", "_") | |||
self.verbose = verbose | |||
self.setup_backup_directory() | |||
self.setup_backup_tables() | |||
_verbose = verbose | |||
def setup_backup_directory(self): | |||
specified = self.backup_path or self.backup_path_db or self.backup_path_files or self.backup_path_private_files or self.backup_path_conf | |||
specified = ( | |||
self.backup_path | |||
or self.backup_path_db | |||
or self.backup_path_files | |||
or self.backup_path_private_files | |||
or self.backup_path_conf | |||
) | |||
if not specified: | |||
backups_folder = get_backup_path() | |||
@@ -71,32 +102,93 @@ class BackupGenerator: | |||
if self.backup_path: | |||
os.makedirs(self.backup_path, exist_ok=True) | |||
for file_path in set([self.backup_path_files, self.backup_path_db, self.backup_path_private_files, self.backup_path_conf]): | |||
for file_path in set( | |||
[ | |||
self.backup_path_files, | |||
self.backup_path_db, | |||
self.backup_path_private_files, | |||
self.backup_path_conf, | |||
] | |||
): | |||
if file_path: | |||
dir = os.path.dirname(file_path) | |||
os.makedirs(dir, exist_ok=True) | |||
def setup_backup_tables(self): | |||
"""Sets self.backup_includes, self.backup_excludes based on passed args""" | |||
existing_doctypes = set([x.name for x in frappe.get_all("DocType")]) | |||
def get_tables(doctypes): | |||
tables = [] | |||
for doctype in doctypes: | |||
if doctype and doctype in existing_doctypes: | |||
if doctype.startswith("tab"): | |||
tables.append(doctype) | |||
else: | |||
tables.append("tab" + doctype) | |||
return tables | |||
passed_tables = { | |||
"include": get_tables(self.include_doctypes.strip().split(",")), | |||
"exclude": get_tables(self.exclude_doctypes.strip().split(",")), | |||
} | |||
specified_tables = get_tables(frappe.conf.get("backup", {}).get("includes", [])) | |||
include_tables = (specified_tables + base_tables) if specified_tables else [] | |||
conf_tables = { | |||
"include": include_tables, | |||
"exclude": get_tables(frappe.conf.get("backup", {}).get("excludes", [])), | |||
} | |||
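# DocTypes passed explicitly via --include/--exclude take precedence; the
# includes/excludes from site_config's "backup" key are only used as a fallback,
# and are skipped entirely when ignore_conf is set (the --ignore-backup-conf flag)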
self.backup_includes = passed_tables["include"] | |||
self.backup_excludes = passed_tables["exclude"] | |||
if not (self.backup_includes or self.backup_excludes) and not self.ignore_conf: | |||
self.backup_includes = self.backup_includes or conf_tables["include"] | |||
self.backup_excludes = self.backup_excludes or conf_tables["exclude"] | |||
self.partial = (self.backup_includes or self.backup_excludes) and not self.ignore_conf | |||
@property | |||
def site_config_backup_path(self): | |||
# For backwards compatibility | |||
click.secho("BackupGenerator.site_config_backup_path has been deprecated in favour of BackupGenerator.backup_path_conf", fg="yellow") | |||
click.secho( | |||
"BackupGenerator.site_config_backup_path has been deprecated in favour of" | |||
" BackupGenerator.backup_path_conf", | |||
fg="yellow", | |||
) | |||
return getattr(self, "backup_path_conf", None) | |||
def get_backup(self, older_than=24, ignore_files=False, force=False): | |||
""" | |||
Takes a new dump if existing file is old | |||
and sends the link to the file as email | |||
Takes a new dump if existing file is old | |||
and sends the link to the file as email | |||
""" | |||
#Check if file exists and is less than a day old | |||
#If not Take Dump | |||
# Check if file exists and is less than a day old | |||
# If not Take Dump | |||
if not force: | |||
last_db, last_file, last_private_file, site_config_backup_path = self.get_recent_backup(older_than) | |||
( | |||
last_db, | |||
last_file, | |||
last_private_file, | |||
site_config_backup_path, | |||
) = self.get_recent_backup(older_than) | |||
else: | |||
last_db, last_file, last_private_file, site_config_backup_path = False, False, False, False | |||
last_db, last_file, last_private_file, site_config_backup_path = ( | |||
False, | |||
False, | |||
False, | |||
False, | |||
) | |||
self.todays_date = now_datetime().strftime('%Y%m%d_%H%M%S') | |||
self.todays_date = now_datetime().strftime("%Y%m%d_%H%M%S") | |||
if not (self.backup_path_conf and self.backup_path_db and self.backup_path_files and self.backup_path_private_files): | |||
if not ( | |||
self.backup_path_conf | |||
and self.backup_path_db | |||
and self.backup_path_files | |||
and self.backup_path_private_files | |||
): | |||
self.set_backup_file_name() | |||
if not (last_db and last_file and last_private_file and site_config_backup_path): | |||
@@ -112,13 +204,13 @@ class BackupGenerator: | |||
self.backup_path_conf = site_config_backup_path | |||
def set_backup_file_name(self): | |||
# Generate a random name using today's date and an 8 digit random number
for_conf = self.todays_date + "-" + self.site_slug + "-site_config_backup.json" | |||
for_db = self.todays_date + "-" + self.site_slug + "-database.sql.gz" | |||
partial = "-partial" if self.partial else "" | |||
ext = "tgz" if self.compress_files else "tar" | |||
for_public_files = self.todays_date + "-" + self.site_slug + "-files." + ext | |||
for_private_files = self.todays_date + "-" + self.site_slug + "-private-files." + ext | |||
for_conf = f"{self.todays_date}-{self.site_slug}-site_config_backup.json" | |||
for_db = f"{self.todays_date}-{self.site_slug}{partial}-database.sql.gz" | |||
for_public_files = f"{self.todays_date}-{self.site_slug}-files.{ext}" | |||
for_private_files = f"{self.todays_date}-{self.site_slug}-private-files.{ext}" | |||
backup_path = self.backup_path or get_backup_path() | |||
if not self.backup_path_conf: | |||
@@ -130,11 +222,11 @@ class BackupGenerator: | |||
if not self.backup_path_private_files: | |||
self.backup_path_private_files = os.path.join(backup_path, for_private_files) | |||
def get_recent_backup(self, older_than): | |||
def get_recent_backup(self, older_than, partial=False): | |||
backup_path = get_backup_path() | |||
file_type_slugs = { | |||
"database": "*-{}-database.sql.gz", | |||
"database": "*-{{}}-{}database.sql.gz".format('*' if partial else ''), | |||
"public": "*-{}-files.tar", | |||
"private": "*-{}-private-files.tar", | |||
"config": "*-{}-site_config_backup.json", | |||
@@ -158,8 +250,7 @@ class BackupGenerator: | |||
return file_path | |||
latest_backups = { | |||
file_type: get_latest(pattern) | |||
for file_type, pattern in file_type_slugs.items() | |||
file_type: get_latest(pattern) for file_type, pattern in file_type_slugs.items() | |||
} | |||
recent_backups = { | |||
@@ -175,32 +266,40 @@ class BackupGenerator: | |||
def zip_files(self): | |||
# For backwards compatibility - pre v13 | |||
click.secho("BackupGenerator.zip_files has been deprecated in favour of BackupGenerator.backup_files", fg="yellow") | |||
click.secho( | |||
"BackupGenerator.zip_files has been deprecated in favour of" | |||
" BackupGenerator.backup_files", | |||
fg="yellow", | |||
) | |||
return self.backup_files() | |||
def get_summary(self): | |||
summary = { | |||
"config": { | |||
"path": self.backup_path_conf, | |||
"size": get_file_size(self.backup_path_conf, format=True) | |||
"size": get_file_size(self.backup_path_conf, format=True), | |||
}, | |||
"database": { | |||
"path": self.backup_path_db, | |||
"size": get_file_size(self.backup_path_db, format=True) | |||
} | |||
"size": get_file_size(self.backup_path_db, format=True), | |||
}, | |||
} | |||
if os.path.exists(self.backup_path_files) and os.path.exists(self.backup_path_private_files): | |||
summary.update({ | |||
"public": { | |||
"path": self.backup_path_files, | |||
"size": get_file_size(self.backup_path_files, format=True) | |||
}, | |||
"private": { | |||
"path": self.backup_path_private_files, | |||
"size": get_file_size(self.backup_path_private_files, format=True) | |||
if os.path.exists(self.backup_path_files) and os.path.exists( | |||
self.backup_path_private_files | |||
): | |||
summary.update( | |||
{ | |||
"public": { | |||
"path": self.backup_path_files, | |||
"size": get_file_size(self.backup_path_files, format=True), | |||
}, | |||
"private": { | |||
"path": self.backup_path_private_files, | |||
"size": get_file_size(self.backup_path_private_files, format=True), | |||
}, | |||
} | |||
}) | |||
) | |||
return summary | |||
@@ -208,21 +307,29 @@ class BackupGenerator: | |||
backup_summary = self.get_summary() | |||
print("Backup Summary for {0} at {1}".format(frappe.local.site, now())) | |||
title = max([len(x) for x in backup_summary]) | |||
path = max([len(x["path"]) for x in backup_summary.values()]) | |||
for _type, info in backup_summary.items(): | |||
print("{0:8}: {1:85} {2}".format(_type.title(), info["path"], info["size"])) | |||
template = "{{0:{0}}}: {{1:{1}}} {{2}}".format(title, path) | |||
print(template.format(_type.title(), info["path"], info["size"])) | |||
def backup_files(self): | |||
import subprocess | |||
for folder in ("public", "private"): | |||
files_path = frappe.get_site_path(folder, "files") | |||
backup_path = self.backup_path_files if folder=="public" else self.backup_path_private_files | |||
backup_path = ( | |||
self.backup_path_files if folder == "public" else self.backup_path_private_files | |||
) | |||
if self.compress_files: | |||
cmd_string = "tar cf - {1} | gzip > {0}" | |||
else: | |||
cmd_string = "tar -cf {0} {1}" | |||
output = subprocess.check_output(cmd_string.format(backup_path, files_path), shell=True) | |||
output = subprocess.check_output( | |||
cmd_string.format(backup_path, files_path), shell=True | |||
) | |||
if self.verbose and output: | |||
print(output.decode("utf8")) | |||
@@ -236,34 +343,114 @@ class BackupGenerator: | |||
def take_dump(self): | |||
import frappe.utils | |||
from frappe.utils.change_log import get_app_branch | |||
db_exc = { | |||
"mariadb": ("mysqldump", which("mysqldump")), | |||
"postgres": ("pg_dump", which("pg_dump")), | |||
}[self.db_type] | |||
gzip_exc = which("gzip") | |||
if not (gzip_exc and db_exc[1]): | |||
_exc = "gzip" if not gzip_exc else db_exc[0] | |||
frappe.throw( | |||
f"{_exc} not found in PATH! This is required to take a backup.", | |||
exc=frappe.ExecutableNotFound | |||
) | |||
db_exc = db_exc[0] | |||
database_header_content = [ | |||
f"Backup generated by Frappe {frappe.__version__} on branch {get_app_branch('frappe') or 'N/A'}", | |||
"", | |||
] | |||
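# these lines are written as "-- " SQL comments at the top of the gzip file;
# the dump command appends to it (>>), so restore tooling can read the header
# to detect partial backups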
# escape reserved characters | |||
args = dict([item[0], frappe.utils.esc(str(item[1]), '$ ')] | |||
for item in self.__dict__.copy().items()) | |||
cmd_string = """mysqldump --single-transaction --quick --lock-tables=false -u %(user)s -p%(password)s %(db_name)s -h %(db_host)s -P %(db_port)s | gzip > %(backup_path_db)s """ % args | |||
if self.db_type == 'postgres': | |||
cmd_string = "pg_dump postgres://{user}:{password}@{db_host}:{db_port}/{db_name} | gzip > {backup_path_db}".format( | |||
user=args.get('user'), | |||
password=args.get('password'), | |||
db_host=args.get('db_host'), | |||
db_port=args.get('db_port'), | |||
db_name=args.get('db_name'), | |||
backup_path_db=args.get('backup_path_db') | |||
args = frappe._dict( | |||
[item[0], frappe.utils.esc(str(item[1]), "$ ")] | |||
for item in self.__dict__.copy().items() | |||
) | |||
if self.backup_includes: | |||
backup_info = ("Backing Up Tables: ", ", ".join(self.backup_includes)) | |||
elif self.backup_excludes: | |||
backup_info = ("Skipping Tables: ", ", ".join(self.backup_excludes)) | |||
if self.partial: | |||
print(''.join(backup_info), "\n") | |||
database_header_content.extend([ | |||
f"Partial Backup of Frappe Site {frappe.local.site}", | |||
("Backup contains: " if self.backup_includes else "Backup excludes: ") + backup_info[1], | |||
"", | |||
]) | |||
generated_header = "\n".join([f"-- {x}" for x in database_header_content]) + "\n" | |||
with gzip.open(args.backup_path_db, "wt") as f: | |||
f.write(generated_header) | |||
if self.db_type == "postgres": | |||
if self.backup_includes: | |||
args["include"] = " ".join( | |||
["--table='public.\"{0}\"'".format(table) for table in self.backup_includes] | |||
) | |||
elif self.backup_excludes: | |||
args["exclude"] = " ".join( | |||
["--exclude-table-data='public.\"{0}\"'".format(table) for table in self.backup_excludes] | |||
) | |||
cmd_string = ( | |||
"{db_exc} postgres://{user}:{password}@{db_host}:{db_port}/{db_name}" | |||
" {include} {exclude} | {gzip} >> {backup_path_db}" | |||
) | |||
err, out = frappe.utils.execute_in_shell(cmd_string) | |||
else: | |||
if self.backup_includes: | |||
args["include"] = " ".join(["'{0}'".format(x) for x in self.backup_includes]) | |||
elif self.backup_excludes: | |||
args["exclude"] = " ".join( | |||
[ | |||
"--ignore-table='{0}.{1}'".format(frappe.conf.db_name, table) | |||
for table in self.backup_excludes | |||
] | |||
) | |||
cmd_string = ( | |||
"{db_exc} --single-transaction --quick --lock-tables=false -u {user}" | |||
" -p{password} {db_name} -h {db_host} -P {db_port} {include} {exclude}" | |||
" | {gzip} >> {backup_path_db}" | |||
) | |||
command = cmd_string.format( | |||
user=args.user, | |||
password=args.password, | |||
db_exc=db_exc, | |||
db_host=args.db_host, | |||
db_port=args.db_port, | |||
db_name=args.db_name, | |||
backup_path_db=args.backup_path_db, | |||
exclude=args.get("exclude", ""), | |||
include=args.get("include", ""), | |||
gzip=gzip_exc, | |||
) | |||
if self.verbose: | |||
print(command + "\n") | |||
err, out = frappe.utils.execute_in_shell(command) | |||
def send_email(self): | |||
""" | |||
Sends the link to backup file located at erpnext/backups | |||
Sends the link to backup file located at erpnext/backups | |||
""" | |||
from frappe.email import get_system_managers | |||
recipient_list = get_system_managers() | |||
db_backup_url = get_url(os.path.join('backups', os.path.basename(self.backup_path_db))) | |||
files_backup_url = get_url(os.path.join('backups', os.path.basename(self.backup_path_files))) | |||
db_backup_url = get_url( | |||
os.path.join("backups", os.path.basename(self.backup_path_db)) | |||
) | |||
files_backup_url = get_url( | |||
os.path.join("backups", os.path.basename(self.backup_path_files)) | |||
) | |||
msg = """Hello, | |||
@@ -275,11 +462,13 @@ Your backups are ready to be downloaded. | |||
This link will be valid for 24 hours. A new backup will be available for | |||
download only after 24 hours.""" % { | |||
"db_backup_url": db_backup_url, | |||
"files_backup_url": files_backup_url | |||
"files_backup_url": files_backup_url, | |||
} | |||
datetime_str = datetime.fromtimestamp(os.stat(self.backup_path_db).st_ctime) | |||
subject = datetime_str.strftime("%d/%m/%Y %H:%M:%S") + """ - Backup ready to be downloaded""" | |||
subject = ( | |||
datetime_str.strftime("%d/%m/%Y %H:%M:%S") + """ - Backup ready to be downloaded""" | |||
) | |||
frappe.sendmail(recipients=recipient_list, msg=msg, subject=subject) | |||
return recipient_list | |||
@@ -288,20 +477,29 @@ download only after 24 hours.""" % { | |||
@frappe.whitelist() | |||
def get_backup(): | |||
""" | |||
This function is executed when the user clicks on | |||
Toos > Download Backup | |||
This function is executed when the user clicks on | |||
Tools > Download Backup
""" | |||
delete_temp_backups() | |||
odb = BackupGenerator(frappe.conf.db_name, frappe.conf.db_name,\ | |||
frappe.conf.db_password, db_host = frappe.db.host,\ | |||
db_type=frappe.conf.db_type, db_port=frappe.conf.db_port) | |||
odb = BackupGenerator( | |||
frappe.conf.db_name, | |||
frappe.conf.db_name, | |||
frappe.conf.db_password, | |||
db_host=frappe.db.host, | |||
db_type=frappe.conf.db_type, | |||
db_port=frappe.conf.db_port, | |||
) | |||
odb.get_backup() | |||
recipient_list = odb.send_email() | |||
frappe.msgprint(_("Download link for your backup will be emailed on the following email address: {0}").format(', '.join(recipient_list))) | |||
frappe.msgprint( | |||
_( | |||
"Download link for your backup will be emailed on the following email address: {0}" | |||
).format(", ".join(recipient_list)) | |||
) | |||
@frappe.whitelist() | |||
def fetch_latest_backups(): | |||
def fetch_latest_backups(partial=False): | |||
"""Fetches paths of the latest backup taken in the last 30 days | |||
Only for: System Managers | |||
@@ -317,43 +515,88 @@ def fetch_latest_backups(): | |||
db_type=frappe.conf.db_type, | |||
db_port=frappe.conf.db_port, | |||
) | |||
database, public, private, config = odb.get_recent_backup(older_than=24 * 30) | |||
return { | |||
"database": database, | |||
"public": public, | |||
"private": private, | |||
"config": config | |||
} | |||
def scheduled_backup(older_than=6, ignore_files=False, backup_path=None, backup_path_db=None, backup_path_files=None, backup_path_private_files=None, backup_path_conf=None, force=False, verbose=False, compress=False): | |||
database, public, private, config = odb.get_recent_backup(older_than=24 * 30, partial=partial) | |||
return {"database": database, "public": public, "private": private, "config": config} | |||
def scheduled_backup( | |||
older_than=6, | |||
ignore_files=False, | |||
backup_path=None, | |||
backup_path_db=None, | |||
backup_path_files=None, | |||
backup_path_private_files=None, | |||
backup_path_conf=None, | |||
ignore_conf=False, | |||
include_doctypes="", | |||
exclude_doctypes="", | |||
compress=False, | |||
force=False, | |||
verbose=False, | |||
): | |||
"""this function is called from scheduler | |||
deletes backups older than 7 days | |||
takes backup""" | |||
odb = new_backup(older_than, ignore_files, backup_path=backup_path, backup_path_db=backup_path_db, backup_path_files=backup_path_files, backup_path_private_files=backup_path_private_files, backup_path_conf=backup_path_conf, force=force, verbose=verbose, compress=compress) | |||
deletes backups older than 7 days | |||
takes backup""" | |||
odb = new_backup( | |||
older_than=older_than, | |||
ignore_files=ignore_files, | |||
backup_path=backup_path, | |||
backup_path_db=backup_path_db, | |||
backup_path_files=backup_path_files, | |||
backup_path_private_files=backup_path_private_files, | |||
backup_path_conf=backup_path_conf, | |||
ignore_conf=ignore_conf, | |||
include_doctypes=include_doctypes, | |||
exclude_doctypes=exclude_doctypes, | |||
compress=compress, | |||
force=force, | |||
verbose=verbose, | |||
) | |||
return odb | |||
def new_backup(older_than=6, ignore_files=False, backup_path=None, backup_path_db=None, backup_path_files=None, backup_path_private_files=None, backup_path_conf=None, force=False, verbose=False, compress=False): | |||
delete_temp_backups(older_than = frappe.conf.keep_backups_for_hours or 24) | |||
odb = BackupGenerator(frappe.conf.db_name, frappe.conf.db_name,\ | |||
frappe.conf.db_password, | |||
backup_path=backup_path, | |||
backup_path_db=backup_path_db, | |||
backup_path_files=backup_path_files, | |||
backup_path_private_files=backup_path_private_files, | |||
backup_path_conf=backup_path_conf, | |||
db_host = frappe.db.host, | |||
db_port = frappe.db.port, | |||
db_type = frappe.conf.db_type, | |||
verbose=verbose, | |||
compress_files=compress) | |||
def new_backup( | |||
older_than=6, | |||
ignore_files=False, | |||
backup_path=None, | |||
backup_path_db=None, | |||
backup_path_files=None, | |||
backup_path_private_files=None, | |||
backup_path_conf=None, | |||
ignore_conf=False, | |||
include_doctypes="", | |||
exclude_doctypes="", | |||
compress=False, | |||
force=False, | |||
verbose=False, | |||
): | |||
delete_temp_backups(older_than=frappe.conf.keep_backups_for_hours or 24) | |||
odb = BackupGenerator( | |||
frappe.conf.db_name, | |||
frappe.conf.db_name, | |||
frappe.conf.db_password, | |||
db_host=frappe.db.host, | |||
db_port=frappe.db.port, | |||
db_type=frappe.conf.db_type, | |||
backup_path=backup_path, | |||
backup_path_db=backup_path_db, | |||
backup_path_files=backup_path_files, | |||
backup_path_private_files=backup_path_private_files, | |||
backup_path_conf=backup_path_conf, | |||
ignore_conf=ignore_conf, | |||
include_doctypes=include_doctypes, | |||
exclude_doctypes=exclude_doctypes, | |||
verbose=verbose, | |||
compress_files=compress, | |||
) | |||
odb.get_backup(older_than, ignore_files, force=force) | |||
return odb | |||
def delete_temp_backups(older_than=24): | |||
""" | |||
Cleans up the backup_link_path directory by deleting files older than 24 hours | |||
Cleans up the backup_link_path directory by deleting files older than 24 hours | |||
""" | |||
backup_path = get_backup_path() | |||
if os.path.exists(backup_path): | |||
@@ -363,54 +606,68 @@ def delete_temp_backups(older_than=24): | |||
if is_file_old(this_file_path, older_than): | |||
os.remove(this_file_path) | |||
def is_file_old(db_file_name, older_than=24): | |||
""" | |||
Checks if file exists and is older than specified hours | |||
Returns -> | |||
True: file does not exist or file is old | |||
False: file is new | |||
""" | |||
if os.path.isfile(db_file_name): | |||
from datetime import timedelta | |||
#Get timestamp of the file | |||
file_datetime = datetime.fromtimestamp\ | |||
(os.stat(db_file_name).st_ctime) | |||
if datetime.today() - file_datetime >= timedelta(hours = older_than): | |||
if _verbose: | |||
print("File is old") | |||
return True | |||
else: | |||
if _verbose: | |||
print("File is recent") | |||
return False | |||
else: | |||
def is_file_old(file_path, older_than=24): | |||
""" | |||
Checks if file exists and is older than specified hours | |||
Returns -> | |||
True: file does not exist or file is old | |||
False: file is new | |||
""" | |||
if os.path.isfile(file_path): | |||
from datetime import timedelta | |||
# Get timestamp of the file | |||
file_datetime = datetime.fromtimestamp(os.stat(file_path).st_ctime) | |||
if datetime.today() - file_datetime >= timedelta(hours=older_than): | |||
if _verbose: | |||
print("File does not exist") | |||
print(f"File {file_path} is older than {older_than} hours") | |||
return True | |||
else: | |||
if _verbose: | |||
print(f"File {file_path} is recent") | |||
return False | |||
else: | |||
if _verbose: | |||
print(f"File {file_path} does not exist") | |||
return True | |||
def get_backup_path(): | |||
backup_path = frappe.utils.get_site_path(conf.get("backup_path", "private/backups")) | |||
return backup_path | |||
def backup(with_files=False, backup_path_db=None, backup_path_files=None, backup_path_private_files=None, backup_path_conf=None, quiet=False): | |||
def backup( | |||
with_files=False, | |||
backup_path_db=None, | |||
backup_path_files=None, | |||
backup_path_private_files=None, | |||
backup_path_conf=None, | |||
quiet=False, | |||
): | |||
"Backup" | |||
odb = scheduled_backup(ignore_files=not with_files, backup_path_db=backup_path_db, backup_path_files=backup_path_files, backup_path_private_files=backup_path_private_files, backup_path_conf=backup_path_conf, force=True) | |||
odb = scheduled_backup( | |||
ignore_files=not with_files, | |||
backup_path_db=backup_path_db, | |||
backup_path_files=backup_path_files, | |||
backup_path_private_files=backup_path_private_files, | |||
backup_path_conf=backup_path_conf, | |||
force=True, | |||
) | |||
return { | |||
"backup_path_db": odb.backup_path_db, | |||
"backup_path_files": odb.backup_path_files, | |||
"backup_path_private_files": odb.backup_path_private_files | |||
"backup_path_private_files": odb.backup_path_private_files, | |||
} | |||
if __name__ == "__main__": | |||
""" | |||
is_file_old db_name user password db_host db_type db_port | |||
get_backup db_name user password db_host db_type db_port | |||
""" | |||
import sys | |||
cmd = sys.argv[1] | |||
db_type = 'mariadb' | |||
db_type = "mariadb" | |||
try: | |||
db_type = sys.argv[6] | |||
except IndexError: | |||
@@ -423,19 +680,47 @@ if __name__ == "__main__": | |||
pass | |||
if cmd == "is_file_old": | |||
odb = BackupGenerator(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5] or "localhost", db_type=db_type, db_port=db_port) | |||
odb = BackupGenerator( | |||
sys.argv[2], | |||
sys.argv[3], | |||
sys.argv[4], | |||
sys.argv[5] or "localhost", | |||
db_type=db_type, | |||
db_port=db_port, | |||
) | |||
is_file_old(odb.db_file_name) | |||
if cmd == "get_backup": | |||
odb = BackupGenerator(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5] or "localhost", db_type=db_type, db_port=db_port) | |||
odb = BackupGenerator( | |||
sys.argv[2], | |||
sys.argv[3], | |||
sys.argv[4], | |||
sys.argv[5] or "localhost", | |||
db_type=db_type, | |||
db_port=db_port, | |||
) | |||
odb.get_backup() | |||
if cmd == "take_dump": | |||
odb = BackupGenerator(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5] or "localhost", db_type=db_type, db_port=db_port) | |||
odb = BackupGenerator( | |||
sys.argv[2], | |||
sys.argv[3], | |||
sys.argv[4], | |||
sys.argv[5] or "localhost", | |||
db_type=db_type, | |||
db_port=db_port, | |||
) | |||
odb.take_dump() | |||
if cmd == "send_email": | |||
odb = BackupGenerator(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5] or "localhost", db_type=db_type, db_port=db_port) | |||
odb = BackupGenerator( | |||
sys.argv[2], | |||
sys.argv[3], | |||
sys.argv[4], | |||
sys.argv[5] or "localhost", | |||
db_type=db_type, | |||
db_port=db_port, | |||
) | |||
odb.send_email("abc.sql.gz") | |||
if cmd == "delete_temp_backups": | |||