@@ -98,8 +98,6 @@ rules: | |||||
languages: [python] | languages: [python] | ||||
severity: WARNING | severity: WARNING | ||||
paths: | paths: | ||||
exclude: | |||||
- test_*.py | |||||
include: | include: | ||||
- "*/**/doctype/*" | - "*/**/doctype/*" | ||||
@@ -8,10 +8,6 @@ rules: | |||||
dynamic content. Avoid it or use safe_eval(). | dynamic content. Avoid it or use safe_eval(). | ||||
languages: [python] | languages: [python] | ||||
severity: ERROR | severity: ERROR | ||||
paths: | |||||
exclude: | |||||
- frappe/__init__.py | |||||
- frappe/commands/utils.py | |||||
- id: frappe-sqli-format-strings | - id: frappe-sqli-format-strings | ||||
patterns: | patterns: | ||||
@@ -1,34 +1,18 @@ | |||||
name: Semgrep | name: Semgrep | ||||
on: | on: | ||||
pull_request: | |||||
branches: | |||||
- develop | |||||
- version-13-hotfix | |||||
- version-13-pre-release | |||||
pull_request: { } | |||||
jobs: | jobs: | ||||
semgrep: | semgrep: | ||||
name: Frappe Linter | name: Frappe Linter | ||||
runs-on: ubuntu-latest | runs-on: ubuntu-latest | ||||
steps: | steps: | ||||
- uses: actions/checkout@v2 | |||||
- name: Setup python3 | |||||
uses: actions/setup-python@v2 | |||||
with: | |||||
python-version: 3.8 | |||||
- name: Setup semgrep | |||||
run: | | |||||
python -m pip install -q semgrep | |||||
git fetch origin $GITHUB_BASE_REF:$GITHUB_BASE_REF -q | |||||
- name: Semgrep errors | |||||
run: | | |||||
files=$(git diff --name-only --diff-filter=d $GITHUB_BASE_REF) | |||||
[[ -d .github/helper/semgrep_rules ]] && semgrep --severity ERROR --config=.github/helper/semgrep_rules --quiet --error $files | |||||
semgrep --config="r/python.lang.correctness" --quiet --error $files | |||||
- name: Semgrep warnings | |||||
run: | | |||||
files=$(git diff --name-only --diff-filter=d $GITHUB_BASE_REF) | |||||
[[ -d .github/helper/semgrep_rules ]] && semgrep --severity WARNING --severity INFO --config=.github/helper/semgrep_rules --quiet $files | |||||
- uses: actions/checkout@v2 | |||||
- uses: returntocorp/semgrep-action@v1 | |||||
env: | |||||
SEMGREP_TIMEOUT: 120 | |||||
with: | |||||
config: >- | |||||
r/python.lang.correctness | |||||
.github/helper/semgrep_rules |
@@ -1,5 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
import json | import json | ||||
import os | import os | ||||
import subprocess | import subprocess | ||||
@@ -14,6 +12,13 @@ from frappe.exceptions import SiteNotSpecifiedError | |||||
from frappe.utils import get_bench_path, update_progress_bar, cint | from frappe.utils import get_bench_path, update_progress_bar, cint | ||||
DATA_IMPORT_DEPRECATION = click.style( | |||||
"[DEPRECATED] The `import-csv` command used 'Data Import Legacy' which has been deprecated.\n" | |||||
"Use `data-import` command instead to import data via 'Data Import'.", | |||||
fg="yellow" | |||||
) | |||||
@click.command('build') | @click.command('build') | ||||
@click.option('--app', help='Build assets for app') | @click.option('--app', help='Build assets for app') | ||||
@click.option('--apps', help='Build assets for specific apps') | @click.option('--apps', help='Build assets for specific apps') | ||||
@@ -350,7 +355,8 @@ def import_doc(context, path, force=False): | |||||
if not context.sites: | if not context.sites: | ||||
raise SiteNotSpecifiedError | raise SiteNotSpecifiedError | ||||
@click.command('import-csv') | |||||
@click.command('import-csv', help=DATA_IMPORT_DEPRECATION) | |||||
@click.argument('path') | @click.argument('path') | ||||
@click.option('--only-insert', default=False, is_flag=True, help='Do not overwrite existing records') | @click.option('--only-insert', default=False, is_flag=True, help='Do not overwrite existing records') | ||||
@click.option('--submit-after-import', default=False, is_flag=True, help='Submit document after importing it') | @click.option('--submit-after-import', default=False, is_flag=True, help='Submit document after importing it') | ||||
@@ -358,32 +364,8 @@ def import_doc(context, path, force=False): | |||||
@click.option('--no-email', default=True, is_flag=True, help='Send email if applicable') | @click.option('--no-email', default=True, is_flag=True, help='Send email if applicable') | ||||
@pass_context | @pass_context | ||||
def import_csv(context, path, only_insert=False, submit_after_import=False, ignore_encoding_errors=False, no_email=True): | def import_csv(context, path, only_insert=False, submit_after_import=False, ignore_encoding_errors=False, no_email=True): | ||||
"Import CSV using data import" | |||||
from frappe.core.doctype.data_import_legacy import importer | |||||
from frappe.utils.csvutils import read_csv_content | |||||
site = get_site(context) | |||||
if not os.path.exists(path): | |||||
path = os.path.join('..', path) | |||||
if not os.path.exists(path): | |||||
print('Invalid path {0}'.format(path)) | |||||
sys.exit(1) | |||||
with open(path, 'r') as csvfile: | |||||
content = read_csv_content(csvfile.read()) | |||||
frappe.init(site=site) | |||||
frappe.connect() | |||||
try: | |||||
importer.upload(content, submit_after_import=submit_after_import, no_email=no_email, | |||||
ignore_encoding_errors=ignore_encoding_errors, overwrite=not only_insert, | |||||
via_console=True) | |||||
frappe.db.commit() | |||||
except Exception: | |||||
print(frappe.get_traceback()) | |||||
frappe.destroy() | |||||
click.secho(DATA_IMPORT_DEPRECATION) | |||||
sys.exit(1) | |||||
@click.command('data-import') | @click.command('data-import') | ||||
@@ -767,26 +749,49 @@ def set_config(context, key, value, global_=False, parse=False, as_dict=False): | |||||
frappe.destroy() | frappe.destroy() | ||||
@click.command('version') | |||||
def get_version(): | |||||
"Show the versions of all the installed apps" | |||||
@click.command("version") | |||||
@click.option("-f", "--format", "output", | |||||
type=click.Choice(["plain", "table", "json", "legacy"]), help="Output format", default="legacy") | |||||
def get_version(output): | |||||
"""Show the versions of all the installed apps.""" | |||||
from git import Repo | from git import Repo | ||||
from frappe.utils.commands import render_table | |||||
from frappe.utils.change_log import get_app_branch | from frappe.utils.change_log import get_app_branch | ||||
frappe.init('') | |||||
frappe.init("") | |||||
data = [] | |||||
for app in sorted(frappe.get_all_apps()): | for app in sorted(frappe.get_all_apps()): | ||||
branch_name = get_app_branch(app) | |||||
module = frappe.get_module(app) | module = frappe.get_module(app) | ||||
app_hooks = frappe.get_module(app + ".hooks") | app_hooks = frappe.get_module(app + ".hooks") | ||||
repo = Repo(frappe.get_app_path(app, "..")) | repo = Repo(frappe.get_app_path(app, "..")) | ||||
branch = repo.head.ref.name | |||||
commit = repo.head.ref.commit.hexsha[:7] | |||||
if hasattr(app_hooks, '{0}_version'.format(branch_name)): | |||||
click.echo("{0} {1} {2} ({3})".format(app, getattr(app_hooks, '{0}_version'.format(branch_name)), branch, commit)) | |||||
elif hasattr(module, "__version__"): | |||||
click.echo("{0} {1} {2} ({3})".format(app, module.__version__, branch, commit)) | |||||
app_info = frappe._dict() | |||||
app_info.app = app | |||||
app_info.branch = get_app_branch(app) | |||||
app_info.commit = repo.head.object.hexsha[:7] | |||||
app_info.version = getattr(app_hooks, f"{app_info.branch}_version", None) or module.__version__ | |||||
data.append(app_info) | |||||
{ | |||||
"legacy": lambda: [ | |||||
click.echo(f"{app_info.app} {app_info.version}") | |||||
for app_info in data | |||||
], | |||||
"plain": lambda: [ | |||||
click.echo(f"{app_info.app} {app_info.version} {app_info.branch} ({app_info.commit})") | |||||
for app_info in data | |||||
], | |||||
"table": lambda: render_table( | |||||
[["App", "Version", "Branch", "Commit"]] + | |||||
[ | |||||
[app_info.app, app_info.version, app_info.branch, app_info.commit] | |||||
for app_info in data | |||||
] | |||||
), | |||||
"json": lambda: click.echo(json.dumps(data, indent=4)), | |||||
}[output]() | |||||
@click.command('rebuild-global-search') | @click.command('rebuild-global-search') | ||||
@@ -1,9 +1,5 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and contributors | # Copyright (c) 2019, Frappe Technologies and contributors | ||||
# For license information, please see license.txt | # For license information, please see license.txt | ||||
# imports - standard imports | |||||
# imports - module imports | |||||
import frappe | import frappe | ||||
from frappe.model.document import Document | from frappe.model.document import Document | ||||
@@ -89,7 +89,7 @@ class CommunicationEmailMixin: | |||||
return self._final_cc | return self._final_cc | ||||
def get_mail_cc_with_displayname(self, is_inbound_mail_communcation=False, include_sender = False): | def get_mail_cc_with_displayname(self, is_inbound_mail_communcation=False, include_sender = False): | ||||
cc_list = self.mail_cc(is_inbound_mail_communcation=False, include_sender = False) | |||||
cc_list = self.mail_cc(is_inbound_mail_communcation=is_inbound_mail_communcation, include_sender = include_sender) | |||||
return [self.get_email_with_displayname(email) for email in cc_list] | return [self.get_email_with_displayname(email) for email in cc_list] | ||||
def mail_bcc(self, is_inbound_mail_communcation=False): | def mail_bcc(self, is_inbound_mail_communcation=False): | ||||
@@ -176,8 +176,8 @@ class CommunicationEmailMixin: | |||||
def mail_attachments(self, print_format=None, print_html=None): | def mail_attachments(self, print_format=None, print_html=None): | ||||
final_attachments = [] | final_attachments = [] | ||||
if print_format and print_html: | |||||
d = {'print_format': print_format, 'print_html': print_html, 'print_format_attachment': 1, | |||||
if print_format or print_html: | |||||
d = {'print_format': print_format, 'html': print_html, 'print_format_attachment': 1, | |||||
'doctype': self.reference_doctype, 'name': self.reference_name} | 'doctype': self.reference_doctype, 'name': self.reference_name} | ||||
final_attachments.append(d) | final_attachments.append(d) | ||||
@@ -7,7 +7,6 @@ import frappe.permissions | |||||
import re, csv, os | import re, csv, os | ||||
from frappe.utils.csvutils import UnicodeWriter | from frappe.utils.csvutils import UnicodeWriter | ||||
from frappe.utils import cstr, formatdate, format_datetime, parse_json, cint, format_duration | from frappe.utils import cstr, formatdate, format_datetime, parse_json, cint, format_duration | ||||
from frappe.core.doctype.data_import_legacy.importer import get_data_keys | |||||
from frappe.core.doctype.access_log.access_log import make_access_log | from frappe.core.doctype.access_log.access_log import make_access_log | ||||
reflags = { | reflags = { | ||||
@@ -20,6 +19,15 @@ reflags = { | |||||
"D": re.DEBUG | "D": re.DEBUG | ||||
} | } | ||||
def get_data_keys(): | |||||
return frappe._dict({ | |||||
"data_separator": _('Start entering data below this line'), | |||||
"main_table": _("Table") + ":", | |||||
"parent_table": _("Parent Table") + ":", | |||||
"columns": _("Column Name") + ":", | |||||
"doctype": _("DocType") + ":" | |||||
}) | |||||
@frappe.whitelist() | @frappe.whitelist() | ||||
def export_data(doctype=None, parent_doctype=None, all_doctypes=True, with_data=False, | def export_data(doctype=None, parent_doctype=None, all_doctypes=True, with_data=False, | ||||
select_columns=None, file_type='CSV', template=False, filters=None): | select_columns=None, file_type='CSV', template=False, filters=None): | ||||
@@ -171,9 +171,6 @@ def import_file( | |||||
i.import_data() | i.import_data() | ||||
############## | |||||
def import_doc(path, pre_process=None): | def import_doc(path, pre_process=None): | ||||
if os.path.isdir(path): | if os.path.isdir(path): | ||||
files = [os.path.join(path, f) for f in os.listdir(path)] | files = [os.path.join(path, f) for f in os.listdir(path)] | ||||
@@ -192,19 +189,8 @@ def import_doc(path, pre_process=None): | |||||
) | ) | ||||
frappe.flags.mute_emails = False | frappe.flags.mute_emails = False | ||||
frappe.db.commit() | frappe.db.commit() | ||||
elif f.endswith(".csv"): | |||||
validate_csv_import_file(f) | |||||
frappe.db.commit() | |||||
def validate_csv_import_file(path): | |||||
if path.endswith(".csv"): | |||||
print() | |||||
print("This method is deprecated.") | |||||
print('Import CSV files using the command "bench --site sitename data-import"') | |||||
print("Or use the method frappe.core.doctype.data_import.data_import.import_file") | |||||
print() | |||||
raise Exception("Method deprecated") | |||||
else: | |||||
raise NotImplementedError("Only .json files can be imported") | |||||
def export_json( | def export_json( | ||||
@@ -1,324 +0,0 @@ | |||||
// Copyright (c) 2017, Frappe Technologies and contributors | |||||
// For license information, please see license.txt | |||||
frappe.ui.form.on('Data Import Legacy', { | |||||
onload: function(frm) { | |||||
if (frm.doc.__islocal) { | |||||
frm.set_value("action", ""); | |||||
} | |||||
frappe.call({ | |||||
method: "frappe.core.doctype.data_import_legacy.data_import_legacy.get_importable_doctypes", | |||||
callback: function (r) { | |||||
let importable_doctypes = r.message; | |||||
frm.set_query("reference_doctype", function () { | |||||
return { | |||||
"filters": { | |||||
"issingle": 0, | |||||
"istable": 0, | |||||
"name": ['in', importable_doctypes] | |||||
} | |||||
}; | |||||
}); | |||||
} | |||||
}), | |||||
// should never check public | |||||
frm.fields_dict["import_file"].df.is_private = 1; | |||||
frappe.realtime.on("data_import_progress", function(data) { | |||||
if (data.data_import === frm.doc.name) { | |||||
if (data.reload && data.reload === true) { | |||||
frm.reload_doc(); | |||||
} | |||||
if (data.progress) { | |||||
let progress_bar = $(frm.dashboard.progress_area.body).find(".progress-bar"); | |||||
if (progress_bar) { | |||||
$(progress_bar).removeClass("progress-bar-danger").addClass("progress-bar-success progress-bar-striped"); | |||||
$(progress_bar).css("width", data.progress + "%"); | |||||
} | |||||
} | |||||
} | |||||
}); | |||||
}, | |||||
reference_doctype: function(frm){ | |||||
if (frm.doc.reference_doctype) { | |||||
frappe.model.with_doctype(frm.doc.reference_doctype); | |||||
} | |||||
}, | |||||
refresh: function(frm) { | |||||
frm.disable_save(); | |||||
frm.dashboard.clear_headline(); | |||||
if (frm.doc.reference_doctype && !frm.doc.import_file) { | |||||
frm.page.set_indicator(__('Attach file'), 'orange'); | |||||
} else { | |||||
if (frm.doc.import_status) { | |||||
const listview_settings = frappe.listview_settings['Data Import Legacy']; | |||||
const indicator = listview_settings.get_indicator(frm.doc); | |||||
frm.page.set_indicator(indicator[0], indicator[1]); | |||||
if (frm.doc.import_status === "In Progress") { | |||||
frm.dashboard.add_progress("Data Import Progress", "0"); | |||||
frm.set_read_only(); | |||||
frm.refresh_fields(); | |||||
} | |||||
} | |||||
} | |||||
if (frm.doc.reference_doctype) { | |||||
frappe.model.with_doctype(frm.doc.reference_doctype); | |||||
} | |||||
if(frm.doc.action == "Insert new records" || frm.doc.action == "Update records") { | |||||
frm.set_df_property("action", "read_only", 1); | |||||
} | |||||
frm.add_custom_button(__("Help"), function() { | |||||
frappe.help.show_video("6wiriRKPhmg"); | |||||
}); | |||||
if (frm.doc.reference_doctype && frm.doc.docstatus === 0) { | |||||
frm.add_custom_button(__("Download template"), function() { | |||||
frappe.data_import.download_dialog(frm).show(); | |||||
}); | |||||
} | |||||
if (frm.doc.reference_doctype && frm.doc.import_file && frm.doc.total_rows && | |||||
frm.doc.docstatus === 0 && (!frm.doc.import_status || frm.doc.import_status == "Failed")) { | |||||
frm.page.set_primary_action(__("Start Import"), function() { | |||||
frappe.call({ | |||||
btn: frm.page.btn_primary, | |||||
method: "frappe.core.doctype.data_import_legacy.data_import_legacy.import_data", | |||||
args: { | |||||
data_import: frm.doc.name | |||||
} | |||||
}); | |||||
}).addClass('btn btn-primary'); | |||||
} | |||||
if (frm.doc.log_details) { | |||||
frm.events.create_log_table(frm); | |||||
} else { | |||||
$(frm.fields_dict.import_log.wrapper).empty(); | |||||
} | |||||
}, | |||||
action: function(frm) { | |||||
if(!frm.doc.action) return; | |||||
if(!frm.doc.reference_doctype) { | |||||
frappe.msgprint(__("Please select document type first.")); | |||||
frm.set_value("action", ""); | |||||
return; | |||||
} | |||||
if(frm.doc.action == "Insert new records") { | |||||
frm.doc.insert_new = 1; | |||||
} else if (frm.doc.action == "Update records"){ | |||||
frm.doc.overwrite = 1; | |||||
} | |||||
frm.save(); | |||||
}, | |||||
only_update: function(frm) { | |||||
frm.save(); | |||||
}, | |||||
submit_after_import: function(frm) { | |||||
frm.save(); | |||||
}, | |||||
skip_errors: function(frm) { | |||||
frm.save(); | |||||
}, | |||||
ignore_encoding_errors: function(frm) { | |||||
frm.save(); | |||||
}, | |||||
no_email: function(frm) { | |||||
frm.save(); | |||||
}, | |||||
show_only_errors: function(frm) { | |||||
frm.events.create_log_table(frm); | |||||
}, | |||||
create_log_table: function(frm) { | |||||
let msg = JSON.parse(frm.doc.log_details); | |||||
var $log_wrapper = $(frm.fields_dict.import_log.wrapper).empty(); | |||||
$(frappe.render_template("log_details", { | |||||
data: msg.messages, | |||||
import_status: frm.doc.import_status, | |||||
show_only_errors: frm.doc.show_only_errors, | |||||
})).appendTo($log_wrapper); | |||||
} | |||||
}); | |||||
frappe.provide('frappe.data_import'); | |||||
frappe.data_import.download_dialog = function(frm) { | |||||
var dialog; | |||||
const filter_fields = df => frappe.model.is_value_type(df) && !df.hidden; | |||||
const get_fields = dt => frappe.meta.get_docfields(dt).filter(filter_fields); | |||||
const get_doctype_checkbox_fields = () => { | |||||
return dialog.fields.filter(df => df.fieldname.endsWith('_fields')) | |||||
.map(df => dialog.fields_dict[df.fieldname]); | |||||
}; | |||||
const doctype_fields = get_fields(frm.doc.reference_doctype) | |||||
.map(df => { | |||||
let reqd = (df.reqd || df.fieldname == 'naming_series') ? 1 : 0; | |||||
return { | |||||
label: df.label, | |||||
reqd: reqd, | |||||
danger: reqd, | |||||
value: df.fieldname, | |||||
checked: 1 | |||||
}; | |||||
}); | |||||
let fields = [ | |||||
{ | |||||
"label": __("Select Columns"), | |||||
"fieldname": "select_columns", | |||||
"fieldtype": "Select", | |||||
"options": "All\nMandatory\nManually", | |||||
"reqd": 1, | |||||
"onchange": function() { | |||||
const fields = get_doctype_checkbox_fields(); | |||||
fields.map(f => f.toggle(true)); | |||||
if(this.value == 'Mandatory' || this.value == 'Manually') { | |||||
checkbox_toggle(true); | |||||
fields.map(multicheck_field => { | |||||
multicheck_field.options.map(option => { | |||||
if(!option.reqd) return; | |||||
$(multicheck_field.$wrapper).find(`:checkbox[data-unit="${option.value}"]`) | |||||
.prop('checked', false) | |||||
.trigger('click'); | |||||
}); | |||||
}); | |||||
} else if(this.value == 'All'){ | |||||
$(dialog.body).find(`[data-fieldtype="MultiCheck"] :checkbox`) | |||||
.prop('disabled', true); | |||||
} | |||||
} | |||||
}, | |||||
{ | |||||
"label": __("File Type"), | |||||
"fieldname": "file_type", | |||||
"fieldtype": "Select", | |||||
"options": "Excel\nCSV", | |||||
"default": "Excel" | |||||
}, | |||||
{ | |||||
"label": __("Download with Data"), | |||||
"fieldname": "with_data", | |||||
"fieldtype": "Check", | |||||
"hidden": !frm.doc.overwrite, | |||||
"default": 1 | |||||
}, | |||||
{ | |||||
"label": __("Select All"), | |||||
"fieldname": "select_all", | |||||
"fieldtype": "Button", | |||||
"depends_on": "eval:doc.select_columns=='Manually'", | |||||
click: function() { | |||||
checkbox_toggle(); | |||||
} | |||||
}, | |||||
{ | |||||
"label": __("Unselect All"), | |||||
"fieldname": "unselect_all", | |||||
"fieldtype": "Button", | |||||
"depends_on": "eval:doc.select_columns=='Manually'", | |||||
click: function() { | |||||
checkbox_toggle(true); | |||||
} | |||||
}, | |||||
{ | |||||
"label": frm.doc.reference_doctype, | |||||
"fieldname": "doctype_fields", | |||||
"fieldtype": "MultiCheck", | |||||
"options": doctype_fields, | |||||
"columns": 2, | |||||
"hidden": 1 | |||||
} | |||||
]; | |||||
const child_table_fields = frappe.meta.get_table_fields(frm.doc.reference_doctype) | |||||
.map(df => { | |||||
return { | |||||
"label": df.options, | |||||
"fieldname": df.fieldname + '_fields', | |||||
"fieldtype": "MultiCheck", | |||||
"options": frappe.meta.get_docfields(df.options) | |||||
.filter(filter_fields) | |||||
.map(df => ({ | |||||
label: df.label, | |||||
reqd: df.reqd ? 1 : 0, | |||||
value: df.fieldname, | |||||
checked: 1, | |||||
danger: df.reqd | |||||
})), | |||||
"columns": 2, | |||||
"hidden": 1 | |||||
}; | |||||
}); | |||||
fields = fields.concat(child_table_fields); | |||||
dialog = new frappe.ui.Dialog({ | |||||
title: __('Download Template'), | |||||
fields: fields, | |||||
primary_action: function(values) { | |||||
var data = values; | |||||
if (frm.doc.reference_doctype) { | |||||
var export_params = () => { | |||||
let columns = {}; | |||||
if(values.select_columns) { | |||||
columns = get_doctype_checkbox_fields().reduce((columns, field) => { | |||||
const options = field.get_checked_options(); | |||||
columns[field.df.label] = options; | |||||
return columns; | |||||
}, {}); | |||||
} | |||||
return { | |||||
doctype: frm.doc.reference_doctype, | |||||
parent_doctype: frm.doc.reference_doctype, | |||||
select_columns: JSON.stringify(columns), | |||||
with_data: frm.doc.overwrite && data.with_data, | |||||
all_doctypes: true, | |||||
file_type: data.file_type, | |||||
template: true | |||||
}; | |||||
}; | |||||
let get_template_url = '/api/method/frappe.core.doctype.data_export.exporter.export_data'; | |||||
open_url_post(get_template_url, export_params()); | |||||
} else { | |||||
frappe.msgprint(__("Please select the Document Type.")); | |||||
} | |||||
dialog.hide(); | |||||
}, | |||||
primary_action_label: __('Download') | |||||
}); | |||||
$(dialog.body).find('div[data-fieldname="select_all"], div[data-fieldname="unselect_all"]') | |||||
.wrapAll('<div class="inline-buttons" />'); | |||||
const button_container = $(dialog.body).find('.inline-buttons'); | |||||
button_container.addClass('flex'); | |||||
$(button_container).find('.frappe-control').map((index, button) => { | |||||
$(button).css({"margin-right": "1em"}); | |||||
}); | |||||
function checkbox_toggle(checked=false) { | |||||
$(dialog.body).find('[data-fieldtype="MultiCheck"]').map((index, element) => { | |||||
$(element).find(`:checkbox`).prop("checked", checked).trigger('click'); | |||||
}); | |||||
} | |||||
return dialog; | |||||
}; |
@@ -1,218 +0,0 @@ | |||||
{ | |||||
"actions": [], | |||||
"allow_copy": 1, | |||||
"creation": "2020-06-11 16:13:23.813709", | |||||
"doctype": "DocType", | |||||
"document_type": "Document", | |||||
"editable_grid": 1, | |||||
"engine": "InnoDB", | |||||
"field_order": [ | |||||
"reference_doctype", | |||||
"action", | |||||
"insert_new", | |||||
"overwrite", | |||||
"only_update", | |||||
"section_break_4", | |||||
"import_file", | |||||
"column_break_4", | |||||
"error_file", | |||||
"section_break_6", | |||||
"skip_errors", | |||||
"submit_after_import", | |||||
"ignore_encoding_errors", | |||||
"no_email", | |||||
"import_detail", | |||||
"import_status", | |||||
"show_only_errors", | |||||
"import_log", | |||||
"log_details", | |||||
"amended_from", | |||||
"total_rows", | |||||
"amended_from" | |||||
], | |||||
"fields": [ | |||||
{ | |||||
"fieldname": "reference_doctype", | |||||
"fieldtype": "Link", | |||||
"ignore_user_permissions": 1, | |||||
"in_list_view": 1, | |||||
"label": "Document Type", | |||||
"options": "DocType", | |||||
"reqd": 1 | |||||
}, | |||||
{ | |||||
"fieldname": "action", | |||||
"fieldtype": "Select", | |||||
"label": "Action", | |||||
"options": "Insert new records\nUpdate records", | |||||
"reqd": 1 | |||||
}, | |||||
{ | |||||
"default": "0", | |||||
"depends_on": "eval:!doc.overwrite", | |||||
"description": "New data will be inserted.", | |||||
"fieldname": "insert_new", | |||||
"fieldtype": "Check", | |||||
"hidden": 1, | |||||
"label": "Insert new records", | |||||
"set_only_once": 1 | |||||
}, | |||||
{ | |||||
"default": "0", | |||||
"depends_on": "eval:!doc.insert_new", | |||||
"description": "If you are updating/overwriting already created records.", | |||||
"fieldname": "overwrite", | |||||
"fieldtype": "Check", | |||||
"hidden": 1, | |||||
"label": "Update records", | |||||
"set_only_once": 1 | |||||
}, | |||||
{ | |||||
"default": "0", | |||||
"depends_on": "overwrite", | |||||
"description": "If you don't want to create any new records while updating the older records.", | |||||
"fieldname": "only_update", | |||||
"fieldtype": "Check", | |||||
"label": "Don't create new records" | |||||
}, | |||||
{ | |||||
"depends_on": "eval:(!doc.__islocal)", | |||||
"fieldname": "section_break_4", | |||||
"fieldtype": "Section Break" | |||||
}, | |||||
{ | |||||
"fieldname": "import_file", | |||||
"fieldtype": "Attach", | |||||
"label": "Attach file for Import" | |||||
}, | |||||
{ | |||||
"fieldname": "column_break_4", | |||||
"fieldtype": "Column Break" | |||||
}, | |||||
{ | |||||
"depends_on": "eval: doc.import_status == \"Partially Successful\"", | |||||
"description": "This is the template file generated with only the rows having some error. You should use this file for correction and import.", | |||||
"fieldname": "error_file", | |||||
"fieldtype": "Attach", | |||||
"label": "Generated File" | |||||
}, | |||||
{ | |||||
"depends_on": "eval:(!doc.__islocal)", | |||||
"fieldname": "section_break_6", | |||||
"fieldtype": "Section Break" | |||||
}, | |||||
{ | |||||
"default": "0", | |||||
"description": "If this is checked, rows with valid data will be imported and invalid rows will be dumped into a new file for you to import later.", | |||||
"fieldname": "skip_errors", | |||||
"fieldtype": "Check", | |||||
"label": "Skip rows with errors" | |||||
}, | |||||
{ | |||||
"default": "0", | |||||
"fieldname": "submit_after_import", | |||||
"fieldtype": "Check", | |||||
"label": "Submit after importing" | |||||
}, | |||||
{ | |||||
"default": "0", | |||||
"fieldname": "ignore_encoding_errors", | |||||
"fieldtype": "Check", | |||||
"label": "Ignore encoding errors" | |||||
}, | |||||
{ | |||||
"default": "1", | |||||
"fieldname": "no_email", | |||||
"fieldtype": "Check", | |||||
"label": "Do not send Emails" | |||||
}, | |||||
{ | |||||
"collapsible": 1, | |||||
"collapsible_depends_on": "eval: doc.import_status == \"Failed\"", | |||||
"depends_on": "import_status", | |||||
"fieldname": "import_detail", | |||||
"fieldtype": "Section Break", | |||||
"label": "Import Log" | |||||
}, | |||||
{ | |||||
"fieldname": "import_status", | |||||
"fieldtype": "Select", | |||||
"label": "Import Status", | |||||
"options": "\nSuccessful\nFailed\nIn Progress\nPartially Successful", | |||||
"read_only": 1 | |||||
}, | |||||
{ | |||||
"allow_on_submit": 1, | |||||
"default": "1", | |||||
"fieldname": "show_only_errors", | |||||
"fieldtype": "Check", | |||||
"label": "Show only errors", | |||||
"no_copy": 1, | |||||
"print_hide": 1 | |||||
}, | |||||
{ | |||||
"allow_on_submit": 1, | |||||
"depends_on": "import_status", | |||||
"fieldname": "import_log", | |||||
"fieldtype": "HTML", | |||||
"label": "Import Log" | |||||
}, | |||||
{ | |||||
"allow_on_submit": 1, | |||||
"fieldname": "log_details", | |||||
"fieldtype": "Code", | |||||
"hidden": 1, | |||||
"label": "Log Details", | |||||
"read_only": 1 | |||||
}, | |||||
{ | |||||
"fieldname": "amended_from", | |||||
"fieldtype": "Link", | |||||
"label": "Amended From", | |||||
"no_copy": 1, | |||||
"options": "Data Import", | |||||
"print_hide": 1, | |||||
"read_only": 1 | |||||
}, | |||||
{ | |||||
"fieldname": "total_rows", | |||||
"fieldtype": "Int", | |||||
"hidden": 1, | |||||
"label": "Total Rows", | |||||
"read_only": 1 | |||||
}, | |||||
{ | |||||
"fieldname": "amended_from", | |||||
"fieldtype": "Link", | |||||
"label": "Amended From", | |||||
"no_copy": 1, | |||||
"options": "Data Import Legacy", | |||||
"print_hide": 1, | |||||
"read_only": 1 | |||||
} | |||||
], | |||||
"is_submittable": 1, | |||||
"links": [], | |||||
"max_attachments": 1, | |||||
"modified": "2020-06-11 16:13:23.813709", | |||||
"modified_by": "Administrator", | |||||
"module": "Core", | |||||
"name": "Data Import Legacy", | |||||
"owner": "Administrator", | |||||
"permissions": [ | |||||
{ | |||||
"create": 1, | |||||
"delete": 1, | |||||
"email": 1, | |||||
"read": 1, | |||||
"role": "System Manager", | |||||
"share": 1, | |||||
"submit": 1, | |||||
"write": 1 | |||||
} | |||||
], | |||||
"sort_field": "modified", | |||||
"sort_order": "DESC", | |||||
"track_changes": 1, | |||||
"track_seen": 1 | |||||
} |
@@ -1,126 +0,0 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2017, Frappe Technologies and contributors | |||||
# For license information, please see license.txt | |||||
import os | |||||
import frappe | |||||
import frappe.modules.import_file | |||||
from frappe import _ | |||||
from frappe.core.doctype.data_import_legacy.importer import upload | |||||
from frappe.model.document import Document | |||||
from frappe.modules.import_file import import_file_by_path as _import_file_by_path | |||||
from frappe.utils.background_jobs import enqueue | |||||
from frappe.utils.data import format_datetime | |||||
class DataImportLegacy(Document): | |||||
def autoname(self): | |||||
if not self.name: | |||||
self.name = "Import on " + format_datetime(self.creation) | |||||
def validate(self): | |||||
if not self.import_file: | |||||
self.db_set("total_rows", 0) | |||||
if self.import_status == "In Progress": | |||||
frappe.throw(_("Can't save the form as data import is in progress.")) | |||||
# validate the template just after the upload | |||||
# if there is total_rows in the doc, it means that the template is already validated and error free | |||||
if self.import_file and not self.total_rows: | |||||
upload(data_import_doc=self, from_data_import="Yes", validate_template=True) | |||||
@frappe.whitelist() | |||||
def get_importable_doctypes(): | |||||
return frappe.cache().hget("can_import", frappe.session.user) | |||||
@frappe.whitelist() | |||||
def import_data(data_import): | |||||
frappe.db.set_value("Data Import Legacy", data_import, "import_status", "In Progress", update_modified=False) | |||||
frappe.publish_realtime("data_import_progress", {"progress": "0", | |||||
"data_import": data_import, "reload": True}, user=frappe.session.user) | |||||
from frappe.core.page.background_jobs.background_jobs import get_info | |||||
enqueued_jobs = [d.get("job_name") for d in get_info()] | |||||
if data_import not in enqueued_jobs: | |||||
enqueue(upload, queue='default', timeout=6000, event='data_import', job_name=data_import, | |||||
data_import_doc=data_import, from_data_import="Yes", user=frappe.session.user) | |||||
def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False, | |||||
insert=False, submit=False, pre_process=None): | |||||
if os.path.isdir(path): | |||||
files = [os.path.join(path, f) for f in os.listdir(path)] | |||||
else: | |||||
files = [path] | |||||
for f in files: | |||||
if f.endswith(".json"): | |||||
frappe.flags.mute_emails = True | |||||
_import_file_by_path(f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True) | |||||
frappe.flags.mute_emails = False | |||||
frappe.db.commit() | |||||
elif f.endswith(".csv"): | |||||
import_file_by_path(f, ignore_links=ignore_links, overwrite=overwrite, submit=submit, pre_process=pre_process) | |||||
frappe.db.commit() | |||||
def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True):
	"""Read a CSV file from ``path`` and feed its rows to :func:`upload`."""
	from frappe.utils.csvutils import read_csv_content

	print("Importing " + path)
	with open(path, "r") as infile:
		content = infile.read()

	upload(
		rows=read_csv_content(content),
		ignore_links=ignore_links,
		no_email=no_email,
		overwrite=overwrite,
		submit_after_import=submit,
		pre_process=pre_process,
	)
def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"):
	"""Export documents of ``doctype`` to ``path`` as a JSON fixture.

	Args:
		doctype: DocType to export.
		path: destination file for the JSON output.
		filters: optional filters limiting which documents are exported.
		or_filters: optional OR-combined filters.
		name: export only this single document instead of running a query.
		order_by: ordering of the exported documents.
	"""
	def post_process(out):
		# Strip volatile/site-specific metadata so fixtures diff cleanly between sites.
		del_keys = ('modified_by', 'creation', 'owner', 'idx')
		# Hoisted out of the loops: child rows additionally drop identity/status fields.
		# (The original re-built this tuple on every child-table key iteration.)
		child_del_keys = del_keys + ('docstatus', 'doctype', 'modified', 'name')
		for doc in out:
			for key in del_keys:
				if key in doc:
					del doc[key]

			# Only the values matter here; list values are child tables.
			for v in doc.values():
				if isinstance(v, list):
					for child in v:
						for key in child_del_keys:
							if key in child:
								del child[key]

	out = []
	if name:
		out.append(frappe.get_doc(doctype, name).as_dict())
	elif frappe.db.get_value("DocType", doctype, "issingle"):
		# Single DocTypes have exactly one document.
		out.append(frappe.get_doc(doctype).as_dict())
	else:
		for doc in frappe.get_all(doctype, fields=["name"], filters=filters, or_filters=or_filters, limit_page_length=0, order_by=order_by):
			out.append(frappe.get_doc(doctype, doc.name).as_dict())
	post_process(out)

	dirname = os.path.dirname(path)
	if not os.path.exists(dirname):
		# Legacy fallback: retry relative to the parent directory.
		path = os.path.join('..', path)

	with open(path, "w") as outfile:
		outfile.write(frappe.as_json(out))
def export_csv(doctype, path):
	"""Write a CSV data-export template (including data) for ``doctype`` to ``path``."""
	from frappe.core.doctype.data_export.exporter import export_data

	with open(path, "wb") as csvfile:
		# export_data() leaves the rendered CSV on frappe.response.result
		export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True)
		csv_text = frappe.response.result
		csvfile.write(csv_text.encode("utf-8"))
@frappe.whitelist()
def export_fixture(doctype, app):
	"""Export ``doctype`` as a JSON fixture into ``app``'s fixtures directory.

	Only the Administrator may export fixtures.

	Raises:
		frappe.PermissionError: if the session user is not Administrator.
	"""
	if frappe.session.user != "Administrator":
		raise frappe.PermissionError

	# makedirs(exist_ok=True) replaces the old exists()/mkdir() pair, which was
	# racy (TOCTOU) and called get_app_path() twice.
	fixtures_path = frappe.get_app_path(app, "fixtures")
	os.makedirs(fixtures_path, exist_ok=True)

	export_json(doctype, frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json"), order_by="name asc")
@@ -1,24 +0,0 @@ | |||||
frappe.listview_settings['Data Import Legacy'] = {
	add_fields: ["import_status"],
	has_indicator_for_draft: 1,
	get_indicator: function(doc) {
		// [label, colour, list-filter] per import status.
		const indicators = {
			'Successful': [__("Success"), "green", "import_status,=,Successful"],
			'Partially Successful': [__("Partial Success"), "blue", "import_status,=,Partially Successful"],
			'In Progress': [__("In Progress"), "orange", "import_status,=,In Progress"],
			'Failed': [__("Failed"), "red", "import_status,=,Failed"],
			'Pending': [__("Pending"), "orange", "import_status,=,"]
		};
		if (doc.import_status) {
			return indicators[doc.import_status];
		}
		// Drafts and everything else show as Pending (the original's two
		// trailing branches returned the same value).
		return indicators['Pending'];
	}
};
@@ -1,538 +0,0 @@ | |||||
#!/usr/bin/env python | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors | |||||
# MIT License. See license.txt | |||||
import requests | |||||
import frappe, json | |||||
import frappe.permissions | |||||
from frappe import _ | |||||
from frappe.utils.csvutils import getlink | |||||
from frappe.utils.dateutils import parse_date | |||||
from frappe.utils import cint, cstr, flt, getdate, get_datetime, get_url, get_absolute_url, duration_to_seconds | |||||
@frappe.whitelist()
def get_data_keys():
	"""Return the translated marker labels used to parse legacy import templates."""
	keys = frappe._dict()
	keys.data_separator = _('Start entering data below this line')
	keys.main_table = _("Table") + ":"
	keys.parent_table = _("Parent Table") + ":"
	keys.columns = _("Column Name") + ":"
	keys.doctype = _("DocType") + ":"
	return keys
@frappe.whitelist()
def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None,
	update_only = None, ignore_links=False, pre_process=None, via_console=False, from_data_import="No",
	skip_errors = True, data_import_doc=None, validate_template=False, user=None):
	"""Import rows from a legacy data-import template (CSV/XLSX).

	Parses the template header to discover the target DocType and column map,
	then inserts/updates one document per data row, optionally in batches,
	logging per-row success/failure. When ``validate_template`` is true, only
	the header is validated and the total row count is stored on
	``data_import_doc``. Returns a ``{"messages": [...], "error": bool}`` dict
	when not driven by a Data Import Legacy document.
	"""
	# for translations: reset the cached language for the acting user
	if user:
		frappe.cache().hdel("lang", user)
		frappe.set_user_lang(user)

	if data_import_doc and isinstance(data_import_doc, str):
		data_import_doc = frappe.get_doc("Data Import Legacy", data_import_doc)
	if data_import_doc and from_data_import == "Yes":
		# options come from the Data Import Legacy document itself
		no_email = data_import_doc.no_email
		ignore_encoding_errors = data_import_doc.ignore_encoding_errors
		update_only = data_import_doc.only_update
		submit_after_import = data_import_doc.submit_after_import
		overwrite = data_import_doc.overwrite
		skip_errors = data_import_doc.skip_errors
	else:
		# extra input params passed via the request form dict
		params = json.loads(frappe.form_dict.get("params") or '{}')
		if params.get("submit_after_import"):
			submit_after_import = True
		if params.get("ignore_encoding_errors"):
			ignore_encoding_errors = True
		if not params.get("no_email"):
			no_email = False
		if params.get('update_only'):
			update_only = True
		if params.get('from_data_import'):
			from_data_import = params.get('from_data_import')
		if not params.get('skip_errors'):
			skip_errors = params.get('skip_errors')

	frappe.flags.in_import = True
	frappe.flags.mute_emails = no_email

	def get_data_keys_definition():
		# Marker labels ("Table:", "Column Name:", ...) used to find header rows.
		return get_data_keys()

	def bad_template():
		frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator))

	def check_data_length():
		if not data:
			frappe.throw(_("No data found in the file. Please reattach the new file with data."))

	def get_start_row():
		# Index of the first data row: the row after the data-separator marker.
		for i, row in enumerate(rows):
			if row and row[0]==get_data_keys_definition().data_separator:
				return i+1
		bad_template()

	def get_header_row(key):
		return get_header_row_and_idx(key)[0]

	def get_header_row_and_idx(key):
		# Returns ([], -1) when the marker row is absent (old-style templates).
		for i, row in enumerate(header):
			if row and row[0]==key:
				return row, i
		return [], -1

	def filter_empty_columns(columns):
		# Trailing empty columns are trimmed; empty columns in the middle abort.
		empty_cols = list(filter(lambda x: x in ("", None), columns))

		if empty_cols:
			if columns[-1*len(empty_cols):] == empty_cols:
				# filter empty columns if they exist at the end
				columns = columns[:-1*len(empty_cols)]
			else:
				frappe.msgprint(_("Please make sure that there are no empty columns in the file."),
					raise_exception=1)

		return columns

	def make_column_map():
		# Build (doctype, parentfield) -> {column index -> fieldname/fieldtype}
		# from the "DocType:" header row; fieldnames are 2 rows below it and
		# fieldtypes 4 rows below it in the template layout.
		doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype)
		if row_idx == -1: # old style
			return

		dt = None
		for i, d in enumerate(doctype_row[1:]):
			if d not in ("~", "-"):
				if d and doctype_row[i] in (None, '' ,'~', '-', _("DocType") + ":"):
					dt, parentfield = d, None
					# xls format truncates the row, so it may not have more columns
					if len(doctype_row) > i+2:
						parentfield = doctype_row[i+2]
					doctypes.append((dt, parentfield))
					column_idx_to_fieldname[(dt, parentfield)] = {}
					column_idx_to_fieldtype[(dt, parentfield)] = {}
				if dt:
					column_idx_to_fieldname[(dt, parentfield)][i+1] = rows[row_idx + 2][i+1]
					column_idx_to_fieldtype[(dt, parentfield)][i+1] = rows[row_idx + 4][i+1]

	def get_doc(start_idx):
		# Assemble one parent document (plus its child rows, which follow on
		# subsequent rows whose main columns are empty) starting at start_idx.
		# Returns (doc dict, attachment urls, index after the last consumed row).
		if doctypes:
			doc = {}
			attachments = []
			last_error_row_idx = None
			for idx in range(start_idx, len(rows)):
				last_error_row_idx = idx	# pylint: disable=W0612
				if (not doc) or main_doc_empty(rows[idx]):
					for dt, parentfield in doctypes:
						d = {}
						for column_idx in column_idx_to_fieldname[(dt, parentfield)]:
							try:
								fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx]
								fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx]

								if not fieldname or not rows[idx][column_idx]:
									continue

								# cast the raw cell value per its fieldtype
								d[fieldname] = rows[idx][column_idx]
								if fieldtype in ("Int", "Check"):
									d[fieldname] = cint(d[fieldname])
								elif fieldtype in ("Float", "Currency", "Percent"):
									d[fieldname] = flt(d[fieldname])
								elif fieldtype == "Date":
									if d[fieldname] and isinstance(d[fieldname], str):
										d[fieldname] = getdate(parse_date(d[fieldname]))
								elif fieldtype == "Datetime":
									if d[fieldname]:
										if " " in d[fieldname]:
											_date, _time = d[fieldname].split()
										else:
											_date, _time = d[fieldname], '00:00:00'
										_date = parse_date(d[fieldname])
										d[fieldname] = get_datetime(_date + " " + _time)
									else:
										d[fieldname] = None
								elif fieldtype == "Duration":
									d[fieldname] = duration_to_seconds(cstr(d[fieldname]))
								elif fieldtype in ("Image", "Attach Image", "Attach"):
									# added file to attachments list
									attachments.append(d[fieldname])
								elif fieldtype in ("Link", "Dynamic Link", "Data") and d[fieldname]:
									# as fields can be saved in the number format(long type) in data import template
									d[fieldname] = cstr(d[fieldname])
							except IndexError:
								# short rows are normal (xls truncates trailing blanks)
								pass

						# scrub quotes from name and modified
						if d.get("name") and d["name"].startswith('"'):
							d["name"] = d["name"][1:-1]

						if sum(0 if not val else 1 for val in d.values()):
							d['doctype'] = dt
							if dt == doctype:
								doc.update(d)
							else:
								if not overwrite and doc.get("name"):
									d['parent'] = doc["name"]
								d['parenttype'] = doctype
								d['parentfield'] = parentfield
								doc.setdefault(d['parentfield'], []).append(d)
				else:
					# next parent row reached; stop consuming
					break

			return doc, attachments, last_error_row_idx
		else:
			# old-style template: a flat row mapped by the columns header
			doc = frappe._dict(zip(columns, rows[start_idx][1:]))
			doc['doctype'] = doctype
			return doc, [], None

	# used in testing whether a row is empty or parent row or child row
	# checked only 3 first columns since first two columns can be blank for example the case of
	# importing the item variant where item code and item name will be blank.
	def main_doc_empty(row):
		if row:
			for i in range(3,0,-1):
				if len(row) > i and row[i]:
					return False
		return True

	def validate_naming(doc):
		# Ensure the field driving autoname is present when the DocType needs it.
		autoname = frappe.get_meta(doctype).autoname
		if autoname:
			if autoname[0:5] == 'field':
				autoname = autoname[6:]
			elif autoname == 'naming_series:':
				autoname = 'naming_series'
			else:
				return True

			if (autoname not in doc) or (not doc[autoname]):
				from frappe.model.base_document import get_controller
				if not hasattr(get_controller(doctype), "autoname"):
					frappe.throw(_("{0} is a mandatory field").format(autoname))
		return True

	users = frappe.db.sql_list("select name from tabUser")
	def prepare_for_insert(doc):
		# don't block data import if user is not set
		# migrating from another system
		if not doc.owner in users:
			doc.owner = frappe.session.user
		if not doc.modified_by in users:
			doc.modified_by = frappe.session.user

	def is_valid_url(url):
		# Relative site files are resolved to absolute URLs before checking.
		is_valid = False
		if url.startswith("/files") or url.startswith("/private/files"):
			url = get_url(url)

		try:
			r = requests.get(url)
			is_valid = True if r.status_code == 200 else False
		except Exception:
			# unreachable URL -> treat as invalid, don't fail the row
			pass

		return is_valid

	def attach_file_to_doc(doctype, docname, file_url):
		# check if attachment is already available
		# check if the attachment link is relative or not
		if not file_url:
			return
		if not is_valid_url(file_url):
			return

		# NOTE(review): file_url is interpolated into SQL here; it has passed
		# is_valid_url() but parameterized queries would be safer — confirm.
		files = frappe.db.sql("""Select name from `tabFile` where attached_to_doctype='{doctype}' and
			attached_to_name='{docname}' and (file_url='{file_url}' or thumbnail_url='{file_url}')""".format(
				doctype=doctype,
				docname=docname,
				file_url=file_url
			))

		if files:
			# file is already attached
			return

		_file = frappe.get_doc({
			"doctype": "File",
			"file_url": file_url,
			"attached_to_name": docname,
			"attached_to_doctype": doctype,
			"attached_to_field": 0,
			"folder": "Home/Attachments"})
		_file.save()

	# header: load rows from the attached file when not passed in directly
	filename, file_extension = ['','']
	if not rows:
		_file = frappe.get_doc("File", {"file_url": data_import_doc.import_file})
		fcontent = _file.get_content()
		filename, file_extension = _file.get_extension()
		if file_extension == '.xlsx' and from_data_import == 'Yes':
			from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
			rows = read_xlsx_file_from_attached_file(file_url=data_import_doc.import_file)

		elif file_extension == '.csv':
			from frappe.utils.csvutils import read_csv_content
			rows = read_csv_content(fcontent, ignore_encoding_errors)

		else:
			frappe.throw(_("Unsupported File Format"))

	start_row = get_start_row()
	header = rows[:start_row]
	data = rows[start_row:]
	try:
		doctype = get_header_row(get_data_keys_definition().main_table)[1]
		columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:])
	except:
		# NOTE(review): bare except hides the real parse error; any failure here
		# is reported as a tampered header.
		frappe.throw(_("Cannot change header content"))
	doctypes = []
	column_idx_to_fieldname = {}
	column_idx_to_fieldtype = {}

	if skip_errors:
		# error file starts with the template header so it can be re-imported
		data_rows_with_error = header

	if submit_after_import and not cint(frappe.db.get_value("DocType",
			doctype, "is_submittable")):
		submit_after_import = False

	parenttype = get_header_row(get_data_keys_definition().parent_table)

	if len(parenttype) > 1:
		parenttype = parenttype[1]

	# check permissions
	if not frappe.permissions.can_import(parenttype or doctype):
		frappe.flags.mute_emails = False
		return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}

	# Throw exception in case of the empty data file
	check_data_length()
	make_column_map()
	total = len(data)

	if validate_template:
		# validation-only mode: record the row count and stop before importing
		if total:
			data_import_doc.total_rows = total
		return True

	if overwrite==None:
		overwrite = params.get('overwrite')

	# delete child rows (if parenttype)
	parentfield = None
	if parenttype:
		parentfield = get_parent_field(doctype, parenttype)

		if overwrite:
			delete_child_rows(data, doctype)

	import_log = []
	def log(**kwargs):
		if via_console:
			print((kwargs.get("title") + kwargs.get("message")).encode('utf-8'))
		else:
			import_log.append(kwargs)

	def as_link(doctype, name):
		if via_console:
			return "{0}: {1}".format(doctype, name)
		else:
			return getlink(doctype, name)

	# publish realtime task update
	def publish_progress(achieved, reload=False):
		if data_import_doc:
			frappe.publish_realtime("data_import_progress", {"progress": str(int(100.0*achieved/total)),
				"data_import": data_import_doc.name, "reload": reload}, user=frappe.session.user)

	error_flag = rollback_flag = False

	batch_size = frappe.conf.data_import_batch_size or 1000

	for batch_start in range(0, total, batch_size):
		batch = data[batch_start:batch_start + batch_size]

		for i, row in enumerate(batch):
			# bypass empty rows
			if main_doc_empty(row):
				continue

			row_idx = i + start_row
			doc = None

			publish_progress(i)

			try:
				doc, attachments, last_error_row_idx = get_doc(row_idx)
				validate_naming(doc)
				if pre_process:
					pre_process(doc)

				original = None
				if parentfield:
					# child-table import: append the row to its parent document
					parent = frappe.get_doc(parenttype, doc["parent"])
					doc = parent.append(parentfield, doc)
					parent.save()
				else:
					if overwrite and doc.get("name") and frappe.db.exists(doctype, doc["name"]):
						original = frappe.get_doc(doctype, doc["name"])
						original_name = original.name
						original.update(doc)
						# preserve original name for case sensitivity
						original.name = original_name
						original.flags.ignore_links = ignore_links
						original.save()
						doc = original
					else:
						if not update_only:
							doc = frappe.get_doc(doc)
							prepare_for_insert(doc)
							doc.flags.ignore_links = ignore_links
							doc.insert()
					if attachments:
						# check file url and create a File document
						for file_url in attachments:
							attach_file_to_doc(doc.doctype, doc.name, file_url)
				if submit_after_import:
					doc.submit()

				# log the outcome of this row
				if parentfield:
					log(**{"row": doc.idx, "title": 'Inserted row for "%s"' % (as_link(parenttype, doc.parent)),
						"link": get_absolute_url(parenttype, doc.parent), "message": 'Document successfully saved', "indicator": "green"})
				elif submit_after_import:
					log(**{"row": row_idx + 1, "title":'Submitted row for "%s"' % (as_link(doc.doctype, doc.name)),
						"message": "Document successfully submitted", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "blue"})
				elif original:
					log(**{"row": row_idx + 1,"title":'Updated row for "%s"' % (as_link(doc.doctype, doc.name)),
						"message": "Document successfully updated", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"})
				elif not update_only:
					log(**{"row": row_idx + 1, "title":'Inserted row for "%s"' % (as_link(doc.doctype, doc.name)),
						"message": "Document successfully saved", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"})
				else:
					log(**{"row": row_idx + 1, "title":'Ignored row for %s' % (row[1]), "link": None,
						"message": "Document updation ignored", "indicator": "orange"})

			except Exception as e:
				error_flag = True

				# build error message
				if frappe.local.message_log:
					err_msg = "\n".join(['<p class="border-bottom small">{}</p>'.format(json.loads(msg).get('message')) for msg in frappe.local.message_log])
				else:
					err_msg = '<p class="border-bottom small">{}</p>'.format(cstr(e))

				error_trace = frappe.get_traceback()
				if error_trace:
					error_log_doc = frappe.log_error(error_trace)
					error_link = get_absolute_url("Error Log", error_log_doc.name)
				else:
					error_link = None

				log(**{
					"row": row_idx + 1,
					"title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""),
					"message": err_msg,
					"indicator": "red",
					"link":error_link
				})

				# data with error to create a new file
				# include the errored data in the last row as last_error_row_idx will not be updated for the last row
				if skip_errors:
					if last_error_row_idx == len(rows)-1:
						last_error_row_idx = len(rows)
					data_rows_with_error += rows[row_idx:last_error_row_idx]
				else:
					rollback_flag = True
			finally:
				frappe.local.message_log = []

		start_row += batch_size
		# commit/rollback per batch
		if rollback_flag:
			frappe.db.rollback()
		else:
			frappe.db.commit()

	frappe.flags.mute_emails = False
	frappe.flags.in_import = False

	log_message = {"messages": import_log, "error": error_flag}
	if data_import_doc:
		data_import_doc.log_details = json.dumps(log_message)

		import_status = None
		if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error):
			import_status = "Partially Successful"
			# write the file with the faulty row
			file_name = 'error_' + filename + file_extension
			if file_extension == '.xlsx':
				from frappe.utils.xlsxutils import make_xlsx
				xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template")
				file_data = xlsx_file.getvalue()
			else:
				from frappe.utils.csvutils import to_csv
				file_data = to_csv(data_rows_with_error)
			_file = frappe.get_doc({
				"doctype": "File",
				"file_name": file_name,
				"attached_to_doctype": "Data Import Legacy",
				"attached_to_name": data_import_doc.name,
				"folder": "Home/Attachments",
				"content": file_data})
			_file.save()
			data_import_doc.error_file = _file.file_url
		elif error_flag:
			import_status = "Failed"
		else:
			import_status = "Successful"

		data_import_doc.import_status = import_status
		data_import_doc.save()
		if data_import_doc.import_status in ["Successful", "Partially Successful"]:
			data_import_doc.submit()
			publish_progress(100, True)
		else:
			publish_progress(0, True)
		frappe.db.commit()
	else:
		return log_message
def get_parent_field(doctype, parenttype):
	"""Return the table-field name on ``parenttype`` whose options point to ``doctype``.

	Raises a bare ``Exception`` (after showing a message) when no such
	table field exists — callers rely on any exception aborting the import.
	"""
	parentfield = None

	# get parentfield
	if parenttype:
		for d in frappe.get_meta(parenttype).get_table_fields():
			if d.options==doctype:
				parentfield = d.fieldname
				break

		if not parentfield:
			# Fixed format string: the original used {0} twice with three
			# arguments, printing "parentfield" twice and dropping doctype.
			frappe.msgprint(_("Did not find {0} for {1} ({2})").format("parentfield", parenttype, doctype))
			raise Exception

	return parentfield
def delete_child_rows(rows, doctype):
	"""delete child rows for all parents"""
	# second column of each data row holds the parent name; de-duplicate first
	parents = {row[1] for row in rows}
	for parent in parents:
		if parent:
			frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), parent)
@@ -1,38 +0,0 @@ | |||||
<div> | |||||
<div class="table-responsive"> | |||||
<table class="table table-bordered table-hover log-details-table"> | |||||
<tr> | |||||
<th style="width:10%"> {{ __("Row No") }} </th> | |||||
<th style="width:40%"> {{ __("Row Status") }} </th> | |||||
<th style="width:50%"> {{ __("Message") }} </th> | |||||
</tr> | |||||
{% for row in data %} | |||||
{% if (!show_only_errors) || (show_only_errors && row.indicator == "red") %} | |||||
<tr> | |||||
<td> | |||||
<span>{{ row.row }} </span> | |||||
</td> | |||||
<td> | |||||
<span class="indicator {{ row.indicator }}"> {{ row.title }} </span> | |||||
</td> | |||||
<td> | |||||
{% if (import_status != "Failed" || (row.indicator == "red")) { %} | |||||
<div>{{ row.message }}</div> | |||||
{% if row.link %} | |||||
<span style="width: 10%; float:right;"> | |||||
<a class="btn-open no-decoration" title="Open Link" href="{{ row.link }}"> | |||||
<i class="octicon octicon-arrow-right"></i> | |||||
</a> | |||||
</span> | |||||
{% endif %} | |||||
{% } else { %} | |||||
<span> {{ __("Document can't be saved.") }} </span>
{% } %} | |||||
</td> | |||||
</tr> | |||||
{% endif %} | |||||
{% endfor %} | |||||
</table> | |||||
</div> | |||||
</div> |
@@ -1,8 +0,0 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and Contributors | |||||
# See license.txt | |||||
# import frappe | |||||
import unittest | |||||
class TestDataImportLegacy(unittest.TestCase):
	# Placeholder: no automated coverage exists yet for the legacy importer.
	pass
@@ -53,6 +53,7 @@ class User(Document): | |||||
def after_insert(self): | def after_insert(self): | ||||
create_notification_settings(self.name) | create_notification_settings(self.name) | ||||
frappe.cache().delete_key('users_for_mentions') | frappe.cache().delete_key('users_for_mentions') | ||||
frappe.cache().delete_key('enabled_users') | |||||
def validate(self): | def validate(self): | ||||
self.check_demo() | self.check_demo() | ||||
@@ -129,6 +130,9 @@ class User(Document): | |||||
if self.has_value_changed('allow_in_mentions') or self.has_value_changed('user_type'): | if self.has_value_changed('allow_in_mentions') or self.has_value_changed('user_type'): | ||||
frappe.cache().delete_key('users_for_mentions') | frappe.cache().delete_key('users_for_mentions') | ||||
if self.has_value_changed('enabled'): | |||||
frappe.cache().delete_key('enabled_users') | |||||
def has_website_permission(self, ptype, user, verbose=False): | def has_website_permission(self, ptype, user, verbose=False): | ||||
"""Returns true if current user is the session user""" | """Returns true if current user is the session user""" | ||||
return self.name == frappe.session.user | return self.name == frappe.session.user | ||||
@@ -392,6 +396,8 @@ class User(Document): | |||||
if self.get('allow_in_mentions'): | if self.get('allow_in_mentions'): | ||||
frappe.cache().delete_key('users_for_mentions') | frappe.cache().delete_key('users_for_mentions') | ||||
frappe.cache().delete_key('enabled_users') | |||||
def before_rename(self, old_name, new_name, merge=False): | def before_rename(self, old_name, new_name, merge=False): | ||||
self.check_demo() | self.check_demo() | ||||
@@ -1230,3 +1236,10 @@ def generate_keys(user): | |||||
def switch_theme(theme): | def switch_theme(theme): | ||||
if theme in ["Dark", "Light"]: | if theme in ["Dark", "Light"]: | ||||
frappe.db.set_value("User", frappe.session.user, "desk_theme", theme) | frappe.db.set_value("User", frappe.session.user, "desk_theme", theme) | ||||
def get_enabled_users(): | |||||
def _get_enabled_users(): | |||||
enabled_users = frappe.get_all("User", filters={"enabled": "1"}, pluck="name") | |||||
return enabled_users | |||||
return frappe.cache().get_value("enabled_users", _get_enabled_users) |
@@ -120,7 +120,7 @@ | |||||
"label": "Field Type", | "label": "Field Type", | ||||
"oldfieldname": "fieldtype", | "oldfieldname": "fieldtype", | ||||
"oldfieldtype": "Select", | "oldfieldtype": "Select", | ||||
"options": "Attach\nAttach Image\nBarcode\nButton\nCheck\nCode\nColor\nColumn Break\nCurrency\nData\nDate\nDatetime\nDuration\nDynamic Link\nFloat\nGeolocation\nHTML\nImage\nInt\nLink\nLong Text\nMarkdown Editor\nPassword\nPercent\nRating\nRead Only\nSection Break\nSelect\nSmall Text\nTable\nTable MultiSelect\nText\nText Editor\nTime\nSignature", | |||||
"options": "Attach\nAttach Image\nBarcode\nButton\nCheck\nCode\nColor\nColumn Break\nCurrency\nData\nDate\nDatetime\nDuration\nDynamic Link\nFloat\nFold\nGeolocation\nHeading\nHTML\nHTML Editor\nImage\nInt\nLink\nLong Text\nMarkdown Editor\nPassword\nPercent\nRead Only\nRating\nSection Break\nSelect\nSmall Text\nTable\nTable MultiSelect\nText\nText Editor\nTime\nSignature", | |||||
"reqd": 1 | "reqd": 1 | ||||
}, | }, | ||||
{ | { | ||||
@@ -417,7 +417,7 @@ | |||||
"idx": 1, | "idx": 1, | ||||
"index_web_pages_for_search": 1, | "index_web_pages_for_search": 1, | ||||
"links": [], | "links": [], | ||||
"modified": "2020-10-29 06:14:43.073329", | |||||
"modified": "2021-07-12 04:54:12.042319", | |||||
"modified_by": "Administrator", | "modified_by": "Administrator", | ||||
"module": "Custom", | "module": "Custom", | ||||
"name": "Custom Field", | "name": "Custom Field", | ||||
@@ -15,7 +15,6 @@ from frappe import _ | |||||
from time import time | from time import time | ||||
from frappe.utils import now, getdate, cast_fieldtype, get_datetime | from frappe.utils import now, getdate, cast_fieldtype, get_datetime | ||||
from frappe.model.utils.link_count import flush_local_link_count | from frappe.model.utils.link_count import flush_local_link_count | ||||
from frappe.utils import cint | |||||
class Database(object): | class Database(object): | ||||
@@ -556,8 +555,7 @@ class Database(object): | |||||
if not df: | if not df: | ||||
frappe.throw(_('Invalid field name: {0}').format(frappe.bold(fieldname)), self.InvalidColumnName) | frappe.throw(_('Invalid field name: {0}').format(frappe.bold(fieldname)), self.InvalidColumnName) | ||||
if df.fieldtype in frappe.model.numeric_fieldtypes: | |||||
val = cint(val) | |||||
val = cast_fieldtype(df.fieldtype, val) | |||||
self.value_cache[doctype][fieldname] = val | self.value_cache[doctype][fieldname] = val | ||||
@@ -452,6 +452,7 @@ def get_custom_report_list(module): | |||||
"type": "Link", | "type": "Link", | ||||
"link_type": "report", | "link_type": "report", | ||||
"doctype": r.ref_doctype, | "doctype": r.ref_doctype, | ||||
"dependencies": r.ref_doctype, | |||||
"is_query_report": 1 if r.report_type in ("Query Report", "Script Report", "Custom Report") else 0, | "is_query_report": 1 if r.report_type in ("Query Report", "Script Report", "Custom Report") else 0, | ||||
"label": _(r.name), | "label": _(r.name), | ||||
"link_to": r.name, | "link_to": r.name, | ||||
@@ -12,7 +12,10 @@ class NotificationLog(Document): | |||||
frappe.publish_realtime('notification', after_commit=True, user=self.for_user) | frappe.publish_realtime('notification', after_commit=True, user=self.for_user) | ||||
set_notifications_as_unseen(self.for_user) | set_notifications_as_unseen(self.for_user) | ||||
if is_email_notifications_enabled_for_type(self.for_user, self.type): | if is_email_notifications_enabled_for_type(self.for_user, self.type): | ||||
send_notification_email(self) | |||||
try: | |||||
send_notification_email(self) | |||||
except frappe.OutgoingEmailError: | |||||
frappe.log_error(message=frappe.get_traceback(), title=_("Failed to send notification email")) | |||||
def get_permission_query_conditions(for_user): | def get_permission_query_conditions(for_user): | ||||
@@ -1,8 +1,26 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and Contributors | |||||
# See license.txt | |||||
# import frappe | |||||
import unittest | import unittest | ||||
import frappe | |||||
from frappe.desk.reportview import get_stats | |||||
from frappe.desk.doctype.tag.tag import add_tag | |||||
class TestTag(unittest.TestCase): | class TestTag(unittest.TestCase): | ||||
pass | |||||
def setUp(self) -> None: | |||||
frappe.db.sql("DELETE from `tabTag`") | |||||
frappe.db.sql("UPDATE `tabDocType` set _user_tags=''") | |||||
def test_tag_count_query(self): | |||||
self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'), | |||||
{'_user_tags': [['No Tags', frappe.db.count('DocType')]]}) | |||||
add_tag('Standard', 'DocType', 'User') | |||||
add_tag('Standard', 'DocType', 'ToDo') | |||||
# count with no filter | |||||
self.assertDictEqual(get_stats('["_user_tags"]', 'DocType'), | |||||
{'_user_tags': [['Standard', 2], ['No Tags', frappe.db.count('DocType') - 2]]}) | |||||
# count with child table field filter | |||||
self.assertDictEqual(get_stats('["_user_tags"]', | |||||
'DocType', | |||||
filters='[["DocField", "fieldname", "like", "%last_name%"], ["DocType", "name", "like", "%use%"]]'), | |||||
{'_user_tags': [['Standard', 1], ['No Tags', 0]]}) |
@@ -445,24 +445,36 @@ def get_stats(stats, doctype, filters=[]): | |||||
for tag in tags: | for tag in tags: | ||||
if not tag in columns: continue | if not tag in columns: continue | ||||
try: | try: | ||||
tagcount = frappe.get_list(doctype, fields=[tag, "count(*)"], | |||||
#filters=["ifnull(`%s`,'')!=''" % tag], group_by=tag, as_list=True) | |||||
filters = filters + ["ifnull(`%s`,'')!=''" % tag], group_by = tag, as_list = True) | |||||
if tag=='_user_tags': | |||||
stats[tag] = scrub_user_tags(tagcount) | |||||
stats[tag].append([_("No Tags"), frappe.get_list(doctype, | |||||
tag_count = frappe.get_list(doctype, | |||||
fields=[tag, "count(*)"], | |||||
filters=filters + [[tag, '!=', '']], | |||||
group_by=tag, | |||||
as_list=True, | |||||
distinct=1, | |||||
) | |||||
if tag == '_user_tags': | |||||
stats[tag] = scrub_user_tags(tag_count) | |||||
no_tag_count = frappe.get_list(doctype, | |||||
fields=[tag, "count(*)"], | fields=[tag, "count(*)"], | ||||
filters=filters +["({0} = ',' or {0} = '' or {0} is null)".format(tag)], as_list=True)[0][1]]) | |||||
filters=filters + [[tag, "in", ('', ',')]], | |||||
as_list=True, | |||||
group_by=tag, | |||||
order_by=tag, | |||||
) | |||||
no_tag_count = no_tag_count[0][1] if no_tag_count else 0 | |||||
stats[tag].append([_("No Tags"), no_tag_count]) | |||||
else: | else: | ||||
stats[tag] = tagcount | |||||
stats[tag] = tag_count | |||||
except frappe.db.SQLError: | except frappe.db.SQLError: | ||||
# does not work for child tables | |||||
pass | pass | ||||
except frappe.db.InternalError: | |||||
except frappe.db.InternalError as e: | |||||
# raised when _user_tags column is added on the fly | # raised when _user_tags column is added on the fly | ||||
pass | pass | ||||
return stats | return stats | ||||
@frappe.whitelist() | @frappe.whitelist() | ||||
@@ -168,7 +168,18 @@ def search_widget(doctype, txt, query=None, searchfield=None, start=0, | |||||
strict=False) | strict=False) | ||||
if doctype in UNTRANSLATED_DOCTYPES: | if doctype in UNTRANSLATED_DOCTYPES: | ||||
values = tuple([v for v in list(values) if re.search(re.escape(txt)+".*", (_(v.name) if as_dict else _(v[0])), re.IGNORECASE)]) | |||||
# Filtering the values array so that query is included in very element | |||||
values = ( | |||||
v for v in values | |||||
if re.search( | |||||
f"{re.escape(txt)}.*", _(v.name if as_dict else v[0]), re.IGNORECASE | |||||
) | |||||
) | |||||
# Sorting the values array so that relevant results always come first | |||||
# This will first bring elements on top in which query is a prefix of element | |||||
# Then it will bring the rest of the elements and sort them in lexicographical order | |||||
values = sorted(values, key=lambda x: relevance_sorter(x, txt, as_dict)) | |||||
# remove _relevance from results | # remove _relevance from results | ||||
if as_dict: | if as_dict: | ||||
@@ -208,6 +219,13 @@ def scrub_custom_query(query, key, txt): | |||||
query = query.replace('%s', ((txt or '') + '%')) | query = query.replace('%s', ((txt or '') + '%')) | ||||
return query | return query | ||||
def relevance_sorter(key, query, as_dict): | |||||
value = _(key.name if as_dict else key[0]) | |||||
return ( | |||||
value.lower().startswith(query.lower()) is not True, | |||||
value | |||||
) | |||||
@wrapt.decorator | @wrapt.decorator | ||||
def validate_and_sanitize_search_inputs(fn, instance, args, kwargs): | def validate_and_sanitize_search_inputs(fn, instance, args, kwargs): | ||||
kwargs.update(dict(zip(fn.__code__.co_varnames, args))) | kwargs.update(dict(zip(fn.__code__.co_varnames, args))) | ||||
@@ -127,7 +127,7 @@ w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> | |||||
''' | ''' | ||||
transformed_html = ''' | transformed_html = ''' | ||||
<h3>Hi John</h3> | <h3>Hi John</h3> | ||||
<p style="margin:5px 0 !important">This is a test email</p> | |||||
<p style="margin:1em 0 !important">This is a test email</p> | |||||
''' | ''' | ||||
self.assertTrue(transformed_html in inline_style_in_html(html)) | self.assertTrue(transformed_html in inline_style_in_html(html)) | ||||
@@ -171,6 +171,9 @@ doc_events = { | |||||
"frappe.workflow.doctype.workflow_action.workflow_action.process_workflow_actions", | "frappe.workflow.doctype.workflow_action.workflow_action.process_workflow_actions", | ||||
"frappe.event_streaming.doctype.event_update_log.event_update_log.notify_consumers" | "frappe.event_streaming.doctype.event_update_log.event_update_log.notify_consumers" | ||||
], | ], | ||||
"on_update_after_submit": [ | |||||
"frappe.workflow.doctype.workflow_action.workflow_action.process_workflow_actions" | |||||
], | |||||
"on_change": [ | "on_change": [ | ||||
"frappe.social.doctype.energy_point_rule.energy_point_rule.process_energy_points", | "frappe.social.doctype.energy_point_rule.energy_point_rule.process_energy_points", | ||||
"frappe.automation.doctype.milestone_tracker.milestone_tracker.evaluate_milestone" | "frappe.automation.doctype.milestone_tracker.milestone_tracker.evaluate_milestone" | ||||
@@ -53,13 +53,15 @@ frappe.ui.form.ControlSignature = class ControlSignature extends frappe.ui.form. | |||||
this.img = $("<img class='img-responsive attach-image-display'>") | this.img = $("<img class='img-responsive attach-image-display'>") | ||||
.appendTo(this.img_wrapper).toggle(false); | .appendTo(this.img_wrapper).toggle(false); | ||||
} | } | ||||
refresh_input(e) { | |||||
refresh_input() { | |||||
// signature dom is not ready | |||||
if (!this.body) return; | |||||
// prevent to load the second time | // prevent to load the second time | ||||
this.make_pad(); | this.make_pad(); | ||||
this.$wrapper.find(".control-input").toggle(false); | this.$wrapper.find(".control-input").toggle(false); | ||||
this.set_editable(this.get_status()=="Write"); | this.set_editable(this.get_status()=="Write"); | ||||
this.load_pad(); | this.load_pad(); | ||||
if(this.get_status()=="Read") { | |||||
if (this.get_status() == "Read") { | |||||
$(this.disp_area).toggle(false); | $(this.disp_area).toggle(false); | ||||
} | } | ||||
} | } | ||||
@@ -1265,7 +1265,9 @@ frappe.ui.form.Form = class FrappeForm { | |||||
if (df && df[property] != value) { | if (df && df[property] != value) { | ||||
df[property] = value; | df[property] = value; | ||||
if (table_field && table_row_name) { | if (table_field && table_row_name) { | ||||
this.fields_dict[fieldname].grid.grid_rows_by_docname[table_row_name].refresh_field(fieldname); | |||||
if (this.fields_dict[fieldname].grid.grid_rows_by_docname[table_row_name]) { | |||||
this.fields_dict[fieldname].grid.grid_rows_by_docname[table_row_name].refresh_field(fieldname); | |||||
} | |||||
} else { | } else { | ||||
this.refresh_field(fieldname); | this.refresh_field(fieldname); | ||||
} | } | ||||
@@ -263,9 +263,6 @@ frappe.ui.form.Layout = class Layout { | |||||
section.addClass("empty-section"); | section.addClass("empty-section"); | ||||
} | } | ||||
}); | }); | ||||
this.frm && this.frm.dashboard.refresh(); | |||||
} | } | ||||
refresh_fields (fields) { | refresh_fields (fields) { | ||||
@@ -1202,6 +1202,8 @@ Object.assign(frappe.utils, { | |||||
} else if (type === "report") { | } else if (type === "report") { | ||||
if (item.is_query_report) { | if (item.is_query_report) { | ||||
route = "query-report/" + item.name; | route = "query-report/" + item.name; | ||||
} else if (!item.doctype) { | |||||
route = "/report/" + item.name; | |||||
} else { | } else { | ||||
route = frappe.router.slug(item.doctype) + "/view/report/" + item.name; | route = frappe.router.slug(item.doctype) + "/view/report/" + item.name; | ||||
} | } | ||||
@@ -68,7 +68,7 @@ export default class LinksWidget extends Widget { | |||||
is_query_report: item.is_query_report | is_query_report: item.is_query_report | ||||
}; | }; | ||||
if (item.link_type == "Report" && !item.is_query_report) { | |||||
if (item.link_type.toLowerCase() == "report" && !item.is_query_report) { | |||||
opts.doctype = item.dependencies; | opts.doctype = item.dependencies; | ||||
} | } | ||||
@@ -166,6 +166,9 @@ select.form-control { | |||||
.ace_print-margin { | .ace_print-margin { | ||||
background-color: var(--dark-border-color); | background-color: var(--dark-border-color); | ||||
} | } | ||||
.ace_scrollbar { | |||||
z-index: 3; | |||||
} | |||||
} | } | ||||
.frappe-control[data-fieldtype="Attach"], | .frappe-control[data-fieldtype="Attach"], | ||||
@@ -179,9 +179,20 @@ | |||||
--text-on-pink: var(--pink-500); | --text-on-pink: var(--pink-500); | ||||
--text-on-cyan: var(--cyan-600); | --text-on-cyan: var(--cyan-600); | ||||
// Layout Colors | |||||
--bg-color: var(--gray-50); | |||||
--fg-color: white; | |||||
--navbar-bg: white; | |||||
--fg-hover-color: var(--gray-100); | |||||
--card-bg: var(--fg-color); | |||||
--disabled-text-color: var(--gray-700); | |||||
--disabled-control-bg: var(--gray-50); | --disabled-control-bg: var(--gray-50); | ||||
--control-bg: var(--gray-100); | --control-bg: var(--gray-100); | ||||
--control-bg-on-gray: var(--gray-200); | --control-bg-on-gray: var(--gray-200); | ||||
--awesomebar-focus-bg: var(--fg-color); | |||||
--modal-bg: white; | |||||
--toast-bg: var(--modal-bg); | |||||
--popover-bg: white; | |||||
--awesomplete-hover-bg: var(--control-bg); | --awesomplete-hover-bg: var(--control-bg); | ||||
@@ -25,20 +25,7 @@ $input-height: 28px !default; | |||||
--navbar-height: 60px; | --navbar-height: 60px; | ||||
// Layout Colors | |||||
--bg-color: var(--gray-50); | |||||
--fg-color: white; | |||||
--navbar-bg: white; | |||||
--fg-hover-color: var(--gray-100); | |||||
--card-bg: var(--fg-color); | |||||
--disabled-text-color: var(--gray-700); | |||||
--disabled-control-bg: var(--gray-50); | |||||
--control-bg: var(--gray-100); | |||||
--control-bg-on-gray: var(--gray-200); | |||||
--awesomebar-focus-bg: var(--fg-color); | |||||
--modal-bg: white; | |||||
--toast-bg: var(--modal-bg); | |||||
--popover-bg: white; | |||||
--appreciation-color: var(--dark-green-600); | --appreciation-color: var(--dark-green-600); | ||||
--appreciation-bg: var(--dark-green-100); | --appreciation-bg: var(--dark-green-100); | ||||
@@ -36,7 +36,13 @@ a { | |||||
} | } | ||||
p { | p { | ||||
margin: 5px 0 !important; | |||||
margin: 1em 0 !important; | |||||
} | |||||
.with-container { | |||||
p { | |||||
margin: 5px 0 !important; | |||||
} | |||||
} | } | ||||
.ql-editor { | .ql-editor { | ||||
@@ -1 +1 @@ | |||||
@import './website/index'; | |||||
@import './website/index'; |
@@ -145,20 +145,21 @@ | |||||
.section-with-cards .card { | .section-with-cards .card { | ||||
@include transition(); | @include transition(); | ||||
border: none; | |||||
.card-body { | |||||
padding: 0 1.5rem 2rem 0; | |||||
} | |||||
&:hover { | &:hover { | ||||
border-color: $gray-500; | border-color: $gray-500; | ||||
} | } | ||||
.card-title { | .card-title { | ||||
line-height: 1; | |||||
line-height: 1.3; | |||||
} | } | ||||
&.card-sm { | &.card-sm { | ||||
.card-body { | |||||
padding: 1.5rem; | |||||
} | |||||
.card-title { | .card-title { | ||||
font-size: $font-size-base; | font-size: $font-size-base; | ||||
font-weight: 600; | font-weight: 600; | ||||
@@ -169,10 +170,6 @@ | |||||
} | } | ||||
} | } | ||||
&.card-md { | &.card-md { | ||||
.card-body { | |||||
padding: 1.75rem; | |||||
} | |||||
.card-title { | .card-title { | ||||
font-size: $font-size-lg; | font-size: $font-size-lg; | ||||
font-weight: 600; | font-weight: 600; | ||||
@@ -186,10 +183,6 @@ | |||||
} | } | ||||
} | } | ||||
&.card-lg { | &.card-lg { | ||||
.card-body { | |||||
padding: 2rem; | |||||
} | |||||
.card-title { | .card-title { | ||||
font-size: $font-size-xl; | font-size: $font-size-xl; | ||||
font-weight: bold; | font-weight: bold; | ||||
@@ -273,6 +273,31 @@ class TestEnergyPointLog(unittest.TestCase): | |||||
self.assertEqual(points_after_reverting_todo, points_after_closing_todo - rule_points) | self.assertEqual(points_after_reverting_todo, points_after_closing_todo - rule_points) | ||||
self.assertEqual(points_after_saving_todo_again, points_after_reverting_todo + rule_points) | self.assertEqual(points_after_saving_todo_again, points_after_reverting_todo + rule_points) | ||||
def test_energy_points_disabled_user(self): | |||||
frappe.set_user('test@example.com') | |||||
user = frappe.get_doc('User', 'test@example.com') | |||||
user.enabled = 0 | |||||
user.save() | |||||
todo_point_rule = create_energy_point_rule_for_todo() | |||||
energy_point_of_user = get_points('test@example.com') | |||||
created_todo = create_a_todo() | |||||
created_todo.status = 'Closed' | |||||
created_todo.save() | |||||
points_after_closing_todo = get_points('test@example.com') | |||||
# do not update energy points for disabled user | |||||
self.assertEqual(points_after_closing_todo, energy_point_of_user) | |||||
user.enabled = 1 | |||||
user.save() | |||||
created_todo.save() | |||||
points_after_re_saving_todo = get_points('test@example.com') | |||||
self.assertEqual(points_after_re_saving_todo, energy_point_of_user + todo_point_rule.points) | |||||
def create_energy_point_rule_for_todo(multiplier_field=None, for_doc_event='Custom', max_points=None, | def create_energy_point_rule_for_todo(multiplier_field=None, for_doc_event='Custom', max_points=None, | ||||
for_assigned_users=0, field_to_check=None, apply_once=False, user_field='owner'): | for_assigned_users=0, field_to_check=None, apply_once=False, user_field='owner'): | ||||
name = 'ToDo Closed' | name = 'ToDo Closed' | ||||
@@ -5,6 +5,7 @@ | |||||
import frappe | import frappe | ||||
from frappe import _ | from frappe import _ | ||||
import frappe.cache_manager | import frappe.cache_manager | ||||
from frappe.core.doctype.user.user import get_enabled_users | |||||
from frappe.model import log_types | from frappe.model import log_types | ||||
from frappe.model.document import Document | from frappe.model.document import Document | ||||
from frappe.social.doctype.energy_point_settings.energy_point_settings import is_energy_point_enabled | from frappe.social.doctype.energy_point_settings.energy_point_settings import is_energy_point_enabled | ||||
@@ -44,7 +45,7 @@ class EnergyPointRule(Document): | |||||
try: | try: | ||||
for user in users: | for user in users: | ||||
if not user or user == 'Administrator': continue | |||||
if not is_eligible_user(user): continue | |||||
create_energy_points_log(reference_doctype, reference_name, { | create_energy_points_log(reference_doctype, reference_name, { | ||||
'points': points, | 'points': points, | ||||
'user': user, | 'user': user, | ||||
@@ -119,3 +120,8 @@ def get_energy_point_doctypes(): | |||||
d.reference_doctype for d in frappe.get_all('Energy Point Rule', | d.reference_doctype for d in frappe.get_all('Energy Point Rule', | ||||
['reference_doctype'], {'enabled': 1}) | ['reference_doctype'], {'enabled': 1}) | ||||
] | ] | ||||
def is_eligible_user(user): | |||||
'''Checks if user is eligible to get energy points''' | |||||
enabled_users = get_enabled_users() | |||||
return user and user in enabled_users and user != 'Administrator' |
@@ -20,7 +20,7 @@ class TestBoilerPlate(unittest.TestCase): | |||||
def test_create_app(self): | def test_create_app(self): | ||||
title = "Test App" | title = "Test App" | ||||
description = "Test app for unit testing" | |||||
description = "This app's description contains 'single quotes' and \"double quotes\"." | |||||
publisher = "Test Publisher" | publisher = "Test Publisher" | ||||
email = "example@example.org" | email = "example@example.org" | ||||
icon = "" # empty -> default | icon = "" # empty -> default | ||||
@@ -426,3 +426,13 @@ class TestCommands(BaseTestCommands): | |||||
self.assertEqual(self.returncode, 0) | self.assertEqual(self.returncode, 0) | ||||
self.assertIn("pong", self.stdout) | self.assertIn("pong", self.stdout) | ||||
def test_version(self): | |||||
self.execute("bench version") | |||||
self.assertEqual(self.returncode, 0) | |||||
for output in ["legacy", "plain", "table", "json"]: | |||||
self.execute(f"bench version -f {output}") | |||||
self.assertEqual(self.returncode, 0) | |||||
self.execute("bench version -f invalid") | |||||
self.assertEqual(self.returncode, 2) |
@@ -5,6 +5,7 @@ | |||||
import unittest | import unittest | ||||
from random import choice | from random import choice | ||||
import datetime | |||||
import frappe | import frappe | ||||
from frappe.custom.doctype.custom_field.custom_field import create_custom_field | from frappe.custom.doctype.custom_field.custom_field import create_custom_field | ||||
@@ -45,11 +46,35 @@ class TestDB(unittest.TestCase): | |||||
frappe.db.escape("香港濟生堂製藥有限公司 - IT".encode("utf-8")) | frappe.db.escape("香港濟生堂製藥有限公司 - IT".encode("utf-8")) | ||||
def test_get_single_value(self): | def test_get_single_value(self): | ||||
frappe.db.set_value('System Settings', 'System Settings', 'backup_limit', 5) | |||||
frappe.db.commit() | |||||
#setup | |||||
values_dict = { | |||||
"Float": 1.5, | |||||
"Int": 1, | |||||
"Percent": 55.5, | |||||
"Currency": 12.5, | |||||
"Data": "Test", | |||||
"Date": datetime.datetime.now().date(), | |||||
"Datetime": datetime.datetime.now(), | |||||
"Time": datetime.timedelta(hours=9, minutes=45, seconds=10) | |||||
} | |||||
test_inputs = [{ | |||||
"fieldtype": fieldtype, | |||||
"value": value} for fieldtype, value in values_dict.items()] | |||||
for fieldtype in values_dict.keys(): | |||||
create_custom_field("Print Settings", { | |||||
"fieldname": f"test_{fieldtype.lower()}", | |||||
"label": f"Test {fieldtype}", | |||||
"fieldtype": fieldtype, | |||||
}) | |||||
#test | |||||
for inp in test_inputs: | |||||
fieldname = f"test_{inp['fieldtype'].lower()}" | |||||
frappe.db.set_value("Print Settings", "Print Settings", fieldname, inp["value"]) | |||||
self.assertEqual(frappe.db.get_single_value("Print Settings", fieldname), inp["value"]) | |||||
limit = frappe.db.get_single_value('System Settings', 'backup_limit') | |||||
self.assertEqual(limit, 5) | |||||
#teardown | |||||
clear_custom_fields("Print Settings") | |||||
def test_log_touched_tables(self): | def test_log_touched_tables(self): | ||||
frappe.flags.in_migrate = True | frappe.flags.in_migrate = True | ||||
@@ -132,29 +157,29 @@ class TestDB(unittest.TestCase): | |||||
# Testing read | # Testing read | ||||
self.assertEqual(list(frappe.get_all("ToDo", fields=[random_field], limit=1)[0])[0], random_field) | self.assertEqual(list(frappe.get_all("ToDo", fields=[random_field], limit=1)[0])[0], random_field) | ||||
self.assertEqual(list(frappe.get_all("ToDo", fields=["`{0}` as total".format(random_field)], limit=1)[0])[0], "total") | |||||
self.assertEqual(list(frappe.get_all("ToDo", fields=[f"`{random_field}` as total"], limit=1)[0])[0], "total") | |||||
# Testing read for distinct and sql functions | # Testing read for distinct and sql functions | ||||
self.assertEqual(list( | self.assertEqual(list( | ||||
frappe.get_all("ToDo", | frappe.get_all("ToDo", | ||||
fields=["`{0}` as total".format(random_field)], | |||||
fields=[f"`{random_field}` as total"], | |||||
distinct=True, | distinct=True, | ||||
limit=1, | limit=1, | ||||
)[0] | )[0] | ||||
)[0], "total") | )[0], "total") | ||||
self.assertEqual(list( | self.assertEqual(list( | ||||
frappe.get_all("ToDo", | frappe.get_all("ToDo", | ||||
fields=["`{0}`".format(random_field)], | |||||
fields=[f"`{random_field}`"], | |||||
distinct=True, | distinct=True, | ||||
limit=1, | limit=1, | ||||
)[0] | )[0] | ||||
)[0], random_field) | )[0], random_field) | ||||
self.assertEqual(list( | self.assertEqual(list( | ||||
frappe.get_all("ToDo", | frappe.get_all("ToDo", | ||||
fields=["count(`{0}`)".format(random_field)], | |||||
fields=[f"count(`{random_field}`)"], | |||||
limit=1 | limit=1 | ||||
)[0] | )[0] | ||||
)[0], "count" if frappe.conf.db_type == "postgres" else "count(`{0}`)".format(random_field)) | |||||
)[0], "count" if frappe.conf.db_type == "postgres" else f"count(`{random_field}`)") | |||||
# Testing update | # Testing update | ||||
frappe.db.set_value(test_doctype, random_doc, random_field, random_value) | frappe.db.set_value(test_doctype, random_doc, random_field, random_value) | ||||
@@ -1,4 +1,4 @@ | |||||
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and Contributors | |||||
# Copyright (c) 2021, Frappe Technologies Pvt. Ltd. and Contributors | |||||
# MIT License. See license.txt | # MIT License. See license.txt | ||||
import unittest | import unittest | ||||
@@ -6,7 +6,16 @@ import frappe | |||||
from frappe.desk.search import search_link | from frappe.desk.search import search_link | ||||
from frappe.desk.search import search_widget | from frappe.desk.search import search_widget | ||||
class TestSearch(unittest.TestCase): | class TestSearch(unittest.TestCase): | ||||
def setUp(self): | |||||
if self._testMethodName == "test_link_field_order": | |||||
setup_test_link_field_order(self) | |||||
def tearDown(self): | |||||
if self._testMethodName == "test_link_field_order": | |||||
teardown_test_link_field_order(self) | |||||
def test_search_field_sanitizer(self): | def test_search_field_sanitizer(self): | ||||
# pass | # pass | ||||
search_link('DocType', 'User', query=None, filters=None, page_length=20, searchfield='name') | search_link('DocType', 'User', query=None, filters=None, page_length=20, searchfield='name') | ||||
@@ -38,6 +47,18 @@ class TestSearch(unittest.TestCase): | |||||
search_link, 'DocType', 'Customer', query=None, filters=None, | search_link, 'DocType', 'Customer', query=None, filters=None, | ||||
page_length=20, searchfield=';') | page_length=20, searchfield=';') | ||||
def test_link_field_order(self): | |||||
# Making a request to the search_link with the tree doctype | |||||
search_link(doctype=self.tree_doctype_name, txt='all', query=None, | |||||
filters=None, page_length=20, searchfield=None) | |||||
result = frappe.response['results'] | |||||
# Check whether the result is sorted or not | |||||
self.assertEquals(self.parent_doctype_name, result[0]['value']) | |||||
# Check whether searching for parent also list out children | |||||
self.assertEquals(len(result), len(self.child_doctypes_names) + 1) | |||||
#Search for the word "pay", part of the word "pays" (country) in french. | #Search for the word "pay", part of the word "pays" (country) in french. | ||||
def test_link_search_in_foreign_language(self): | def test_link_search_in_foreign_language(self): | ||||
try: | try: | ||||
@@ -80,4 +101,58 @@ class TestSearch(unittest.TestCase): | |||||
@frappe.validate_and_sanitize_search_inputs | @frappe.validate_and_sanitize_search_inputs | ||||
def get_data(doctype, txt, searchfield, start, page_len, filters): | def get_data(doctype, txt, searchfield, start, page_len, filters): | ||||
return [doctype, txt, searchfield, start, page_len, filters] | |||||
return [doctype, txt, searchfield, start, page_len, filters] | |||||
def setup_test_link_field_order(TestCase): | |||||
TestCase.tree_doctype_name = 'Test Tree Order' | |||||
TestCase.child_doctype_list = [] | |||||
TestCase.child_doctypes_names = ['USA', 'India', 'Russia', 'China'] | |||||
TestCase.parent_doctype_name = 'All Territories' | |||||
# Create Tree doctype | |||||
TestCase.tree_doc = frappe.get_doc({ | |||||
'doctype': 'DocType', | |||||
'name': TestCase.tree_doctype_name, | |||||
'module': 'Custom', | |||||
'custom': 1, | |||||
'is_tree': 1, | |||||
'autoname': 'field:random', | |||||
'fields': [{ | |||||
'fieldname': 'random', | |||||
'label': 'Random', | |||||
'fieldtype': 'Data' | |||||
}] | |||||
}).insert() | |||||
TestCase.tree_doc.search_fields = 'parent_test_tree_order' | |||||
TestCase.tree_doc.save() | |||||
# Create root for the tree doctype | |||||
frappe.get_doc({ | |||||
"doctype": TestCase.tree_doctype_name, | |||||
"random": TestCase.parent_doctype_name, | |||||
"is_group": 1 | |||||
}).insert() | |||||
# Create children for the root | |||||
for child_name in TestCase.child_doctypes_names: | |||||
temp = frappe.get_doc({ | |||||
"doctype": TestCase.tree_doctype_name, | |||||
"random": child_name, | |||||
"parent_test_tree_order": TestCase.parent_doctype_name | |||||
}).insert() | |||||
TestCase.child_doctype_list.append(temp) | |||||
def teardown_test_link_field_order(TestCase): | |||||
# Deleting all the created doctype | |||||
for child_doctype in TestCase.child_doctype_list: | |||||
child_doctype.delete() | |||||
frappe.delete_doc( | |||||
TestCase.tree_doctype_name, | |||||
TestCase.parent_doctype_name, | |||||
ignore_permissions=True, | |||||
force=True, | |||||
for_reload=True, | |||||
) | |||||
TestCase.tree_doc.delete() |
@@ -66,9 +66,6 @@ def make_boilerplate(dest, app_name): | |||||
with open(os.path.join(dest, hooks.app_name, ".gitignore"), "w") as f: | with open(os.path.join(dest, hooks.app_name, ".gitignore"), "w") as f: | ||||
f.write(frappe.as_unicode(gitignore_template.format(app_name = hooks.app_name))) | f.write(frappe.as_unicode(gitignore_template.format(app_name = hooks.app_name))) | ||||
with open(os.path.join(dest, hooks.app_name, "setup.py"), "w") as f: | |||||
f.write(frappe.as_unicode(setup_template.format(**hooks))) | |||||
with open(os.path.join(dest, hooks.app_name, "requirements.txt"), "w") as f: | with open(os.path.join(dest, hooks.app_name, "requirements.txt"), "w") as f: | ||||
f.write("# frappe -- https://github.com/frappe/frappe is installed via 'bench init'") | f.write("# frappe -- https://github.com/frappe/frappe is installed via 'bench init'") | ||||
@@ -82,6 +79,14 @@ def make_boilerplate(dest, app_name): | |||||
with open(os.path.join(dest, hooks.app_name, hooks.app_name, "modules.txt"), "w") as f: | with open(os.path.join(dest, hooks.app_name, hooks.app_name, "modules.txt"), "w") as f: | ||||
f.write(frappe.as_unicode(hooks.app_title)) | f.write(frappe.as_unicode(hooks.app_title)) | ||||
# These values could contain quotes and can break string declarations | |||||
# So escaping them before setting variables in setup.py and hooks.py | |||||
for key in ("app_publisher", "app_description", "app_license"): | |||||
hooks[key] = hooks[key].replace("\\", "\\\\").replace("'", "\\'").replace("\"", "\\\"") | |||||
with open(os.path.join(dest, hooks.app_name, "setup.py"), "w") as f: | |||||
f.write(frappe.as_unicode(setup_template.format(**hooks))) | |||||
with open(os.path.join(dest, hooks.app_name, hooks.app_name, "hooks.py"), "w") as f: | with open(os.path.join(dest, hooks.app_name, hooks.app_name, "hooks.py"), "w") as f: | ||||
f.write(frappe.as_unicode(hooks_template.format(**hooks))) | f.write(frappe.as_unicode(hooks_template.format(**hooks))) | ||||
@@ -328,18 +333,18 @@ def get_data(): | |||||
setup_template = """from setuptools import setup, find_packages | setup_template = """from setuptools import setup, find_packages | ||||
with open('requirements.txt') as f: | |||||
install_requires = f.read().strip().split('\\n') | |||||
with open("requirements.txt") as f: | |||||
install_requires = f.read().strip().split("\\n") | |||||
# get version from __version__ variable in {app_name}/__init__.py | # get version from __version__ variable in {app_name}/__init__.py | ||||
from {app_name} import __version__ as version | from {app_name} import __version__ as version | ||||
setup( | setup( | ||||
name='{app_name}', | |||||
name="{app_name}", | |||||
version=version, | version=version, | ||||
description='{app_description}', | |||||
author='{app_publisher}', | |||||
author_email='{app_email}', | |||||
description="{app_description}", | |||||
author="{app_publisher}", | |||||
author_email="{app_email}", | |||||
packages=find_packages(), | packages=find_packages(), | ||||
zip_safe=False, | zip_safe=False, | ||||
include_package_data=True, | include_package_data=True, | ||||
@@ -20,7 +20,7 @@ def sync_fixtures(app=None): | |||||
if os.path.exists(frappe.get_app_path(app, "fixtures")): | if os.path.exists(frappe.get_app_path(app, "fixtures")): | ||||
fixture_files = sorted(os.listdir(frappe.get_app_path(app, "fixtures"))) | fixture_files = sorted(os.listdir(frappe.get_app_path(app, "fixtures"))) | ||||
for fname in fixture_files: | for fname in fixture_files: | ||||
if fname.endswith(".json") or fname.endswith(".csv"): | |||||
if fname.endswith(".json"): | |||||
import_doc(frappe.get_app_path(app, "fixtures", fname)) | import_doc(frappe.get_app_path(app, "fixtures", fname)) | ||||
import_custom_scripts(app) | import_custom_scripts(app) | ||||
@@ -231,9 +231,6 @@ def update_global_search(doc): | |||||
if frappe.local.conf.get('disable_global_search'): | if frappe.local.conf.get('disable_global_search'): | ||||
return | return | ||||
if frappe.local.conf.get('disable_global_search'): | |||||
return | |||||
if doc.docstatus > 1 or (doc.meta.has_field("enabled") and not doc.get("enabled")) \ | if doc.docstatus > 1 or (doc.meta.has_field("enabled") and not doc.get("enabled")) \ | ||||
or doc.get("disabled"): | or doc.get("disabled"): | ||||
return | return | ||||
@@ -8,11 +8,11 @@ from frappe.website.utils import can_cache | |||||
HOMEPAGE_PATHS = ('/', '/index', 'index') | HOMEPAGE_PATHS = ('/', '/index', 'index') | ||||
class NotFoundPage(TemplatePage): | class NotFoundPage(TemplatePage): | ||||
def __init__(self, path, http_status_code): | |||||
def __init__(self, path, http_status_code=None): | |||||
self.request_path = path | self.request_path = path | ||||
self.request_url = frappe.local.request.url if hasattr(frappe.local, 'request') else '' | self.request_url = frappe.local.request.url if hasattr(frappe.local, 'request') else '' | ||||
path = '404' | path = '404' | ||||
http_status_code = 404 | |||||
http_status_code = http_status_code or 404 | |||||
super().__init__(path=path, http_status_code=http_status_code) | super().__init__(path=path, http_status_code=http_status_code) | ||||
def can_render(self): | def can_render(self): | ||||
@@ -4163,16 +4163,11 @@ minimatch@^3.0.4, minimatch@~3.0.2: | |||||
dependencies: | dependencies: | ||||
brace-expansion "^1.1.7" | brace-expansion "^1.1.7" | ||||
minimist@^1.1.3, minimist@^1.2.5: | |||||
minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5: | |||||
version "1.2.5" | version "1.2.5" | ||||
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" | resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" | ||||
integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== | integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== | ||||
minimist@^1.2.0: | |||||
version "1.2.0" | |||||
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" | |||||
integrity sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ= | |||||
minipass@^3.0.0: | minipass@^3.0.0: | ||||
version "3.1.3" | version "3.1.3" | ||||
resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.1.3.tgz#7d42ff1f39635482e15f9cdb53184deebd5815fd" | resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.1.3.tgz#7d42ff1f39635482e15f9cdb53184deebd5815fd" | ||||