refactor: clean up code to py39+ supported syntax

- f-strings instead of format()
- latest typing support instead of pre-3.9 TitleCase
- remove UTF-8 declarations
- many more changes

Powered by https://github.com/asottile/pyupgrade/ + manual cleanups
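For reviewers, a minimal, self-contained sketch of the rewrite patterns applied throughout the diff below. It is not part of the PR; the function and variable names are simplified stand-ins, and the __future__ import is an assumption added here so the X | Y annotations also evaluate on Python 3.9.

    # Illustrative before/after of the pyupgrade-style rewrites (not from the codebase).
    from __future__ import annotations  # assumption: keeps `dict | list` annotations valid on 3.9

    import json


    def as_json(obj: dict | list, indent: int = 1) -> str:
        # typing.Dict/List/Union/Optional -> builtin generics and PEP 604 unions
        return json.dumps(obj, indent=indent)


    def message_location(message_id: str) -> str:
        # "/message?id={0}".format(message_id) -> f-string
        return f"/message?id={message_id}"


    def read_json(path: str):
        # open(path, "r") -> open(path); "r" is already the default mode
        with open(path) as f:
            return json.load(f)


    class RequestContext:
        # class RequestContext(object) -> implicit object base class
        pass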
@@ -17,7 +17,7 @@ import json
 import os
 import re
 import warnings
-from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union
+from typing import TYPE_CHECKING, Any, Callable
 import click
 from werkzeug.local import Local, release_local
@@ -103,7 +103,7 @@ def _(msg, lang=None, context=None) -> str:
 translated_string = ""
 if context:
-string_key = "{msg}:{context}".format(msg=msg, context=context)
+string_key = f"{msg}:{context}"
 translated_string = get_full_dict(lang).get(string_key)
 if not translated_string:
@@ -168,8 +168,8 @@ if TYPE_CHECKING:
 from frappe.query_builder.builder import MariaDB, Postgres
 from frappe.utils.redis_wrapper import RedisWrapper
-db: Union[MariaDBDatabase, PostgresDatabase]
-qb: Union[MariaDB, Postgres]
+db: MariaDBDatabase | PostgresDatabase
+qb: MariaDB | Postgres
 # end: static analysis hack
@@ -308,10 +308,10 @@ def get_site_config(sites_path=None, site_path=None):
 try:
 config.update(get_file_json(site_config))
 except Exception as error:
-click.secho("{0}/site_config.json is invalid".format(local.site), fg="red")
+click.secho(f"{local.site}/site_config.json is invalid", fg="red")
 print(error)
 elif local.site and not local.flags.new_site:
-raise IncorrectSitePath("{0} does not exist".format(local.site))
+raise IncorrectSitePath(f"{local.site} does not exist")
 return _dict(config)
@@ -993,7 +993,7 @@ def get_precision(doctype, fieldname, currency=None, doc=None):
 return get_field_precision(get_meta(doctype).get_field(fieldname), doc, currency)
-def generate_hash(txt: Optional[str] = None, length: Optional[int] = None) -> str:
+def generate_hash(txt: str | None = None, length: int | None = None) -> str:
 """Generates random hash for given text + current timestamp + random string."""
 import hashlib
 import time
@@ -1399,7 +1399,7 @@ def get_doc_hooks():
 @request_cache
-def _load_app_hooks(app_name: Optional[str] = None):
+def _load_app_hooks(app_name: str | None = None):
 hooks = {}
 apps = [app_name] if app_name else get_installed_apps(sort=True)
@@ -1422,7 +1422,7 @@ def _load_app_hooks(app_name: Optional[str] = None):
 def get_hooks(
-hook: str = None, default: Optional[Any] = "_KEEP_DEFAULT_LIST", app_name: str = None
+hook: str = None, default: Any | None = "_KEEP_DEFAULT_LIST", app_name: str = None
 ) -> _dict:
 """Get hooks via `app/hooks.py`
@@ -1505,7 +1505,7 @@ def get_file_items(path, raise_not_found=False, ignore_empty_lines=True):
 def get_file_json(path):
 """Read a file and return parsed JSON object."""
-with open(path, "r") as f:
+with open(path) as f:
 return json.load(f)
@@ -1515,10 +1515,10 @@ def read_file(path, raise_not_found=False):
 path = path.encode("utf-8")
 if os.path.exists(path):
-with open(path, "r") as f:
+with open(path) as f:
 return as_unicode(f.read())
 elif raise_not_found:
-raise IOError("{} Not Found".format(path))
+raise OSError(f"{path} Not Found")
 else:
 return None
@@ -1548,7 +1548,7 @@ def call(fn, *args, **kwargs):
 return fn(*args, **newargs)
-def get_newargs(fn: Callable, kwargs: Dict[str, Any]) -> Dict[str, Any]:
+def get_newargs(fn: Callable, kwargs: dict[str, Any]) -> dict[str, Any]:
 """Remove any kwargs that are not supported by the function.
 Example:
@@ -1785,8 +1785,8 @@ def redirect_to_message(title, html, http_status_code=None, context=None, indica
 if indicator_color:
 message["context"].update({"indicator_color": indicator_color})
-cache().set_value("message_id:{0}".format(message_id), message, expires_in_sec=60)
-location = "/message?id={0}".format(message_id)
+cache().set_value(f"message_id:{message_id}", message, expires_in_sec=60)
+location = f"/message?id={message_id}"
 if not getattr(local, "is_ajax", False):
 local.response["type"] = "redirect"
@@ -1872,7 +1872,7 @@ def get_value(*args, **kwargs):
 return db.get_value(*args, **kwargs)
-def as_json(obj: Union[Dict, List], indent=1, separators=None) -> str:
+def as_json(obj: dict | list, indent=1, separators=None) -> str:
 from frappe.utils.response import json_handler
 if separators is None:
@@ -1903,7 +1903,7 @@ def get_test_records(doctype):
 get_module_path(get_doctype_module(doctype)), "doctype", scrub(doctype), "test_records.json"
 )
 if os.path.exists(path):
-with open(path, "r") as f:
+with open(path) as f:
 return json.loads(f.read())
 else:
 return []
@@ -2183,7 +2183,7 @@ def get_desk_link(doctype, name):
 def bold(text):
-return "<strong>{0}</strong>".format(text)
+return f"<strong>{text}</strong>"
 def safe_eval(code, eval_globals=None, eval_locals=None):
@@ -2211,10 +2211,10 @@ def safe_eval(code, eval_globals=None, eval_locals=None):
 for attribute in UNSAFE_ATTRIBUTES:
 if attribute in code:
-throw('Illegal rule {0}. Cannot use "{1}"'.format(bold(code), attribute))
+throw(f'Illegal rule {bold(code)}. Cannot use "{attribute}"')
 if "__" in code:
-throw('Illegal rule {0}. Cannot use "__"'.format(bold(code)))
+throw(f'Illegal rule {bold(code)}. Cannot use "__"')
 if not eval_globals:
 eval_globals = {}
@@ -167,7 +167,7 @@ def validate_auth():
 """
 Authenticate and sets user for the request.
 """
-authorization_header = frappe.get_request_header("Authorization", str()).split(" ")
+authorization_header = frappe.get_request_header("Authorization", "").split(" ")
 if len(authorization_header) == 2:
 validate_oauth(authorization_header)
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
 # License: MIT. See LICENSE
@@ -34,7 +33,7 @@ SAFE_HTTP_METHODS = ("GET", "HEAD", "OPTIONS")
 UNSAFE_HTTP_METHODS = ("POST", "PUT", "DELETE", "PATCH")
-class RequestContext(object):
+class RequestContext:
 def __init__(self, environ):
 self.request = Request(environ)
@@ -331,12 +330,10 @@ def serve(
 if not os.environ.get("NO_STATICS"):
 application = SharedDataMiddleware(
-application, {str("/assets"): str(os.path.join(sites_path, "assets"))}
+application, {"/assets": str(os.path.join(sites_path, "assets"))}
 )
-application = StaticDataMiddleware(
-application, {str("/files"): str(os.path.abspath(sites_path))}
-)
+application = StaticDataMiddleware(application, {"/files": str(os.path.abspath(sites_path))})
 application.debug = True
 application.config = {"SERVER_NAME": "localhost:8000"}
@@ -471,7 +471,7 @@ def get_login_attempt_tracker(user_name: str, raise_locked_exception: bool = Tru
 return tracker
-class LoginAttemptTracker(object):
+class LoginAttemptTracker:
 """Track login attemts of a user.
 Lock the account for s number of seconds if there have been n consecutive unsuccessful attempts to log in.
@@ -1,7 +1,7 @@
 # Copyright (c) 2022, Frappe Technologies and contributors
 # License: MIT. See LICENSE
-from typing import Dict, Iterable, List
+from collections.abc import Iterable
 import frappe
 from frappe import _
@@ -157,7 +157,7 @@ class AssignmentRule(Document):
 return assignment_days and today not in assignment_days
-def get_assignments(doc) -> List[Dict]:
+def get_assignments(doc) -> list[dict]:
 return frappe.get_all(
 "ToDo",
 fields=["name", "assignment_rule"],
@@ -228,7 +228,7 @@ def apply(doc=None, method=None, doctype=None, name=None):
 )
 # multiple auto assigns
-assignment_rule_docs: List[AssignmentRule] = [
+assignment_rule_docs: list[AssignmentRule] = [
 frappe.get_cached_doc("Assignment Rule", d.get("name")) for d in assignment_rules
 ]
@@ -356,11 +356,11 @@ def update_due_date(doc, state=None):
 todo_doc.save(ignore_permissions=True)
-def get_assignment_rules() -> List[str]:
+def get_assignment_rules() -> list[str]:
 return frappe.get_all("Assignment Rule", filters={"disabled": 0}, pluck="document_type")
-def get_repeated(values: Iterable) -> List:
+def get_repeated(values: Iterable) -> list:
 unique = set()
 repeated = set()
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2018, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 import unittest
@@ -200,7 +199,7 @@ class TestAutoRepeat(unittest.TestCase):
 # next_schedule_date is set as on or after current date
 # it should not be a previous month's date
-self.assertTrue((doc.next_schedule_date >= current_date))
+self.assertTrue(doc.next_schedule_date >= current_date)
 todo = frappe.get_doc(
 dict(
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2020, Frappe Technologies and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 # import frappe
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 import unittest
@@ -200,7 +200,7 @@ def symlink(target, link_name, overwrite=False):
 try:
 # Pre-empt os.replace on a directory with a nicer message
 if os.path.isdir(link_name):
-raise IsADirectoryError("Cannot symlink over existing directory: '{}'".format(link_name))
+raise IsADirectoryError(f"Cannot symlink over existing directory: '{link_name}'")
 try:
 os.replace(temp_link_name, link_name)
 except AttributeError:
@@ -239,10 +239,10 @@ def bundle(
 make_asset_dirs(hard_link=hard_link)
 mode = "production" if mode == "production" else "build"
-command = "yarn run {mode}".format(mode=mode)
+command = f"yarn run {mode}"
 if apps:
-command += " --apps {apps}".format(apps=apps)
+command += f" --apps {apps}"
 if skip_frappe:
 command += " --skip_frappe"
@@ -263,7 +263,7 @@ def watch(apps=None):
 command = "yarn run watch"
 if apps:
-command += " --apps {apps}".format(apps=apps)
+command += f" --apps {apps}"
 live_reload = frappe.utils.cint(os.environ.get("LIVE_RELOAD", frappe.conf.live_reload))
@@ -349,13 +349,13 @@ def get_js(items):
 frappe.throw(_("Invalid file path: {0}").format("/".join(src)))
 contentpath = os.path.join(frappe.local.sites_path, *src)
-with open(contentpath, "r") as srcfile:
+with open(contentpath) as srcfile:
 code = frappe.utils.cstr(srcfile.read())
 if frappe.local.lang != "en":
 messages = frappe.get_lang_dict("jsfile", contentpath)
 messages = json.dumps(messages)
-code += "\n\n$.extend(frappe._messages, {})".format(messages)
+code += f"\n\n$.extend(frappe._messages, {messages})"
 out.append(code)
@@ -114,7 +114,7 @@ def scheduler(context, state, site=None):
 frappe.utils.scheduler.enable_scheduler()
 frappe.db.commit()
-print("Scheduler {0}d for site {1}".format(state, site))
+print(f"Scheduler {state}d for site {site}")
 finally:
 frappe.destroy()
@@ -182,7 +182,7 @@ def purge_jobs(site=None, queue=None, event=None):
 frappe.init(site or "")
 count = purge_pending_jobs(event=event, site=site, queue=queue)
-print("Purged {} jobs".format(count))
+print(f"Purged {count} jobs")
 @click.command("schedule")
@@ -218,11 +218,11 @@ def ready_for_migration(context, site=None):
 pending_jobs = get_pending_jobs(site=site)
 if pending_jobs:
-print("NOT READY for migration: site {0} has pending background jobs".format(site))
+print(f"NOT READY for migration: site {site} has pending background jobs")
 sys.exit(1)
 else:
-print("READY for migration: site {0} does not have any background jobs".format(site))
+print(f"READY for migration: site {site} does not have any background jobs")
 return 0
 finally:
@@ -256,7 +256,7 @@ def restore(
 os.remove(private)
 _backup.decryption_rollback()
-success_message = "Site {0} has been restored{1}".format(
+success_message = "Site {} has been restored{}".format(
 site, " with files" if (with_public_files or with_private_files) else ""
 )
 click.secho(success_message, fg="green")
@@ -413,12 +413,12 @@ def install_app(context, apps, force=False):
 try:
 _install_app(app, verbose=context.verbose, force=force)
 except frappe.IncompatibleApp as err:
-err_msg = ":\n{}".format(err) if str(err) else ""
-print("App {} is Incompatible with Site {}{}".format(app, site, err_msg))
+err_msg = f":\n{err}" if str(err) else ""
+print(f"App {app} is Incompatible with Site {site}{err_msg}")
 exit_code = 1
 except Exception as err:
-err_msg = ": {}\n{}".format(str(err), frappe.get_traceback())
-print("An error occurred while installing {}{}".format(app, err_msg))
+err_msg = f": {str(err)}\n{frappe.get_traceback()}"
+print(f"An error occurred while installing {app}{err_msg}")
 exit_code = 1
 frappe.destroy()
@@ -448,8 +448,8 @@ def list_apps(context, format):
 apps = frappe.get_single("Installed Applications").installed_applications
 if apps:
-name_len, ver_len = [max([len(x.get(y)) for x in apps]) for y in ["app_name", "app_version"]]
-template = "{{0:{0}}} {{1:{1}}} {{2}}".format(name_len, ver_len)
+name_len, ver_len = (max(len(x.get(y)) for x in apps) for y in ["app_name", "app_version"])
+template = f"{{0:{name_len}}} {{1:{ver_len}}} {{2}}"
 installed_applications = [
 template.format(app.app_name, app.app_version, app.git_branch) for app in apps
@@ -607,7 +607,7 @@ def reload_doctype(context, doctype):
 def add_to_hosts(context):
 "Add site to hosts"
 for site in context.sites:
-frappe.commands.popen("echo 127.0.0.1\t{0} | sudo tee -a /etc/hosts".format(site))
+frappe.commands.popen(f"echo 127.0.0.1\t{site} | sudo tee -a /etc/hosts")
 if not context.sites:
 raise SiteNotSpecifiedError
@@ -623,9 +623,9 @@ def use(site, sites_path="."):
 if os.path.exists(os.path.join(sites_path, site)):
 with open(os.path.join(sites_path, "currentsite.txt"), "w") as sitefile:
 sitefile.write(site)
-print("Current Site set to {}".format(site))
+print(f"Current Site set to {site}")
 else:
-print("Site {} does not exist".format(site))
+print(f"Site {site} does not exist")
 @click.command("backup")
@@ -699,7 +699,7 @@ def backup(
 )
 except Exception:
 click.secho(
-"Backup failed for Site {0}. Database or site_config.json may be corrupted".format(site),
+f"Backup failed for Site {site}. Database or site_config.json may be corrupted",
 fg="red",
 )
 if verbose:
@@ -713,7 +713,7 @@ def backup(
 odb.print_summary()
 click.secho(
-"Backup for Site {0} has been successfully completed{1}".format(
+"Backup for Site {} has been successfully completed{}".format(
 site, " with files" if with_files else ""
 ),
 fg="green",
@@ -830,8 +830,8 @@ def _drop_site(
 else:
 messages = [
 "=" * 80,
-"Error: The operation has stopped because backup of {0}'s database failed.".format(site),
-"Reason: {0}\n".format(str(err)),
+f"Error: The operation has stopped because backup of {site}'s database failed.",
+f"Reason: {str(err)}\n",
 "Fix the issue and try again.",
 "Hint: Use 'bench drop-site {0} --force' to force the removal of {0}".format(site),
 ]
@@ -1080,7 +1080,7 @@ def build_search_index(context):
 if not site:
 raise SiteNotSpecifiedError
-print("Building search index for {}".format(site))
+print(f"Building search index for {site}")
 frappe.init(site=site)
 frappe.connect()
 try:
@@ -387,7 +387,7 @@ def import_doc(context, path, force=False):
 if not os.path.exists(path):
 path = os.path.join("..", path)
 if not os.path.exists(path):
-print("Invalid path {0}".format(path))
+print(f"Invalid path {path}")
 sys.exit(1)
 for site in context.sites:
@@ -471,7 +471,7 @@ def bulk_rename(context, doctype, path):
 site = get_site(context)
-with open(path, "r") as csvfile:
+with open(path) as csvfile:
 rows = read_csv_content(csvfile.read())
 frappe.init(site=site)
@@ -566,7 +566,7 @@ def jupyter(context):
 try:
 os.stat(jupyter_notebooks_path)
 except OSError:
-print("Creating folder to keep jupyter notebooks at {}".format(jupyter_notebooks_path))
+print(f"Creating folder to keep jupyter notebooks at {jupyter_notebooks_path}")
 os.mkdir(jupyter_notebooks_path)
 bin_path = os.path.abspath("../env/bin")
 print(
@@ -585,9 +585,9 @@ frappe.db.connect()
 )
 )
 os.execv(
-"{0}/jupyter".format(bin_path),
+f"{bin_path}/jupyter",
 [
-"{0}/jupyter".format(bin_path),
+f"{bin_path}/jupyter",
 "notebook",
 jupyter_notebooks_path,
 ],
@@ -780,7 +780,7 @@ def run_tests(
 if not (allow_tests or os.environ.get("CI")):
 click.secho("Testing is disabled for the site!", bold=True)
 click.secho("You can enable tests by entering following command:")
-click.secho("bench --site {0} set-config allow_tests true".format(site), fg="green")
+click.secho(f"bench --site {site} set-config allow_tests true", fg="green")
 return
 frappe.init(site=site)
@@ -955,7 +955,7 @@ def request(context, args=None, path=None):
 if args.startswith("/api/method"):
 frappe.local.form_dict.cmd = args.split("?")[0].split("/")[-1]
 elif path:
-with open(os.path.join("..", path), "r") as f:
+with open(os.path.join("..", path)) as f:
 args = json.loads(f.read())
 frappe.local.form_dict = frappe._dict(args)
@@ -3,7 +3,6 @@
 import functools
 import re
-from typing import Dict, List
 import frappe
 from frappe import _
@@ -117,9 +116,7 @@ def get_permission_query_conditions(doctype):
 # when everything is not permitted
 for df in links.get("not_permitted_links"):
 # like ifnull(customer, '')='' and ifnull(supplier, '')=''
-conditions.append(
-"ifnull(`tab{doctype}`.`{fieldname}`, '')=''".format(doctype=doctype, fieldname=df.fieldname)
-)
+conditions.append(f"ifnull(`tab{doctype}`.`{df.fieldname}`, '')=''")
 return "( " + " and ".join(conditions) + " )"
@@ -128,9 +125,7 @@ def get_permission_query_conditions(doctype):
 for df in links.get("permitted_links"):
 # like ifnull(customer, '')!='' or ifnull(supplier, '')!=''
-conditions.append(
-"ifnull(`tab{doctype}`.`{fieldname}`, '')!=''".format(doctype=doctype, fieldname=df.fieldname)
-)
+conditions.append(f"ifnull(`tab{doctype}`.`{df.fieldname}`, '')!=''")
 return "( " + " or ".join(conditions) + " )"
@@ -171,8 +166,8 @@ def delete_contact_and_address(doctype, docname):
 @frappe.whitelist()
 @frappe.validate_and_sanitize_search_inputs
 def filter_dynamic_link_doctypes(
-doctype, txt: str, searchfield, start, page_len, filters: Dict
-) -> List[List[str]]:
+doctype, txt: str, searchfield, start, page_len, filters: dict
+) -> list[list[str]]:
 from frappe.permissions import get_doctypes_with_read
 txt = txt or ""
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2015, Frappe Technologies and contributors
 # License: MIT. See LICENSE
@@ -236,7 +235,7 @@ def address_query(doctype, txt, searchfield, start, page_len, filters):
 meta = frappe.get_meta("Address")
 for fieldname, value in filters.items():
 if meta.get_field(fieldname) or fieldname in frappe.db.DEFAULT_COLUMNS:
-condition += " and {field}={value}".format(field=fieldname, value=frappe.db.escape(value))
+condition += f" and {fieldname}={frappe.db.escape(value)}"
 searchfields = meta.get_search_fields()
@@ -246,9 +245,9 @@ def address_query(doctype, txt, searchfield, start, page_len, filters):
 search_condition = ""
 for field in searchfields:
 if search_condition == "":
-search_condition += "`tabAddress`.`{field}` like %(txt)s".format(field=field)
+search_condition += f"`tabAddress`.`{field}` like %(txt)s"
 else:
-search_condition += " or `tabAddress`.`{field}` like %(txt)s".format(field=field)
+search_condition += f" or `tabAddress`.`{field}` like %(txt)s"
 return frappe.db.sql(
 """select
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2015, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 import unittest
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2015, Frappe Technologies and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2015, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 import unittest
@@ -290,7 +290,7 @@ def get_contact_with_phone_number(number):
 return
 contacts = frappe.get_all(
-"Contact Phone", filters=[["phone", "like", "%{0}".format(number)]], fields=["parent"], limit=1
+"Contact Phone", filters=[["phone", "like", f"%{number}"]], fields=["parent"], limit=1
 )
 return contacts[0].parent if contacts else None
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2017, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 import unittest
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2017, Frappe Technologies and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2017, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 import unittest
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2017, Frappe Technologies and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2017, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 import unittest
@@ -1,5 +1,4 @@
 import json
-from typing import Dict, List
 import frappe
 from frappe.core.doctype.file.file import File, setup_folder_path
@@ -14,7 +13,7 @@ def unzip_file(name: str):
 @frappe.whitelist()
-def get_attached_images(doctype: str, names: List[str]) -> frappe._dict:
+def get_attached_images(doctype: str, names: list[str]) -> frappe._dict:
 """get list of image urls attached in form
 returns {name: ['image.jpg', 'image.png']}"""
@@ -40,7 +39,7 @@ def get_attached_images(doctype: str, names: List[str]) -> frappe._dict:
 @frappe.whitelist()
-def get_files_in_folder(folder: str, start: int = 0, page_length: int = 20) -> Dict:
+def get_files_in_folder(folder: str, start: int = 0, page_length: int = 20) -> dict:
 start = cint(start)
 page_length = cint(page_length)
@@ -66,7 +65,7 @@ def get_files_in_folder(folder: str, start: int = 0, page_length: int = 20) -> D
 @frappe.whitelist()
-def get_files_by_search_text(text: str) -> List[Dict]:
+def get_files_by_search_text(text: str) -> list[dict]:
 if not text:
 return []
@@ -102,7 +101,7 @@ def create_new_folder(file_name: str, folder: str) -> File:
 @frappe.whitelist()
-def move_file(file_list: List[File], new_parent: str, old_parent: str) -> None:
+def move_file(file_list: list[File], new_parent: str, old_parent: str) -> None:
 if isinstance(file_list, str):
 file_list = json.loads(file_list)
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
@@ -28,7 +27,7 @@ class TestAccessLog(unittest.TestCase):
 "User", frappe.session.user, fieldname="api_secret"
 )
 api_key = frappe.db.get_value("User", "Administrator", "api_key")
-self.header = {"Authorization": "token {}:{}".format(api_key, generated_secret)}
+self.header = {"Authorization": f"token {api_key}:{generated_secret}"}
 self.test_html_template = """
 <!DOCTYPE html>
@@ -74,9 +74,7 @@ def get_feed_match_conditions(user=None, doctype="Comment"):
 user_permissions = frappe.permissions.get_user_permissions(user)
 can_read = frappe.get_user().get_can_read()
-can_read_doctypes = [
-"'{}'".format(dt) for dt in list(set(can_read) - set(list(user_permissions)))
-]
+can_read_doctypes = [f"'{dt}'" for dt in list(set(can_read) - set(list(user_permissions)))]
 if can_read_doctypes:
 conditions += [
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2015, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 import time
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and contributors
 # License: MIT. See LICENSE
 import json
@@ -51,7 +50,7 @@ class Comment(Document):
 return
 frappe.publish_realtime(
-"update_docinfo_for_{}_{}".format(self.reference_doctype, self.reference_name),
+f"update_docinfo_for_{self.reference_doctype}_{self.reference_name}",
 {"doc": self.as_dict(), "key": key, "action": action},
 after_commit=True,
 )
@@ -183,7 +182,7 @@ def update_comments_in_parent(reference_doctype, reference_name, _comments):
 try:
 # use sql, so that we do not mess with the timestamp
 frappe.db.sql(
-"""update `tab{0}` set `_comments`=%s where name=%s""".format(reference_doctype), # nosec
+f"""update `tab{reference_doctype}` set `_comments`=%s where name=%s""", # nosec
 (json.dumps(_comments[-100:]), reference_name),
 )
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 import json
@@ -3,7 +3,6 @@
 from collections import Counter
 from email.utils import getaddresses
-from typing import List
 from urllib.parse import unquote
 from parse import compile
@@ -204,7 +203,7 @@ class Communication(Document, CommunicationEmailMixin):
 """
 emails = split_emails(emails) if isinstance(emails, str) else (emails or [])
 if exclude_displayname:
-return [email.lower() for email in set([parse_addr(email)[1] for email in emails]) if email]
+return [email.lower() for email in {parse_addr(email)[1] for email in emails} if email]
 return [email.lower() for email in set(emails) if email]
 def to_list(self, exclude_displayname=True):
@@ -229,7 +228,7 @@ class Communication(Document, CommunicationEmailMixin):
 def notify_change(self, action):
 frappe.publish_realtime(
-"update_docinfo_for_{}_{}".format(self.reference_doctype, self.reference_name),
+f"update_docinfo_for_{self.reference_doctype}_{self.reference_name}",
 {"doc": self.as_dict(), "key": "communications", "action": action},
 after_commit=True,
 )
@@ -425,7 +424,7 @@ def get_permission_query_conditions_for_communication(user):
 )
-def get_contacts(email_strings: List[str], auto_create_contact=False) -> List[str]:
+def get_contacts(email_strings: list[str], auto_create_contact=False) -> list[str]:
 email_addrs = get_emails(email_strings)
 contacts = []
 for email in email_addrs:
@@ -437,9 +436,7 @@ def get_contacts(email_strings: List[str], auto_create_contact=False) -> List[st
 first_name = frappe.unscrub(email_parts[0])
 try:
-contact_name = (
-"{0}-{1}".format(first_name, email_parts[1]) if first_name == "Contact" else first_name
-)
+contact_name = f"{first_name}-{email_parts[1]}" if first_name == "Contact" else first_name
 contact = frappe.get_doc(
 {"doctype": "Contact", "first_name": contact_name, "name": contact_name}
 )
@@ -455,7 +452,7 @@ def get_contacts(email_strings: List[str], auto_create_contact=False) -> List[st
 return contacts
-def get_emails(email_strings: List[str]) -> List[str]:
+def get_emails(email_strings: list[str]) -> list[str]:
 email_addrs = []
 for email_string in email_strings:
@@ -522,7 +519,7 @@ def get_email_without_link(email):
 except IndexError:
 return email
-return "{0}@{1}".format(email_id, email_host)
+return f"{email_id}@{email_host}"
 def update_parent_document_on_communication(doc):
@@ -2,7 +2,7 @@
 # License: MIT. See LICENSE
 import json
-from typing import TYPE_CHECKING, Dict
+from typing import TYPE_CHECKING
 import frappe
 import frappe.email.smtp
@@ -45,7 +45,7 @@ def make(
 email_template=None,
 communication_type=None,
 **kwargs,
-) -> Dict[str, str]:
+) -> dict[str, str]:
 """Make a new communication. Checks for email permissions for specified Document.
 :param doctype: Reference DocType.
@@ -122,7 +122,7 @@ def _make(
 email_template=None,
 communication_type=None,
 add_signature=True,
-) -> Dict[str, str]:
+) -> dict[str, str]:
 """Internal method to make a new communication that ignores Permission checks."""
 sender = sender or get_formatted_email(frappe.session.user)
@@ -1,5 +1,3 @@
-from typing import List
 import frappe
 from frappe import _
 from frappe.core.utils import get_parent_doc
@@ -201,7 +199,7 @@ class CommunicationEmailMixin:
 return _("Leave this conversation")
 return ""
-def exclude_emails_list(self, is_inbound_mail_communcation=False, include_sender=False) -> List:
+def exclude_emails_list(self, is_inbound_mail_communcation=False, include_sender=False) -> list:
 """List of mail id's excluded while sending mail."""
 all_ids = self.get_all_email_addresses(exclude_displayname=True)
@@ -236,7 +236,7 @@ class TestCommunication(unittest.TestCase):
 "communication_medium": "Email",
 "subject": "Document Link in Email",
 "sender": "comm_sender@example.com",
-"recipients": "comm_recipient+{0}+{1}@example.com".format(quote("Note"), quote(note.name)),
+"recipients": "comm_recipient+{}+{}@example.com".format(quote("Note"), quote(note.name)),
 }
 ).insert(ignore_permissions=True)
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2015, Frappe Technologies and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2015, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 import unittest
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2015, Frappe Technologies and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2015, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 import unittest
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
 # License: MIT. See LICENSE
@@ -331,7 +331,7 @@ class DataExporter:
 order_by = None
 table_columns = frappe.db.get_table_columns(self.parent_doctype)
 if "lft" in table_columns and "rgt" in table_columns:
-order_by = "`tab{doctype}`.`lft` asc".format(doctype=self.parent_doctype)
+order_by = f"`tab{self.parent_doctype}`.`lft` asc"
 # get permitted data only
 self.data = frappe.get_list(
 self.doctype, fields=["*"], filters=self.filters, limit_page_length=None, order_by=order_by
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 import unittest
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and contributors
 # License: MIT. See LICENSE
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
 # License: MIT. See LICENSE
@@ -132,8 +131,7 @@ class Exporter:
 child_doctype = table_df.options
 rows = self.add_data_row(child_doctype, child_row.parentfield, child_row, rows, i)
-for row in rows:
-yield row
+yield from rows
 def add_data_row(self, doctype, parentfield, doc, rows, row_idx):
 if len(rows) < row_idx + 1:
@@ -156,14 +154,14 @@ class Exporter:
 def get_data_as_docs(self):
 def format_column_name(df):
-return "`tab{0}`.`{1}`".format(df.parent, df.fieldname)
+return f"`tab{df.parent}`.`{df.fieldname}`"
 filters = self.export_filters
 if self.meta.is_nested_set():
-order_by = "`tab{0}`.`lft` ASC".format(self.doctype)
+order_by = f"`tab{self.doctype}`.`lft` ASC"
 else:
-order_by = "`tab{0}`.`creation` DESC".format(self.doctype)
+order_by = f"`tab{self.doctype}`.`creation` DESC"
 parent_fields = [format_column_name(df) for df in self.fields if df.parent == self.doctype]
 parent_data = frappe.db.get_list(
@@ -183,7 +181,7 @@ class Exporter:
 child_table_df = self.meta.get_field(key)
 child_table_doctype = child_table_df.options
 child_fields = ["name", "idx", "parent", "parentfield"] + list(
-set([format_column_name(df) for df in self.fields if df.parent == child_table_doctype])
+{format_column_name(df) for df in self.fields if df.parent == child_table_doctype}
 )
 data = frappe.db.get_all(
 child_table_doctype,
@@ -211,16 +209,16 @@ class Exporter:
 if is_parent:
 label = _(df.label)
 else:
-label = "{0} ({1})".format(_(df.label), _(df.child_table_df.label))
+label = f"{_(df.label)} ({_(df.child_table_df.label)})"
 if label in header:
 # this label is already in the header,
 # which means two fields with the same label
 # add the fieldname to avoid clash
 if is_parent:
-label = "{0}".format(df.fieldname)
+label = f"{df.fieldname}"
 else:
-label = "{0}.{1}".format(df.child_table_df.fieldname, df.fieldname)
+label = f"{df.child_table_df.fieldname}.{df.fieldname}"
 header.append(label)
@@ -253,5 +251,5 @@ class Exporter:
 def build_xlsx_response(self):
 build_xlsx_response(self.get_csv_array_for_export(), _(self.doctype))
-def group_children_data_by_parent(self, children_data: typing.Dict[str, list]):
+def group_children_data_by_parent(self, children_data: dict[str, list]):
 return groupby_metric(children_data, key="parent")
@@ -150,7 +150,7 @@ class Importer:
 if self.console:
 update_progress_bar(
-"Importing {0} records".format(total_payload_count),
+f"Importing {total_payload_count} records",
 current_index,
 total_payload_count,
 )
@@ -342,7 +342,7 @@ class Importer:
 row_number = json.loads(log.get("row_indexes"))[0]
 status = "Success" if log.get("success") else "Failure"
 message = (
-"Successfully Imported {0}".format(log.get("docname"))
+"Successfully Imported {}".format(log.get("docname"))
 if log.get("success")
 else log.get("messages")
 )
@@ -357,19 +357,17 @@ class Importer:
 if successful_records:
 print()
-print(
-"Successfully imported {0} records out of {1}".format(len(successful_records), len(import_log))
-)
+print(f"Successfully imported {len(successful_records)} records out of {len(import_log)}")
 if failed_records:
-print("Failed to import {0} records".format(len(failed_records)))
-file_name = "{0}_import_on_{1}.txt".format(self.doctype, frappe.utils.now())
-print("Check {0} for errors".format(os.path.join("sites", file_name)))
+print(f"Failed to import {len(failed_records)} records")
+file_name = f"{self.doctype}_import_on_{frappe.utils.now()}.txt"
+print("Check {} for errors".format(os.path.join("sites", file_name)))
 text = ""
 for w in failed_records:
-text += "Row Indexes: {0}\n".format(str(w.get("row_indexes", [])))
-text += "Messages:\n{0}\n".format("\n".join(w.get("messages", [])))
-text += "Traceback:\n{0}\n\n".format(w.get("exception"))
+text += "Row Indexes: {}\n".format(str(w.get("row_indexes", [])))
+text += "Messages:\n{}\n".format("\n".join(w.get("messages", [])))
+text += "Traceback:\n{}\n\n".format(w.get("exception"))
 with open(file_name, "w") as f:
 f.write(text)
@@ -384,7 +382,7 @@ class Importer:
 other_warnings.append(w)
 for row_number, warnings in warnings_by_row.items():
-print("Row {0}".format(row_number))
+print(f"Row {row_number}")
 for w in warnings:
 print(w.get("message"))
@@ -578,7 +576,7 @@ class ImportFile:
 extn = os.path.splitext(file_path)[1][1:]
 file_content = None
-with io.open(file_path, mode="rb") as f:
+with open(file_path, mode="rb") as f:
 file_content = f.read()
 return file_content, extn
@@ -991,9 +989,7 @@ class Column:
 self.warnings.append(
 {
 "col": self.column_number,
-"message": (
-"The following values do not exist for {}: {}".format(self.df.options, missing_values)
-),
+"message": (f"The following values do not exist for {self.df.options}: {missing_values}"),
 "type": "warning",
 }
 )
@@ -1023,8 +1019,8 @@ class Column:
 {
 "col": self.column_number,
 "message": (
-"The following values are invalid: {0}. Values must be"
-" one of {1}".format(invalid_values, valid_values)
+"The following values are invalid: {}. Values must be"
+" one of {}".format(invalid_values, valid_values)
 ),
 }
 )
@@ -1110,9 +1106,9 @@ def build_fields_dict_for_column_matching(parent_doctype):
 )
 else:
 name_headers = (
-"{0}.name".format(table_df.fieldname), # fieldname
-"ID ({0})".format(table_df.label), # label
-"{0} ({1})".format(_("ID"), translated_table_label), # translated label
+f"{table_df.fieldname}.name", # fieldname
+f"ID ({table_df.label})", # label
+"{} ({})".format(_("ID"), translated_table_label), # translated label
 )
 name_df.is_child_table_field = True
@@ -1164,11 +1160,11 @@ def build_fields_dict_for_column_matching(parent_doctype):
 for header in (
 # fieldname
-"{0}.{1}".format(table_df.fieldname, df.fieldname),
+f"{table_df.fieldname}.{df.fieldname}",
 # label
-"{0} ({1})".format(label, table_df.label),
+f"{label} ({table_df.label})",
 # translated label
-"{0} ({1})".format(translated_label, translated_table_label),
+f"{translated_label} ({translated_table_label})",
 ):
 out[header] = new_df
@@ -1177,8 +1173,8 @@ def build_fields_dict_for_column_matching(parent_doctype):
 autoname_field = get_autoname_field(parent_doctype)
 if autoname_field:
 for header in (
-"ID ({})".format(autoname_field.label), # label
-"{0} ({1})".format(_("ID"), _(autoname_field.label)), # translated label
+f"ID ({autoname_field.label})", # label
+"{} ({})".format(_("ID"), _(autoname_field.label)), # translated label
 # ID field should also map to the autoname field
 "ID",
 _("ID"),
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2020, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 # import frappe
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 import unittest
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright (c) 2019, Frappe Technologies and Contributors
 # License: MIT. See LICENSE
 import unittest
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and contributors | # Copyright (c) 2015, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and Contributors | # Copyright (c) 2015, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
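The repeated single-line deletions here and below drop `# -*- coding: utf-8 -*-` headers; since PEP 3120, UTF-8 is already the default source encoding in Python 3, so the declaration carries no information. A module can contain UTF-8 literals without it:

    # no coding declaration needed on Python 3 (PEP 3120)
    label = "café – naïve"
    print(label)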
@@ -248,7 +248,7 @@ class DocType(Document): | |||||
self.flags.update_fields_to_fetch_queries = [] | self.flags.update_fields_to_fetch_queries = [] | ||||
if set(old_fields_to_fetch) != set(df.fieldname for df in new_meta.get_fields_to_fetch()): | |||||
if set(old_fields_to_fetch) != {df.fieldname for df in new_meta.get_fields_to_fetch()}: | |||||
for df in new_meta.get_fields_to_fetch(): | for df in new_meta.get_fields_to_fetch(): | ||||
if df.fieldname not in old_fields_to_fetch: | if df.fieldname not in old_fields_to_fetch: | ||||
link_fieldname, source_fieldname = df.fetch_from.split(".", 1) | link_fieldname, source_fieldname = df.fetch_from.split(".", 1) | ||||
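Replacing `set(<generator>)` with a set comprehension gives the same result with one less call; it is purely a readability and micro-performance tweak. Sketch with hypothetical field rows:

    fields = [{"fieldname": "status"}, {"fieldname": "total"}]  # made-up rows
    assert set(f["fieldname"] for f in fields) == {f["fieldname"] for f in fields}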
@@ -385,7 +385,7 @@ class DocType(Document): | |||||
try: | try: | ||||
frappe.db.updatedb(self.name, Meta(self)) | frappe.db.updatedb(self.name, Meta(self)) | ||||
except Exception as e: | except Exception as e: | ||||
print("\n\nThere was an issue while migrating the DocType: {}\n".format(self.name)) | |||||
print(f"\n\nThere was an issue while migrating the DocType: {self.name}\n") | |||||
raise e | raise e | ||||
self.change_modified_of_parent() | self.change_modified_of_parent() | ||||
@@ -552,7 +552,7 @@ class DocType(Document): | |||||
for fname in ("{}.js", "{}.py", "{}_list.js", "{}_calendar.js", "test_{}.py", "test_{}.js"): | for fname in ("{}.js", "{}.py", "{}_list.js", "{}_calendar.js", "test_{}.py", "test_{}.js"): | ||||
fname = os.path.join(new_path, fname.format(frappe.scrub(new))) | fname = os.path.join(new_path, fname.format(frappe.scrub(new))) | ||||
if os.path.exists(fname): | if os.path.exists(fname): | ||||
with open(fname, "r") as f: | |||||
with open(fname) as f: | |||||
code = f.read() | code = f.read() | ||||
with open(fname, "w") as f: | with open(fname, "w") as f: | ||||
if fname.endswith(".js"): | if fname.endswith(".js"): | ||||
@@ -569,7 +569,7 @@ class DocType(Document): | |||||
f.write(file_content) | f.write(file_content) | ||||
# updating json file with new name | # updating json file with new name | ||||
doctype_json_path = os.path.join(new_path, "{}.json".format(frappe.scrub(new))) | |||||
doctype_json_path = os.path.join(new_path, f"{frappe.scrub(new)}.json") | |||||
current_data = frappe.get_file_json(doctype_json_path) | current_data = frappe.get_file_json(doctype_json_path) | ||||
current_data["name"] = new | current_data["name"] = new | ||||
@@ -643,7 +643,7 @@ class DocType(Document): | |||||
path = get_file_path(self.module, "DocType", self.name) | path = get_file_path(self.module, "DocType", self.name) | ||||
if os.path.exists(path): | if os.path.exists(path): | ||||
try: | try: | ||||
with open(path, "r") as txtfile: | |||||
with open(path) as txtfile: | |||||
olddoc = json.loads(txtfile.read()) | olddoc = json.loads(txtfile.read()) | ||||
old_field_names = [f["fieldname"] for f in olddoc.get("fields", [])] | old_field_names = [f["fieldname"] for f in olddoc.get("fields", [])] | ||||
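"r" is the default mode for `open`, so `open(path, "r")` and `open(path)` behave identically; note that the text encoding still defaults to the locale's preferred encoding, which this cleanup does not change. Sketch:

    import os
    import tempfile

    with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False) as tmp:
        tmp.write("example")
    with open(tmp.name) as f_default, open(tmp.name, "r") as f_explicit:
        assert f_default.read() == f_explicit.read() == "example"
    os.unlink(tmp.name)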
@@ -804,7 +804,7 @@ class DocType(Document): | |||||
{"label": "Old Parent", "fieldtype": "Link", "options": self.name, "fieldname": "old_parent"}, | {"label": "Old Parent", "fieldtype": "Link", "options": self.name, "fieldname": "old_parent"}, | ||||
) | ) | ||||
parent_field_label = "Parent {}".format(self.name) | |||||
parent_field_label = f"Parent {self.name}" | |||||
parent_field_name = frappe.scrub(parent_field_label) | parent_field_name = frappe.scrub(parent_field_label) | ||||
self.append( | self.append( | ||||
"fields", | "fields", | ||||
@@ -1417,7 +1417,7 @@ def validate_fields(meta): | |||||
def check_max_height(docfield): | def check_max_height(docfield): | ||||
if getattr(docfield, "max_height", None) and (docfield.max_height[-2:] not in ("px", "em")): | if getattr(docfield, "max_height", None) and (docfield.max_height[-2:] not in ("px", "em")): | ||||
frappe.throw("Max for {} height must be in px, em, rem".format(frappe.bold(docfield.fieldname))) | |||||
frappe.throw(f"Max for {frappe.bold(docfield.fieldname)} height must be in px, em, rem") | |||||
def check_no_of_ratings(docfield): | def check_no_of_ratings(docfield): | ||||
if docfield.fieldtype == "Rating": | if docfield.fieldtype == "Rating": | ||||
@@ -1,10 +1,8 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import random | import random | ||||
import string | import string | ||||
import unittest | import unittest | ||||
from typing import Dict, List, Optional | |||||
from unittest.mock import patch | from unittest.mock import patch | ||||
import frappe | import frappe | ||||
@@ -187,7 +185,7 @@ class TestDocType(unittest.TestCase): | |||||
"module": "Core", | "module": "Core", | ||||
"custom": 1, | "custom": 1, | ||||
"fields": [ | "fields": [ | ||||
{"fieldname": "{0}_field".format(field_option), "fieldtype": "Data", "options": field_option} | |||||
{"fieldname": f"{field_option}_field", "fieldtype": "Data", "options": field_option} | |||||
], | ], | ||||
} | } | ||||
) | ) | ||||
@@ -711,10 +709,10 @@ class TestDocType(unittest.TestCase): | |||||
def new_doctype( | def new_doctype( | ||||
name: Optional[str] = None, | |||||
name: str | None = None, | |||||
unique: bool = False, | unique: bool = False, | ||||
depends_on: str = "", | depends_on: str = "", | ||||
fields: Optional[List[Dict]] = None, | |||||
fields: list[dict] | None = None, | |||||
**kwargs, | **kwargs, | ||||
): | ): | ||||
if not name: | if not name: | ||||
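This signature shows both typing rewrites used across the commit: builtin generics (`list[dict]`, PEP 585, available from Python 3.9) and the `X | None` union syntax (PEP 604). Evaluating `X | None` at runtime needs Python 3.10+ or postponed annotations, so a 3.9 target would rely on `from __future__ import annotations`. A minimal sketch of such a signature (the stub below is illustrative, not the real helper):

    from __future__ import annotations  # lets 3.9 accept X | None in annotations

    def new_doctype_stub(name: str | None = None, fields: list[dict] | None = None) -> dict:
        # hypothetical stand-in for the real new_doctype helper
        return {"name": name or "Test DocType", "fields": fields or []}

    print(new_doctype_stub())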
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and contributors | # Copyright (c) 2019, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and contributors | # Copyright (c) 2019, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and contributors | # Copyright (c) 2020, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and Contributors | # Copyright (c) 2020, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and contributors | # Copyright (c) 2020, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and Contributors | # Copyright (c) 2020, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
# import frappe | # import frappe | ||||
@@ -1,7 +1,6 @@ | |||||
# Copyright (c) 2022, Frappe Technologies and contributors | # Copyright (c) 2022, Frappe Technologies and contributors | ||||
# For license information, please see license.txt | # For license information, please see license.txt | ||||
from typing import List, Set | |||||
import frappe | import frappe | ||||
from frappe import _ | from frappe import _ | ||||
@@ -25,7 +24,7 @@ class DocumentNamingSettings(Document): | |||||
return {"transactions": transactions, "prefixes": prefixes} | return {"transactions": transactions, "prefixes": prefixes} | ||||
def _get_transactions(self) -> List[str]: | |||||
def _get_transactions(self) -> list[str]: | |||||
readable_doctypes = set(get_doctypes_with_read()) | readable_doctypes = set(get_doctypes_with_read()) | ||||
@@ -34,7 +33,7 @@ class DocumentNamingSettings(Document): | |||||
return sorted(readable_doctypes.intersection(standard + custom)) | return sorted(readable_doctypes.intersection(standard + custom)) | ||||
def _get_prefixes(self, doctypes) -> List[str]: | |||||
def _get_prefixes(self, doctypes) -> list[str]: | |||||
"""Get all prefixes for naming series. | """Get all prefixes for naming series. | ||||
- For all templates prefix is evaluated considering today's date | - For all templates prefix is evaluated considering today's date | ||||
@@ -63,7 +62,7 @@ class DocumentNamingSettings(Document): | |||||
return self._evaluate_and_clean_templates(series_templates) | return self._evaluate_and_clean_templates(series_templates) | ||||
def _evaluate_and_clean_templates(self, series_templates: Set[str]) -> List[str]: | |||||
def _evaluate_and_clean_templates(self, series_templates: set[str]) -> list[str]: | |||||
evalauted_prefix = set() | evalauted_prefix = set() | ||||
series = frappe.qb.DocType("Series") | series = frappe.qb.DocType("Series") | ||||
@@ -79,7 +78,7 @@ class DocumentNamingSettings(Document): | |||||
return sorted(evalauted_prefix) | return sorted(evalauted_prefix) | ||||
def get_options_list(self, options: str) -> List[str]: | |||||
def get_options_list(self, options: str) -> list[str]: | |||||
return [op.strip() for op in options.split("\n") if op.strip()] | return [op.strip() for op in options.split("\n") if op.strip()] | ||||
@frappe.whitelist() | @frappe.whitelist() | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2017, Frappe Technologies and contributors | # Copyright (c) 2017, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -113,8 +112,8 @@ class Domain(Document): | |||||
# enable | # enable | ||||
frappe.db.sql( | frappe.db.sql( | ||||
"""update `tabPortal Menu Item` set enabled=1 | """update `tabPortal Menu Item` set enabled=1 | ||||
where route in ({0})""".format( | |||||
", ".join('"{0}"'.format(d) for d in self.data.allow_sidebar_items) | |||||
where route in ({})""".format( | |||||
", ".join(f'"{d}"' for d in self.data.allow_sidebar_items) | |||||
) | ) | ||||
) | ) | ||||
@@ -125,7 +124,7 @@ class Domain(Document): | |||||
# enable | # enable | ||||
frappe.db.sql( | frappe.db.sql( | ||||
"""update `tabPortal Menu Item` set enabled=0 | """update `tabPortal Menu Item` set enabled=0 | ||||
where route in ({0})""".format( | |||||
", ".join('"{0}"'.format(d) for d in self.data.remove_sidebar_items) | |||||
where route in ({})""".format( | |||||
", ".join(f'"{d}"' for d in self.data.remove_sidebar_items) | |||||
) | ) | ||||
) | ) |
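Only the string formatting changes in these two hunks; the query is still built by joining double-quoted route names into the `IN (...)` clause exactly as before. The string-building step in isolation, with made-up routes:

    allow_sidebar_items = ["blog", "projects"]  # hypothetical routes
    in_clause = ", ".join(f'"{d}"' for d in allow_sidebar_items)
    assert in_clause == '"blog", "projects"'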
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2017, Frappe Technologies and Contributors | # Copyright (c) 2017, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2017, Frappe Technologies and contributors | # Copyright (c) 2017, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -32,7 +31,7 @@ class DomainSettings(Document): | |||||
def restrict_roles_and_modules(self): | def restrict_roles_and_modules(self): | ||||
"""Disable all restricted roles and set `restrict_to_domain` property in Module Def""" | """Disable all restricted roles and set `restrict_to_domain` property in Module Def""" | ||||
active_domains = frappe.get_active_domains() | active_domains = frappe.get_active_domains() | ||||
all_domains = list((frappe.get_hooks("domains") or {})) | |||||
all_domains = list(frappe.get_hooks("domains") or {}) | |||||
def remove_role(role): | def remove_role(role): | ||||
frappe.db.delete("Has Role", {"role": role}) | frappe.db.delete("Has Role", {"role": role}) | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and contributors | # Copyright (c) 2015, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and contributors | # Copyright (c) 2015, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and Contributors | # Copyright (c) 2015, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -7,7 +7,6 @@ import os | |||||
import re | import re | ||||
import shutil | import shutil | ||||
import zipfile | import zipfile | ||||
from typing import List, Optional, Union | |||||
from urllib.parse import quote, unquote | from urllib.parse import quote, unquote | ||||
from PIL import Image, ImageFile, ImageOps | from PIL import Image, ImageFile, ImageOps | ||||
@@ -134,7 +133,7 @@ class File(Document): | |||||
shutil.move(source, target) | shutil.move(source, target) | ||||
self.flags.pop("original_path") | self.flags.pop("original_path") | ||||
def get_name_based_on_parent_folder(self) -> Union[str, None]: | |||||
def get_name_based_on_parent_folder(self) -> str | None: | |||||
if self.folder: | if self.folder: | ||||
return os.path.join(self.folder, self.file_name) | return os.path.join(self.folder, self.file_name) | ||||
@@ -328,7 +327,7 @@ class File(Document): | |||||
file_path = get_files_path(file_name, is_private=self.is_private) | file_path = get_files_path(file_name, is_private=self.is_private) | ||||
with open(file_path, "rb") as f: | with open(file_path, "rb") as f: | ||||
self.content_hash = get_content_hash(f.read()) | self.content_hash = get_content_hash(f.read()) | ||||
except IOError: | |||||
except OSError: | |||||
frappe.throw(_("File {0} does not exist").format(file_path)) | frappe.throw(_("File {0} does not exist").format(file_path)) | ||||
def make_thumbnail( | def make_thumbnail( | ||||
@@ -347,7 +346,7 @@ class File(Document): | |||||
image, filename, extn = get_local_image(self.file_url) | image, filename, extn = get_local_image(self.file_url) | ||||
else: | else: | ||||
image, filename, extn = get_web_image(self.file_url) | image, filename, extn = get_web_image(self.file_url) | ||||
except (HTTPError, SSLError, IOError, TypeError): | |||||
except (HTTPError, SSLError, OSError, TypeError): | |||||
return | return | ||||
size = width, height | size = width, height | ||||
@@ -364,7 +363,7 @@ class File(Document): | |||||
if set_as_thumbnail: | if set_as_thumbnail: | ||||
self.db_set("thumbnail_url", thumbnail_url) | self.db_set("thumbnail_url", thumbnail_url) | ||||
except IOError: | |||||
except OSError: | |||||
frappe.msgprint(_("Unable to write file format for {0}").format(path)) | frappe.msgprint(_("Unable to write file format for {0}").format(path)) | ||||
return | return | ||||
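`IOError` has been an alias of `OSError` since Python 3.3, so these `except` clauses catch exactly the same exceptions after the rename:

    assert IOError is OSError  # alias since Python 3.3

    try:
        open("/path/that/does/not/exist")
    except OSError as exc:  # also catches everything previously spelled IOError
        print(type(exc).__name__)  # FileNotFoundError, an OSError subclass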
@@ -387,7 +386,7 @@ class File(Document): | |||||
else: | else: | ||||
self.delete_file_data_content(only_thumbnail=True) | self.delete_file_data_content(only_thumbnail=True) | ||||
def unzip(self) -> List["File"]: | |||||
def unzip(self) -> list["File"]: | |||||
"""Unzip current file and replace it by its children""" | """Unzip current file and replace it by its children""" | ||||
if not self.file_url.endswith(".zip"): | if not self.file_url.endswith(".zip"): | ||||
frappe.throw(_("{0} is not a zip file").format(self.file_name)) | frappe.throw(_("{0} is not a zip file").format(self.file_name)) | ||||
@@ -506,7 +505,7 @@ class File(Document): | |||||
def save_file( | def save_file( | ||||
self, | self, | ||||
content: Optional[Union[bytes, str]] = None, | |||||
content: bytes | str | None = None, | |||||
decode=False, | decode=False, | ||||
ignore_existing_file_check=False, | ignore_existing_file_check=False, | ||||
overwrite=False, | overwrite=False, | ||||
@@ -4,7 +4,7 @@ import mimetypes | |||||
import os | import os | ||||
import re | import re | ||||
from io import BytesIO | from io import BytesIO | ||||
from typing import TYPE_CHECKING, Optional, Tuple, Union | |||||
from typing import TYPE_CHECKING, Optional | |||||
from urllib.parse import unquote | from urllib.parse import unquote | ||||
import requests | import requests | ||||
@@ -55,8 +55,8 @@ def setup_folder_path(filename: str, new_parent: str) -> None: | |||||
def get_extension( | def get_extension( | ||||
filename, | filename, | ||||
extn: Optional[str] = None, | |||||
content: Optional[bytes] = None, | |||||
extn: str | None = None, | |||||
content: bytes | None = None, | |||||
response: Optional["Response"] = None, | response: Optional["Response"] = None, | ||||
) -> str: | ) -> str: | ||||
mimetype = None | mimetype = None | ||||
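`Optional["Response"]` is presumably left untouched because `Response` is only available to the type checker here: a bare `"Response" | None` would try to OR a plain string at runtime and raise TypeError, unless the whole annotation were quoted or the module enabled postponed evaluation. A sketch of the pattern (the requests import is an assumption about where Response comes from):

    from typing import TYPE_CHECKING, Optional

    if TYPE_CHECKING:
        from requests import Response  # only needed by the type checker

    def describe(response: Optional["Response"] = None) -> str:
        # "Response | None" as a single quoted string, or a module-level
        # `from __future__ import annotations`, would also be valid here;
        # writing "Response" | None unquoted would fail at import time.
        return "empty" if response is None else "ok"

    print(describe())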
@@ -83,7 +83,7 @@ def get_extension( | |||||
return extn | return extn | ||||
def get_local_image(file_url: str) -> Tuple["ImageFile", str, str]: | |||||
def get_local_image(file_url: str) -> tuple["ImageFile", str, str]: | |||||
if file_url.startswith("/private"): | if file_url.startswith("/private"): | ||||
file_url_path = (file_url.lstrip("/"),) | file_url_path = (file_url.lstrip("/"),) | ||||
else: | else: | ||||
@@ -93,7 +93,7 @@ def get_local_image(file_url: str) -> Tuple["ImageFile", str, str]: | |||||
try: | try: | ||||
image = Image.open(file_path) | image = Image.open(file_path) | ||||
except IOError: | |||||
except OSError: | |||||
frappe.throw(_("Unable to read file format for {0}").format(file_url)) | frappe.throw(_("Unable to read file format for {0}").format(file_url)) | ||||
content = None | content = None | ||||
@@ -102,7 +102,7 @@ def get_local_image(file_url: str) -> Tuple["ImageFile", str, str]: | |||||
filename, extn = file_url.rsplit(".", 1) | filename, extn = file_url.rsplit(".", 1) | ||||
except ValueError: | except ValueError: | ||||
# no extn | # no extn | ||||
with open(file_path, "r") as f: | |||||
with open(file_path) as f: | |||||
content = f.read() | content = f.read() | ||||
filename = file_url | filename = file_url | ||||
@@ -113,7 +113,7 @@ def get_local_image(file_url: str) -> Tuple["ImageFile", str, str]: | |||||
return image, filename, extn | return image, filename, extn | ||||
def get_web_image(file_url: str) -> Tuple["ImageFile", str, str]: | |||||
def get_web_image(file_url: str) -> tuple["ImageFile", str, str]: | |||||
# download | # download | ||||
file_url = frappe.utils.get_url(file_url) | file_url = frappe.utils.get_url(file_url) | ||||
r = requests.get(file_url, stream=True) | r = requests.get(file_url, stream=True) | ||||
@@ -179,13 +179,13 @@ def remove_file_by_url(file_url: str, doctype: str = None, name: str = None) -> | |||||
return remove_file(fid=fid) | return remove_file(fid=fid) | ||||
def get_content_hash(content: Union[bytes, str]) -> str: | |||||
def get_content_hash(content: bytes | str) -> str: | |||||
if isinstance(content, str): | if isinstance(content, str): | ||||
content = content.encode() | content = content.encode() | ||||
return hashlib.md5(content).hexdigest() # nosec | return hashlib.md5(content).hexdigest() # nosec | ||||
def generate_file_name(name: str, suffix: Optional[str] = None, is_private: bool = False) -> str: | |||||
def generate_file_name(name: str, suffix: str | None = None, is_private: bool = False) -> str: | |||||
"""Generate conflict-free file name. Suffix will be ignored if name available. If the | """Generate conflict-free file name. Suffix will be ignored if name available. If the | ||||
provided suffix doesn't result in an available path, a random suffix will be picked. | provided suffix doesn't result in an available path, a random suffix will be picked. | ||||
""" | """ | ||||
@@ -203,7 +203,7 @@ def generate_file_name(name: str, suffix: Optional[str] = None, is_private: bool | |||||
return candidate_path | return candidate_path | ||||
def get_file_name(fname: str, optional_suffix: Optional[str] = None) -> str: | |||||
def get_file_name(fname: str, optional_suffix: str | None = None) -> str: | |||||
# convert to unicode | # convert to unicode | ||||
fname = cstr(fname) | fname = cstr(fname) | ||||
partial, extn = os.path.splitext(fname) | partial, extn = os.path.splitext(fname) | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2017, Frappe Technologies and contributors | # Copyright (c) 2017, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and contributors | # Copyright (c) 2015, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and contributors | # Copyright (c) 2020, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and contributors | # Copyright (c) 2020, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and Contributors | # Copyright (c) 2020, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
# import frappe | # import frappe | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and contributors | # Copyright (c) 2015, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -42,7 +41,7 @@ def export_languages_json(): | |||||
def sync_languages(): | def sync_languages(): | ||||
"""Sync frappe/geo/languages.json with Language""" | """Sync frappe/geo/languages.json with Language""" | ||||
with open(frappe.get_app_path("frappe", "geo", "languages.json"), "r") as f: | |||||
with open(frappe.get_app_path("frappe", "geo", "languages.json")) as f: | |||||
data = json.loads(f.read()) | data = json.loads(f.read()) | ||||
for l in data: | for l in data: | ||||
@@ -59,7 +58,7 @@ def sync_languages(): | |||||
def update_language_names(): | def update_language_names(): | ||||
"""Update frappe/geo/languages.json names (for use via patch)""" | """Update frappe/geo/languages.json names (for use via patch)""" | ||||
with open(frappe.get_app_path("frappe", "geo", "languages.json"), "r") as f: | |||||
with open(frappe.get_app_path("frappe", "geo", "languages.json")) as f: | |||||
data = json.loads(f.read()) | data = json.loads(f.read()) | ||||
for l in data: | for l in data: | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and Contributors | # Copyright (c) 2015, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and contributors | # Copyright (c) 2020, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and Contributors | # Copyright (c) 2020, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
# import frappe | # import frappe | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and contributors | # Copyright (c) 2020, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -29,7 +29,7 @@ class ModuleDef(Document): | |||||
"""Adds to `[app]/modules.txt`""" | """Adds to `[app]/modules.txt`""" | ||||
modules = None | modules = None | ||||
if not frappe.local.module_app.get(frappe.scrub(self.name)): | if not frappe.local.module_app.get(frappe.scrub(self.name)): | ||||
with open(frappe.get_app_path(self.app_name, "modules.txt"), "r") as f: | |||||
with open(frappe.get_app_path(self.app_name, "modules.txt")) as f: | |||||
content = f.read() | content = f.read() | ||||
if not self.name in content.splitlines(): | if not self.name in content.splitlines(): | ||||
modules = list(filter(None, content.splitlines())) | modules = list(filter(None, content.splitlines())) | ||||
@@ -50,7 +50,7 @@ class ModuleDef(Document): | |||||
modules = None | modules = None | ||||
if frappe.local.module_app.get(frappe.scrub(self.name)): | if frappe.local.module_app.get(frappe.scrub(self.name)): | ||||
with open(frappe.get_app_path(self.app_name, "modules.txt"), "r") as f: | |||||
with open(frappe.get_app_path(self.app_name, "modules.txt")) as f: | |||||
content = f.read() | content = f.read() | ||||
if self.name in content.splitlines(): | if self.name in content.splitlines(): | ||||
modules = list(filter(None, content.splitlines())) | modules = list(filter(None, content.splitlines())) | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and contributors | # Copyright (c) 2020, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and Contributors | # Copyright (c) 2020, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and contributors | # Copyright (c) 2020, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and Contributors | # Copyright (c) 2020, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
# import frappe | # import frappe | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and contributors | # Copyright (c) 2020, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and Contributors | # Copyright (c) 2020, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
# import frappe | # import frappe | ||||
@@ -15,7 +15,5 @@ class Package(Document): | |||||
@frappe.whitelist() | @frappe.whitelist() | ||||
def get_license_text(license_type): | def get_license_text(license_type): | ||||
with open( | |||||
os.path.join(os.path.dirname(__file__), "licenses", license_type + ".md"), "r" | |||||
) as textfile: | |||||
with open(os.path.join(os.path.dirname(__file__), "licenses", license_type + ".md")) as textfile: | |||||
return textfile.read() | return textfile.read() |
@@ -44,7 +44,7 @@ class PackageImport(Document): | |||||
package_path = frappe.get_site_path("packages", package_name) | package_path = frappe.get_site_path("packages", package_name) | ||||
# import Package | # import Package | ||||
with open(os.path.join(package_path, package_name + ".json"), "r") as packagefile: | |||||
with open(os.path.join(package_path, package_name + ".json")) as packagefile: | |||||
doc_dict = json.loads(packagefile.read()) | doc_dict = json.loads(packagefile.read()) | ||||
frappe.flags.package = import_doc(doc_dict) | frappe.flags.package = import_doc(doc_dict) | ||||
@@ -60,6 +60,6 @@ class PackageImport(Document): | |||||
# import files | # import files | ||||
for file in files: | for file in files: | ||||
import_file_by_path(file, force=self.force, ignore_version=True) | import_file_by_path(file, force=self.force, ignore_version=True) | ||||
log.append("Imported {}".format(file)) | |||||
log.append(f"Imported {file}") | |||||
self.log = "\n".join(log) | self.log = "\n".join(log) |
@@ -87,7 +87,7 @@ class PackageRelease(Document): | |||||
def make_tarfile(self, package): | def make_tarfile(self, package): | ||||
# make tarfile | # make tarfile | ||||
filename = "{}.tar.gz".format(self.name) | |||||
filename = f"{self.name}.tar.gz" | |||||
subprocess.check_output( | subprocess.check_output( | ||||
["tar", "czf", filename, package.package_name], cwd=frappe.get_site_path("packages") | ["tar", "czf", filename, package.package_name], cwd=frappe.get_site_path("packages") | ||||
) | ) | ||||
@@ -79,7 +79,7 @@ class Page(Document): | |||||
) | ) | ||||
def as_dict(self, no_nulls=False): | def as_dict(self, no_nulls=False): | ||||
d = super(Page, self).as_dict(no_nulls=no_nulls) | |||||
d = super().as_dict(no_nulls=no_nulls) | |||||
for key in ("script", "style", "content"): | for key in ("script", "style", "content"): | ||||
d[key] = self.get(key) | d[key] = self.get(key) | ||||
return d | return d | ||||
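On Python 3 the zero-argument `super()` inside a method resolves to the same object as the explicit `super(Page, self)`; the compiler supplies the class through the implicit `__class__` cell. A toy equivalent (not the frappe Page doctype):

    class Base:
        def as_dict(self):
            return {"base": True}

    class Page(Base):  # illustrative stand-in
        def as_dict(self):
            d = super().as_dict()  # same as super(Page, self).as_dict()
            d["page"] = True
            return d

    assert Page().as_dict() == {"base": True, "page": True}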
@@ -120,20 +120,20 @@ class Page(Document): | |||||
# script | # script | ||||
fpath = os.path.join(path, page_name + ".js") | fpath = os.path.join(path, page_name + ".js") | ||||
if os.path.exists(fpath): | if os.path.exists(fpath): | ||||
with open(fpath, "r") as f: | |||||
with open(fpath) as f: | |||||
self.script = render_include(f.read()) | self.script = render_include(f.read()) | ||||
self.script += f"\n\n//# sourceURL={page_name}.js" | self.script += f"\n\n//# sourceURL={page_name}.js" | ||||
# css | # css | ||||
fpath = os.path.join(path, page_name + ".css") | fpath = os.path.join(path, page_name + ".css") | ||||
if os.path.exists(fpath): | if os.path.exists(fpath): | ||||
with open(fpath, "r") as f: | |||||
with open(fpath) as f: | |||||
self.style = safe_decode(f.read()) | self.style = safe_decode(f.read()) | ||||
# html as js template | # html as js template | ||||
for fname in os.listdir(path): | for fname in os.listdir(path): | ||||
if fname.endswith(".html"): | if fname.endswith(".html"): | ||||
with open(os.path.join(path, fname), "r") as f: | |||||
with open(os.path.join(path, fname)) as f: | |||||
template = f.read() | template = f.read() | ||||
if "<!-- jinja -->" in template: | if "<!-- jinja -->" in template: | ||||
context = frappe._dict({}) | context = frappe._dict({}) | ||||