@@ -22,3 +22,6 @@ b2fc959307c7c79f5584625569d5aed04133ba13
# Format codebase and sort imports
c0c5b2ebdddbe8898ce2d5e5365f4931ff73b6bf
# update python code to use 3.10 supported features
81b37cb7d2160866afa2496873656afe53f0c145
@@ -17,21 +17,23 @@ if [ "$TYPE" == "server" ]; then
fi
if [ "$DB" == "mariadb" ];then
sudo apt install mariadb-client-10.3
mysql --host 127.0.0.1 --port 3306 -u root -e "SET GLOBAL character_set_server = 'utf8mb4'";
mysql --host 127.0.0.1 --port 3306 -u root -e "SET GLOBAL collation_server = 'utf8mb4_unicode_ci'";
curl -LsS -O https://downloads.mariadb.com/MariaDB/mariadb_repo_setup
sudo bash mariadb_repo_setup --mariadb-server-version=10.6
sudo apt install mariadb-client
mysql --host 127.0.0.1 --port 3306 -u root -e "CREATE DATABASE test_frappe_consumer";
mysql --host 127.0.0.1 --port 3306 -u root -e "CREATE USER 'test_frappe_consumer'@'localhost' IDENTIFIED BY 'test_frappe_consumer'";
mysql --host 127.0.0.1 --port 3306 -u root -e "GRANT ALL PRIVILEGES ON \`test_frappe_consumer\`.* TO 'test_frappe_consumer'@'localhost'";
mariadb --host 127.0.0.1 --port 3306 -u root -ptravis -e "SET GLOBAL character_set_server = 'utf8mb4'";
mariadb --host 127.0.0.1 --port 3306 -u root -ptravis -e "SET GLOBAL collation_server = 'utf8mb4_unicode_ci'";
mysql --host 127.0.0.1 --port 3306 -u root -e "CREATE DATABASE test_frappe_producer";
mysql --host 127.0.0.1 --port 3306 -u root -e "CREATE USER 'test_frappe_producer'@'localhost' IDENTIFIED BY 'test_frappe_producer'";
mysql --host 127.0.0.1 --port 3306 -u root -e "GRANT ALL PRIVILEGES ON \`test_frappe_producer\`.* TO 'test_frappe_producer'@'localhost'";
mariadb --host 127.0.0.1 --port 3306 -u root -ptravis -e "CREATE DATABASE test_frappe_consumer";
mariadb --host 127.0.0.1 --port 3306 -u root -ptravis -e "CREATE USER 'test_frappe_consumer'@'localhost' IDENTIFIED BY 'test_frappe_consumer'";
mariadb --host 127.0.0.1 --port 3306 -u root -ptravis -e "GRANT ALL PRIVILEGES ON \`test_frappe_consumer\`.* TO 'test_frappe_consumer'@'localhost'";
mysql --host 127.0.0.1 --port 3306 -u root -e "UPDATE mysql.user SET Password=PASSWORD('travis') WHERE User='root'";
mysql --host 127.0.0.1 --port 3306 -u root -e "FLUSH PRIVILEGES";
fi
mariadb --host 127.0.0.1 --port 3306 -u root -ptravis -e "CREATE DATABASE test_frappe_producer";
mariadb --host 127.0.0.1 --port 3306 -u root -ptravis -e "CREATE USER 'test_frappe_producer'@'localhost' IDENTIFIED BY 'test_frappe_producer'";
mariadb --host 127.0.0.1 --port 3306 -u root -ptravis -e "GRANT ALL PRIVILEGES ON \`test_frappe_producer\`.* TO 'test_frappe_producer'@'localhost'";
mariadb --host 127.0.0.1 --port 3306 -u root -ptravis -e "FLUSH PRIVILEGES";
fi
if [ "$DB" == "postgres" ];then
echo "travis" | psql -h 127.0.0.1 -p 5432 -c "CREATE DATABASE test_frappe_consumer" -U postgres;
@@ -9,7 +9,7 @@ from functools import lru_cache
@lru_cache(maxsize=None)
def fetch_pr_data(pr_number, repo, endpoint):
def fetch_pr_data(pr_number, repo, endpoint=""):
api_url = f"https://api.github.com/repos/{repo}/pulls/{pr_number}"
if endpoint:
@@ -37,7 +37,7 @@ def has_run_ui_tests_label(pr_number, repo="frappe/frappe"):
return has_label(pr_number, "Run UI Tests", repo)
def has_label(pr_number, label, repo="frappe/frappe"):
return any([label["name"] for label in fetch_pr_data(pr_number, repo, "")["labels"] if label["name"] == label])
return any([fetched_label["name"] for fetched_label in fetch_pr_data(pr_number, repo)["labels"] if fetched_label["name"] == label])
def is_py(file):
return file.endswith("py")
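Note on the has_label() change above: the old comprehension reused `label` as both the loop variable and the parameter, so the filter compared each fetched label dict against itself and never matched. A minimal sketch of the pitfall with made-up label data (simplified from the actual fix):

    label = "Run UI Tests"
    labels = [{"name": "Run UI Tests"}, {"name": "bug"}]

    # buggy: inside the comprehension `label` is rebound to each dict,
    # so `label["name"] == label` compares a str with a dict and is always False
    any([label["name"] for label in labels if label["name"] == label])  # False

    # fixed: a distinct loop variable keeps the parameter visible
    any(fetched_label["name"] == label for fetched_label in labels)  # True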
@@ -49,7 +49,7 @@ def is_frontend_code(file):
return file.lower().endswith((".css", ".scss", ".less", ".sass", ".styl", ".js", ".ts", ".vue"))
def is_docs(file):
regex = re.compile(r'\.(md|png|jpg|jpeg|csv)$|^.github|LICENSE')
regex = re.compile(r'\.(md|png|jpg|jpeg|csv|svg)$|^.github|LICENSE')
return bool(regex.search(file))
@@ -18,10 +18,10 @@ jobs:
name: Patch Test
services:
mysql:
image: mariadb:10.3
mariadb:
image: mariadb:10.6
env:
MYSQL_ALLOW_EMPTY_PASSWORD: YES
MARIADB_ROOT_PASSWORD: travis
ports:
- 3306:3306
options: --health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3
@@ -39,9 +39,9 @@ jobs:
fi
- name: Setup Python
uses: "gabrielfalcao/pyenv-action@v9"
uses: "gabrielfalcao/pyenv-action@v10"
with:
versions: 3.10:latest, 3.7:latest, 2.7:latest
versions: 3.10:latest, 3.7:latest
- name: Setup Node
uses: actions/setup-node@v3
@@ -123,14 +123,10 @@ jobs:
cd apps/frappe/
git remote set-url upstream https://github.com/frappe/frappe.git
pyenv global $(pyenv versions | grep '3.7')
for version in $(seq 12 13)
do
echo "Updating to v$version"
if [ $version == 12 ]; then
pyenv global $(pyenv versions | grep '2.7')
elif [ $version == 13 ]; then
pyenv global $(pyenv versions | grep '3.7')
fi
branch_name="version-$version-hotfix"
git fetch --depth 1 upstream $branch_name:$branch_name
git checkout -q -f $branch_name
@@ -27,10 +27,10 @@ jobs:
name: Python Unit Tests (MariaDB)
services:
mysql:
image: mariadb:10.3
mariadb:
image: mariadb:10.6
env:
MYSQL_ALLOW_EMPTY_PASSWORD: YES
MARIADB_ROOT_PASSWORD: travis
ports:
- 3306:3306
options: --health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3
@@ -26,10 +26,10 @@ jobs:
name: UI Tests (Cypress)
services:
mysql:
image: mariadb:10.3
mariadb:
image: mariadb:10.6
env:
MYSQL_ALLOW_EMPTY_PASSWORD: YES
MARIADB_ROOT_PASSWORD: travis
ports:
- 3306:3306
options: --health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3
@@ -36,7 +36,6 @@ dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
@@ -5,7 +5,7 @@ fail_fast: false
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.0.1
rev: v4.3.0
hooks:
- id: trailing-whitespace
files: "frappe.*"
@@ -15,6 +15,16 @@ repos:
args: ['--branch', 'develop']
- id: check-merge-conflict
- id: check-ast
- id: check-json
- id: check-toml
- id: check-yaml
- id: debug-statements
- repo: https://github.com/asottile/pyupgrade
rev: v2.34.0
hooks:
- id: pyupgrade
args: ['--py310-plus']
- repo: https://github.com/adityahase/black
rev: 9cb0a69f4d0030cdf687eddf314468b39ed54119
@@ -31,9 +41,7 @@ repos:
rev: 3.9.2
hooks:
- id: flake8
additional_dependencies: [
'flake8-bugbear',
]
additional_dependencies: ['flake8-bugbear',]
args: ['--config', '.github/helper/flake8.conf']
ci:
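The pyupgrade hook added above runs with --py310-plus, which is what drives the annotation rewrites in the Python hunks further down. A rough sketch of the kind of change it makes (function name is illustrative):

    from typing import Dict, Optional, Union

    # before: typing-module generics
    def lookup(key: str, default: Optional[str] = None) -> Dict[str, Union[int, str]]: ...

    # after pyupgrade --py310-plus: PEP 585 builtins and PEP 604 unions
    def lookup(key: str, default: str | None = None) -> dict[str, int | str]: ...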
@@ -1,2 +0,0 @@
disable=access-member-before-definition
disable=no-member
@@ -1,4 +0,0 @@
BASEDIR := $(realpath .)
clean:
find $(BASEDIR) | grep -E "__pycache__|\.pyc" | xargs rm -rf
@@ -1,50 +1,32 @@
## Frappe framework includes these public works
### Javascript / CSS
- Bootstrap: MIT License, (c) Twitter Inc, https://getbootstrap.com
- JQuery: MIT License, (c) JQuery Foundation, http://jquery.org/license
- JQuery UI: MIT License / GPL 2, (c) JQuery Foundation, https://jqueryui.com/about
- JQuery UI Bootstrap Theme: MIT / GPL 2, (c) Addy Osmani, http://addyosmani.github.com/jquery-ui-bootstrap
- QUnit: MIT License, (c) JQuery Foundation, http://jquery.org/license
- jquery.event.drag, MIT License, (c) 2010 Three Dub Media - http://threedubmedia.com
- JQuery Cookie Plugin, MIT / GPL 2, (c) 2011, Klaus Hartl
- JQuery Time Picker, MIT License, (c) 2013 Trent Richardson, http://trentrichardson.com/examples/timepicker
- JQuery Hotkeys Plugin, MIT License, (c) 2010, John Resig
- prettydate.js, MIT License, (c) 2011, John Resig
- JQuery Resize Event, MIT License, (c) 2010 "Cowboy" Ben Alman
- excanvas.js, Apache License Version 2.0, (c) 2006 Google Inc
- showdown.js - Javascript Markdown, BSD-style Open Source License, (c) 2007 John Fraser
- Beautify HTML - MIT License, (c) 2007-2013 Einar Lielmanis and contributors.
- JQuery Gantt - MIT License, http://taitems.github.com/jQuery.Gantt/
- SlickGrid - MIT License, https://github.com/mleibman/SlickGrid
- MomentJS - MIT License, https://github.com/moment/moment
- JSColor - LGPL, (c) Jan Odvarko, http://jscolor.com
- FullCalendar - MIT License, (c) 2013 Adam Shaw, http://fullcalendar.io/license/
- Sortable - MIT License (c) 2013-2015 Lebedev Konstantin http://rubaxa.github.io/Sortable/
### Python
- minify.js - MIT License, (c) 2002 Douglas Crockford
## 3rd-Party Software Report
### Icon Fonts
The following 3rd-party software packages may be used by or distributed with <https://github.com/frappe/frappe>.
- Font Awesome - http://fontawesome.io/
- Font License: SIL OFL 1.1 (http://scripts.sil.org/OFL)
- Code License: MIT (http://choosealicense.com/licenses/mit/)
- Octicons (c) GitHub Inc, https://octicons.github.com/
- Font License: SIL OFL 1.1 (http://scripts.sil.org/OFL)
- Code License: MIT (http://choosealicense.com/licenses/mit/)
- Ionicons - MIT License, http://ionicons.com/
- Bootstrap: MIT License, (c) Twitter Inc, <https://getbootstrap.com>
- JQuery: MIT License, (c) JQuery Foundation, <http://jquery.org/license>
- FullCalendar - MIT License, (c) 2013 Adam Shaw, <http://fullcalendar.io/license/>
- JSignature - MIT License, (c) 2012 Willow Systems Corp <http://willow-systems.com>, (c) 2010 Brinley Ang <http://www.unbolt.net>
- PhotoSwipe - MIT License, (c) 2014-2015 Dmitry Semenov, <http://dimsemenov.com>
- Leaflet - (c) 2010-2016, Vladimir Agafonkin, (c) 2010-2011, CloudMade
- Leaflet.Locate - (c) 2016 Dominik Moritz
- Leaflet.draw - (c) 2012-2017, Jacob Toye, Jon West, Smartrak
- Leaflet.EasyButton - MIT License, (C) 2014 Daniel Montague
- Fluxify - GNU GENERAL PUBLIC LICENSE Version 2 (C) 1989 - 1991 Free Software Foundation, Inc., <http://fsf.org/>
### IP Address Database
### Icon Fonts
- GeoIP: (c) 2014 MaxMind, http://dev.maxmind.com/geoip/geoip2/downloadable/
- Font Awesome - <http://fontawesome.io/>
- Font License: SIL OFL 1.1 (<http://scripts.sil.org/OFL>)
- Code License: MIT (<http://choosealicense.com/licenses/mit/>)
- Octicons (c) GitHub Inc, <https://octicons.github.com/>
- Font License: SIL OFL 1.1 (<http://scripts.sil.org/OFL>)
- Code License: MIT (<http://choosealicense.com/licenses/mit/>)
- Inter - SIL Open Font License, 1.1 (c) 2020 Rasmus Andersson (<https://github.com/rsms/inter>)
### Wallpaper
### IP Address Database
- Version 5 Wallpaper: http://magdeleine.co/photo-nick-west-n-139/ (Public Domain)
- GeoIP: (c) 2014 MaxMind, <http://dev.maxmind.com/geoip/geoip2/downloadable/>
---
Last updated: 1st Jan 2015
Last updated: 4th July 2022
@@ -12,6 +12,7 @@ let app_paths = app_list
module.exports = {
includePaths: [node_modules_path, ...app_paths],
quietDeps: true,
importer: function(url) {
if (url.startsWith("~")) {
// strip ~ so that it can resolve from node_modules
@@ -17,7 +17,7 @@ import json
import os
import re
import warnings
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union
from typing import TYPE_CHECKING, Any, Callable, Literal, Optional, overload
import click
from werkzeug.local import Local, release_local
@@ -79,7 +79,7 @@ class _dict(dict):
return _dict(self)
def _(msg, lang=None, context=None) -> str:
def _(msg: str, lang: str | None = None, context: str | None = None) -> str:
"""Returns translated string in current lang, if exists.
Usage:
_('Change')
@@ -104,7 +104,7 @@ def _(msg, lang=None, context=None) -> str:
translated_string = ""
if context:
string_key = "{msg}:{context}".format(msg=msg, context=context)
string_key = f"{msg}:{context}"
translated_string = get_full_dict(lang).get(string_key)
if not translated_string:
@@ -114,7 +114,7 @@ def _(msg, lang=None, context=None) -> str:
return translated_string or non_translated_string
def as_unicode(text, encoding="utf-8"):
def as_unicode(text: str, encoding: str = "utf-8") -> str:
"""Convert to unicode if required"""
if isinstance(text, str):
return text
@@ -126,7 +126,7 @@ def as_unicode(text, encoding="utf-8"):
return str(text)
def get_lang_dict(fortype, name=None):
def get_lang_dict(fortype: str, name: str | None = None) -> dict[str, str]:
"""Returns the translated language dict for the given type and name.
:param fortype: must be one of `doctype`, `page`, `report`, `include`, `jsfile`, `boot`
@@ -136,11 +136,11 @@ def get_lang_dict(fortype, name=None):
return get_dict(fortype, name)
def set_user_lang(user, user_language=None):
def set_user_lang(user: str, user_language: str | None = None) -> None:
"""Guess and set user language for the session. `frappe.local.lang`"""
from frappe.translate import get_user_lang
local.lang = get_user_lang(user)
local.lang = get_user_lang(user) or user_language
# local-globals
@@ -166,24 +166,22 @@ lang = local("lang")
if TYPE_CHECKING:
from frappe.database.mariadb.database import MariaDBDatabase
from frappe.database.postgres.database import PostgresDatabase
from frappe.model.document import Document
from frappe.query_builder.builder import MariaDB, Postgres
from frappe.utils.redis_wrapper import RedisWrapper
db: Union[MariaDBDatabase, PostgresDatabase]
qb: Union[MariaDB, Postgres]
db: MariaDBDatabase | PostgresDatabase
qb: MariaDB | Postgres
# end: static analysis hack
def init(site, sites_path=None, new_site=False):
def init(site: str, sites_path: str = ".", new_site: bool = False) -> None:
"""Initialize frappe for the current site. Reset thread locals `frappe.local`"""
if getattr(local, "initialised", None):
return
if not sites_path:
sites_path = "."
local.error_log = []
local.message_log = []
local.debug_log = []
local.initialised = True
def connect(site=None, db_name=None, set_admin_as_user=True):
def connect(
site: str | None = None, db_name: str | None = None, set_admin_as_user: bool = True
) -> None:
"""Connect to site database instance.
:param site: If site is given, calls `frappe.init`.
@@ -288,7 +288,7 @@ def connect_replica():
local.db = local.replica_db
def get_site_config(sites_path=None, site_path=None):
def get_site_config(sites_path: str | None = None, site_path: str | None = None) -> dict[str, Any]:
"""Returns `site_config.json` combined with `sites/common_site_config.json`.
`site_config` is a set of site wide settings like database name, password, email etc."""
config = {}
@@ -311,15 +311,15 @@ def get_site_config(sites_path=None, site_path=None):
try:
config.update(get_file_json(site_config))
except Exception as error:
click.secho("{0}/site_config.json is invalid".format(local.site), fg="red")
click.secho(f"{local.site}/site_config.json is invalid", fg="red")
print(error)
elif local.site and not local.flags.new_site:
raise IncorrectSitePath("{0} does not exist".format(local.site))
raise IncorrectSitePath(f"{local.site} does not exist")
return _dict(config)
def get_conf(site=None):
def get_conf(site: str | None = None) -> dict[str, Any]:
if hasattr(local, "conf"):
return local.conf
@@ -363,14 +363,14 @@ def cache() -> "RedisWrapper":
return redis_server
def get_traceback(with_context=False):
def get_traceback(with_context: bool = False) -> str:
"""Returns error traceback."""
from frappe.utils import get_traceback
return get_traceback(with_context=with_context)
def errprint(msg):
def errprint(msg: str) -> None:
"""Log error. This is sent back as `exc` in response.
:param msg: Message."""
@@ -381,11 +381,11 @@ def errprint(msg):
error_log.append({"exc": msg})
def print_sql(enable=True):
def print_sql(enable: bool = True) -> None:
return cache().set_value("flag_print_sql", enable)
def log(msg):
def log(msg: str) -> None:
"""Add to `debug_log`.
:param msg: Message."""
@@ -397,17 +397,17 @@ def log(msg):
def msgprint(
msg,
title=None,
raise_exception=0,
as_table=False,
as_list=False,
indicator=None,
alert=False,
primary_action=None,
is_minimizable=None,
wide=None,
):
msg: str,
title: str | None = None,
raise_exception: bool | type[Exception] = False,
as_table: bool = False,
as_list: bool = False,
indicator: Literal["blue", "green", "orange", "red", "yellow"] | None = None,
alert: bool = False,
primary_action: str = None,
is_minimizable: bool = False,
wide: bool = False,
) -> None:
"""Print a message to the user (via HTTP response).
Messages are sent in the `__server_messages` property in the
response JSON and shown in a pop-up / modal.
local.message_log = local.message_log[:-1]
def throw(msg, exc=ValidationError, title=None, is_minimizable=None, wide=None, as_list=False):
def throw(
msg: str,
exc: type[Exception] = ValidationError,
title: str | None = None,
is_minimizable: bool = False,
wide: bool = False,
as_list: bool = False,
) -> None:
"""Throw execption and show message (`msgprint`).
:param msg: Message.
@@ -520,12 +527,6 @@ def throw(msg, exc=ValidationError, title=None, is_minimizable=None, wide=None,
)
def emit_js(js, user=False, **kwargs):
if user is False:
user = session.user
publish_realtime("eval_js", js, user=user, **kwargs)
def create_folder(path, with_init=False):
"""Create a folder in the given path and add an `__init__.py` file (optional).
@@ -540,7 +541,7 @@ def create_folder(path, with_init=False):
touch_file(os.path.join(path, "__init__.py"))
def set_user(username):
def set_user(username: str):
"""Set current user.
:param username: **User** name to set as current user."""
@@ -563,7 +564,7 @@ def get_user():
return local.user_perms
def get_roles(username=None):
def get_roles(username=None) -> list[str]:
"""Returns roles of current user."""
if not local.session:
return ["Guest"]
@@ -813,7 +814,7 @@ def write_only():
return innfn
def only_for(roles, message=False):
def only_for(roles: list[str] | str, message=False):
"""Raise `frappe.PermissionError` if the user does not have any of the given **Roles**.
:param roles: List of roles to check."""
@@ -846,7 +847,7 @@ def get_domain_data(module):
raise
def clear_cache(user=None, doctype=None):
def clear_cache(user: str | None = None, doctype: str | None = None):
"""Clear **User**, **DocType** or global cache.
:param user: If user is given, only user cache is cleared.
@@ -979,7 +980,7 @@ def has_website_permission(doc=None, ptype="read", user=None, verbose=False, doc
return False
def is_table(doctype):
def is_table(doctype: str) -> bool:
"""Returns True if `istable` property (indicating child Table) is set for given DocType."""
def get_tables():
@@ -989,14 +990,16 @@ def is_table(doctype):
return doctype in tables
def get_precision(doctype, fieldname, currency=None, doc=None):
def get_precision(
doctype: str, fieldname: str, currency: str | None = None, doc: Optional["Document"] = None
) -> int:
"""Get precision for a given field"""
from frappe.model.meta import get_field_precision
return get_field_precision(get_meta(doctype).get_field(fieldname), doc, currency)
def generate_hash(txt: Optional[str] = None, length: Optional[int] = None) -> str:
def generate_hash(txt: str | None = None, length: int | None = None) -> str:
"""Generates random hash for given text + current timestamp + random string."""
import hashlib
import time
@@ -1018,7 +1021,12 @@ def reset_metadata_version():
return v
def new_doc(doctype, parent_doc=None, parentfield=None, as_dict=False):
def new_doc(
doctype: str,
parent_doc: Optional["Document"] = None,
parentfield: str | None = None,
as_dict: bool = False,
) -> "Document":
"""Returns a new document of the given DocType with defaults set.
:param doctype: DocType of the new document.
@@ -1036,6 +1044,16 @@ def set_value(doctype, docname, fieldname, value=None):
return frappe.client.set_value(doctype, docname, fieldname, value)
@overload
def get_cached_doc(doctype, docname, _allow_dict=True) -> dict:
...
@overload
def get_cached_doc(*args, **kwargs) -> "Document":
...
def get_cached_doc(*args, **kwargs):
allow_dict = kwargs.pop("_allow_dict", False)
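The @overload stubs added above are purely for static analysis; at runtime the behaviour is unchanged and still keyed off the _allow_dict flag popped from kwargs in the body. Roughly, they tell a type checker (argument values are illustrative):

    raw = frappe.get_cached_doc("User", "Administrator", _allow_dict=True)  # typed as dict
    doc = frappe.get_cached_doc("ToDo", todo_name)  # otherwise typed as Document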
@@ -1076,7 +1094,7 @@ def get_cached_doc(*args, **kwargs):
return doc
def can_cache_doc(args):
def can_cache_doc(args) -> str | None:
"""
Determine if document should be cached based on get_doc params.
Returns cache key if doc can be cached, None otherwise.
@@ -1093,7 +1111,7 @@ def can_cache_doc(args):
return get_document_cache_key(doctype, name)
def get_document_cache_key(doctype, name):
def get_document_cache_key(doctype: str, name: str):
return f"{doctype}::{name}"
@@ -1109,7 +1127,9 @@ def clear_document_cache(doctype, name):
delattr(local, "website_settings")
def get_cached_value(doctype, name, fieldname="name", as_dict=False):
def get_cached_value(
doctype: str, name: str, fieldname: str = "name", as_dict: bool = False
) -> Any:
try:
doc = get_cached_doc(doctype, name, _allow_dict=True)
except DoesNotExistError:
@@ -1127,7 +1147,7 @@ def get_cached_value(doctype, name, fieldname="name", as_dict=False):
return values
def get_doc(*args, **kwargs):
def get_doc(*args, **kwargs) -> "Document":
"""Return a `frappe.model.document.Document` object of the given type and name.
:param arg1: DocType name as string **or** document JSON.
@@ -1186,16 +1206,16 @@ def get_meta_module(doctype):
def delete_doc(
doctype=None,
name=None,
force=0,
ignore_doctypes=None,
for_reload=False,
ignore_permissions=False,
flags=None,
ignore_on_trash=False,
ignore_missing=True,
delete_permanently=False,
doctype: str | None = None,
name: str | None = None,
force: bool = False,
ignore_doctypes: list[str] | None = None,
for_reload: bool = False,
ignore_permissions: bool = False,
flags: None = None,
ignore_on_trash: bool = False,
ignore_missing: bool = True,
delete_permanently: bool = False,
):
"""Delete a document. Calls `frappe.model.delete_doc.delete_doc`.
@@ -1238,7 +1258,13 @@ def reload_doctype(doctype, force=False, reset_permissions=False):
)
def reload_doc(module, dt=None, dn=None, force=False, reset_permissions=False):
def reload_doc(
module: str,
dt: str | None = None,
dn: str | None = None,
force: bool = False,
reset_permissions: bool = False,
):
"""Reload Document from model (`[module]/[doctype]/[name]/[name].json`) files.
:param module: Module name.
@@ -1289,12 +1315,12 @@ def get_module(modulename):
return importlib.import_module(modulename)
def scrub(txt):
def scrub(txt: str) -> str:
"""Returns sluggified string. e.g. `Sales Order` becomes `sales_order`."""
return cstr(txt).replace(" ", "_").replace("-", "_").lower()
def unscrub(txt):
def unscrub(txt: str) -> str:
"""Returns titlified string. e.g. `sales_order` becomes `Sales Order`."""
return txt.replace("_", " ").replace("-", " ").title()
@@ -1402,7 +1428,7 @@ def get_doc_hooks():
@request_cache
def _load_app_hooks(app_name: Optional[str] = None):
def _load_app_hooks(app_name: str | None = None):
hooks = {}
apps = [app_name] if app_name else get_installed_apps(sort=True)
@@ -1425,7 +1451,7 @@ def _load_app_hooks(app_name: Optional[str] = None):
def get_hooks(
hook: str = None, default: Optional[Any] = "_KEEP_DEFAULT_LIST", app_name: str = None
hook: str = None, default: Any | None = "_KEEP_DEFAULT_LIST", app_name: str = None
) -> _dict:
"""Get hooks via `app/hooks.py`
@@ -1508,7 +1534,7 @@ def get_file_items(path, raise_not_found=False, ignore_empty_lines=True):
def get_file_json(path):
"""Read a file and return parsed JSON object."""
with open(path, "r") as f:
with open(path) as f:
return json.load(f)
@@ -1518,15 +1544,15 @@ def read_file(path, raise_not_found=False):
path = path.encode("utf-8")
if os.path.exists(path):
with open(path, "r") as f:
with open(path) as f:
return as_unicode(f.read())
elif raise_not_found:
raise IOError("{} Not Found".format(path))
raise OSError(f"{path} Not Found")
else:
return None
def get_attr(method_string):
def get_attr(method_string: str) -> Any:
"""Get python method object from its name."""
app_name = method_string.split(".")[0]
if (
@@ -1541,7 +1567,7 @@ def get_attr(method_string):
return getattr(get_module(modulename), methodname)
def call(fn, *args, **kwargs):
def call(fn: str | Callable, *args, **kwargs):
"""Call a function and match arguments."""
if isinstance(fn, str):
fn = get_attr(fn)
@@ -1551,7 +1577,7 @@ def call(fn, *args, **kwargs):
return fn(*args, **newargs)
def get_newargs(fn: Callable, kwargs: Dict[str, Any]) -> Dict[str, Any]:
def get_newargs(fn: Callable, kwargs: dict[str, Any]) -> dict[str, Any]:
"""Remove any kwargs that are not supported by the function.
Example:
@@ -1650,7 +1676,7 @@ def import_doc(path):
import_doc(path)
def copy_doc(doc, ignore_no_copy=True):
def copy_doc(doc: "Document", ignore_no_copy: bool = True) -> "Document":
"""No_copy fields also get copied."""
import copy
@@ -1788,8 +1814,8 @@ def redirect_to_message(title, html, http_status_code=None, context=None, indica
if indicator_color:
message["context"].update({"indicator_color": indicator_color})
cache().set_value("message_id:{0}".format(message_id), message, expires_in_sec=60)
location = "/message?id={0}".format(message_id)
cache().set_value(f"message_id:{message_id}", message, expires_in_sec=60)
location = f"/message?id={message_id}"
if not getattr(local, "is_ajax", False):
local.response["type"] = "redirect"
@@ -1875,7 +1901,7 @@ def get_value(*args, **kwargs):
return db.get_value(*args, **kwargs)
def as_json(obj: Union[Dict, List], indent=1, separators=None) -> str:
def as_json(obj: dict | list, indent=1, separators=None) -> str:
from frappe.utils.response import json_handler
if separators is None:
@@ -1906,7 +1932,7 @@ def get_test_records(doctype):
get_module_path(get_doctype_module(doctype)), "doctype", scrub(doctype), "test_records.json"
)
if os.path.exists(path):
with open(path, "r") as f:
with open(path) as f:
return json.loads(f.read())
else:
return []
@@ -2186,7 +2212,7 @@ def get_desk_link(doctype, name):
def bold(text):
return "<strong>{0}</strong>".format(text)
return f"<strong>{text}</strong>"
def safe_eval(code, eval_globals=None, eval_locals=None):
@@ -2214,10 +2240,10 @@ def safe_eval(code, eval_globals=None, eval_locals=None):
for attribute in UNSAFE_ATTRIBUTES:
if attribute in code:
throw('Illegal rule {0}. Cannot use "{1}"'.format(bold(code), attribute))
throw(f'Illegal rule {bold(code)}. Cannot use "{attribute}"')
if "__" in code:
throw('Illegal rule {0}. Cannot use "__"'.format(bold(code)))
throw(f'Illegal rule {bold(code)}. Cannot use "__"')
if not eval_globals:
eval_globals = {}
@@ -167,7 +167,7 @@ def validate_auth():
"""
Authenticate and sets user for the request.
"""
authorization_header = frappe.get_request_header("Authorization", str()).split(" ")
authorization_header = frappe.get_request_header("Authorization", "").split(" ")
if len(authorization_header) == 2:
validate_oauth(authorization_header)
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
@@ -34,7 +33,7 @@ SAFE_HTTP_METHODS = ("GET", "HEAD", "OPTIONS")
UNSAFE_HTTP_METHODS = ("POST", "PUT", "DELETE", "PATCH")
class RequestContext(object):
class RequestContext:
def __init__(self, environ):
self.request = Request(environ)
@@ -223,10 +222,6 @@ def handle_exception(e):
or (frappe.local.request.path.startswith("/api/") and not accept_header.startswith("text"))
)
if frappe.conf.get("developer_mode"):
# don't fail silently
print(frappe.get_traceback())
if respond_as_json:
# handle ajax responses first
# if the request is ajax, send back the trace or error message
@@ -290,6 +285,10 @@ def handle_exception(e):
if return_as_message:
response = get_response("message", http_status_code=http_status_code)
if frappe.conf.get("developer_mode") and not respond_as_json:
# don't fail silently for non-json response errors
print(frappe.get_traceback())
return response
@@ -331,12 +330,10 @@ def serve(
if not os.environ.get("NO_STATICS"):
application = SharedDataMiddleware(
application, {str("/assets"): str(os.path.join(sites_path, "assets"))}
application, {"/assets": str(os.path.join(sites_path, "assets"))}
)
application = StaticDataMiddleware(
application, {str("/files"): str(os.path.abspath(sites_path))}
)
application = StaticDataMiddleware(application, {"/files": str(os.path.abspath(sites_path))})
application.debug = True
application.config = {"SERVER_NAME": "localhost:8000"}
@@ -471,7 +471,7 @@ def get_login_attempt_tracker(user_name: str, raise_locked_exception: bool = Tru
return tracker
class LoginAttemptTracker(object):
class LoginAttemptTracker:
"""Track login attemts of a user.
Lock the account for s number of seconds if there have been n consecutive unsuccessful attempts to log in.
@@ -1,7 +1,7 @@
# Copyright (c) 2022, Frappe Technologies and contributors
# License: MIT. See LICENSE
from typing import Dict, Iterable, List
from collections.abc import Iterable
import frappe
from frappe import _
@@ -157,7 +157,7 @@ class AssignmentRule(Document):
return assignment_days and today not in assignment_days
def get_assignments(doc) -> List[Dict]:
def get_assignments(doc) -> list[dict]:
return frappe.get_all(
"ToDo",
fields=["name", "assignment_rule"],
@@ -228,7 +228,7 @@ def apply(doc=None, method=None, doctype=None, name=None):
)
# multiple auto assigns
assignment_rule_docs: List[AssignmentRule] = [
assignment_rule_docs: list[AssignmentRule] = [
frappe.get_cached_doc("Assignment Rule", d.get("name")) for d in assignment_rules
]
@@ -356,11 +356,11 @@ def update_due_date(doc, state=None):
todo_doc.save(ignore_permissions=True)
def get_assignment_rules() -> List[str]:
def get_assignment_rules() -> list[str]:
return frappe.get_all("Assignment Rule", filters={"disabled": 0}, pluck="document_type")
def get_repeated(values: Iterable) -> List:
def get_repeated(values: Iterable) -> list:
unique = set()
repeated = set()
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# License: MIT. See LICENSE
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# License: MIT. See LICENSE
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and contributors
# License: MIT. See LICENSE
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and Contributors
# License: MIT. See LICENSE
import unittest
@@ -200,7 +199,7 @@ class TestAutoRepeat(unittest.TestCase):
# next_schedule_date is set as on or after current date
# it should not be a previous month's date
self.assertTrue((doc.next_schedule_date >= current_date))
self.assertTrue(doc.next_schedule_date >= current_date)
todo = frappe.get_doc(
dict(
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and contributors
# License: MIT. See LICENSE
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# License: MIT. See LICENSE
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# License: MIT. See LICENSE
# import frappe
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# License: MIT. See LICENSE
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# License: MIT. See LICENSE
import unittest
@@ -77,6 +77,8 @@ def get_bootinfo():
# add docs
bootinfo.docs = doclist
load_country_doc(bootinfo)
load_currency_docs(bootinfo)
for method in hooks.boot_session or []:
frappe.get_attr(method)(bootinfo)
@@ -417,3 +419,34 @@ def get_translatable_doctypes():
"Property Setter", {"property": "translate_link_fields", "value": "1"}, pluck="doc_type"
)
return dts + custom_dts
def load_country_doc(bootinfo):
country = frappe.db.get_default("country")
if not country:
return
try:
bootinfo.docs.append(frappe.get_cached_doc("Country", country))
except Exception:
pass
def load_currency_docs(bootinfo):
currency = frappe.qb.DocType("Currency")
currency_docs = (
frappe.qb.from_(currency)
.select(
currency.name,
currency.fraction,
currency.fraction_units,
currency.number_format,
currency.smallest_currency_fraction_value,
currency.symbol,
currency.symbol_on_right,
)
.where(currency.enabled == 1)
.run(as_dict=1, update={"doctype": ":Currency"})
)
bootinfo.docs += currency_docs
@@ -200,7 +200,7 @@ def symlink(target, link_name, overwrite=False):
try:
# Pre-empt os.replace on a directory with a nicer message
if os.path.isdir(link_name):
raise IsADirectoryError("Cannot symlink over existing directory: '{}'".format(link_name))
raise IsADirectoryError(f"Cannot symlink over existing directory: '{link_name}'")
try:
os.replace(temp_link_name, link_name)
except AttributeError:
@@ -239,10 +239,10 @@ def bundle(
make_asset_dirs(hard_link=hard_link)
mode = "production" if mode == "production" else "build"
command = "yarn run {mode}".format(mode=mode)
command = f"yarn run {mode}"
if apps:
command += " --apps {apps}".format(apps=apps)
command += f" --apps {apps}"
if skip_frappe:
command += " --skip_frappe"
@@ -263,7 +263,7 @@ def watch(apps=None):
command = "yarn run watch"
if apps:
command += " --apps {apps}".format(apps=apps)
command += f" --apps {apps}"
live_reload = frappe.utils.cint(os.environ.get("LIVE_RELOAD", frappe.conf.live_reload))
@@ -349,13 +349,13 @@ def get_js(items):
frappe.throw(_("Invalid file path: {0}").format("/".join(src)))
contentpath = os.path.join(frappe.local.sites_path, *src)
with open(contentpath, "r") as srcfile:
with open(contentpath) as srcfile:
code = frappe.utils.cstr(srcfile.read())
if frappe.local.lang != "en":
messages = frappe.get_lang_dict("jsfile", contentpath)
messages = json.dumps(messages)
code += "\n\n$.extend(frappe._messages, {})".format(messages)
code += f"\n\n$.extend(frappe._messages, {messages})"
out.append(code)
@@ -99,7 +99,7 @@ def scheduler(context, state, site=None):
frappe.utils.scheduler.enable_scheduler()
frappe.db.commit()
print("Scheduler {0}d for site {1}".format(state, site))
print(f"Scheduler {state}d for site {site}")
finally:
frappe.destroy()
@@ -167,7 +167,7 @@ def purge_jobs(site=None, queue=None, event=None):
frappe.init(site or "")
count = purge_pending_jobs(event=event, site=site, queue=queue)
print("Purged {} jobs".format(count))
print(f"Purged {count} jobs")
@click.command("schedule")
@@ -203,11 +203,11 @@ def ready_for_migration(context, site=None):
pending_jobs = get_pending_jobs(site=site)
if pending_jobs:
print("NOT READY for migration: site {0} has pending background jobs".format(site))
print(f"NOT READY for migration: site {site} has pending background jobs")
sys.exit(1)
else:
print("READY for migration: site {0} does not have any background jobs".format(site))
print(f"READY for migration: site {site} does not have any background jobs")
return 0
finally:
@@ -257,7 +257,7 @@ def restore(
os.remove(private)
_backup.decryption_rollback()
success_message = "Site {0} has been restored{1}".format(
success_message = "Site {} has been restored{}".format(
site, " with files" if (with_public_files or with_private_files) else ""
)
click.secho(success_message, fg="green")
@@ -414,12 +414,12 @@ def install_app(context, apps, force=False):
try:
_install_app(app, verbose=context.verbose, force=force)
except frappe.IncompatibleApp as err:
err_msg = ":\n{}".format(err) if str(err) else ""
print("App {} is Incompatible with Site {}{}".format(app, site, err_msg))
err_msg = f":\n{err}" if str(err) else ""
print(f"App {app} is Incompatible with Site {site}{err_msg}")
exit_code = 1
except Exception as err:
err_msg = ": {}\n{}".format(str(err), frappe.get_traceback())
print("An error occurred while installing {}{}".format(app, err_msg))
err_msg = f": {str(err)}\n{frappe.get_traceback()}"
print(f"An error occurred while installing {app}{err_msg}")
exit_code = 1
frappe.destroy()
@@ -449,8 +449,8 @@ def list_apps(context, format):
apps = frappe.get_single("Installed Applications").installed_applications
if apps:
name_len, ver_len = [max([len(x.get(y)) for x in apps]) for y in ["app_name", "app_version"]]
template = "{{0:{0}}} {{1:{1}}} {{2}}".format(name_len, ver_len)
name_len, ver_len = (max(len(x.get(y)) for x in apps) for y in ["app_name", "app_version"])
template = f"{{0:{name_len}}} {{1:{ver_len}}} {{2}}"
installed_applications = [
template.format(app.app_name, app.app_version, app.git_branch) for app in apps
@@ -608,7 +608,7 @@ def reload_doctype(context, doctype):
def add_to_hosts(context):
"Add site to hosts"
for site in context.sites:
frappe.commands.popen("echo 127.0.0.1\t{0} | sudo tee -a /etc/hosts".format(site))
frappe.commands.popen(f"echo 127.0.0.1\t{site} | sudo tee -a /etc/hosts")
if not context.sites:
raise SiteNotSpecifiedError
@@ -624,9 +624,9 @@ def use(site, sites_path="."):
if os.path.exists(os.path.join(sites_path, site)):
with open(os.path.join(sites_path, "currentsite.txt"), "w") as sitefile:
sitefile.write(site)
print("Current Site set to {}".format(site))
print(f"Current Site set to {site}")
else:
print("Site {} does not exist".format(site))
print(f"Site {site} does not exist")
@click.command("backup")
@@ -700,7 +700,7 @@ def backup(
)
except Exception:
click.secho(
"Backup failed for Site {0}. Database or site_config.json may be corrupted".format(site),
f"Backup failed for Site {site}. Database or site_config.json may be corrupted",
fg="red",
)
if verbose:
@@ -714,7 +714,7 @@ def backup(
odb.print_summary()
click.secho(
"Backup for Site {0} has been successfully completed{1}".format(
"Backup for Site {} has been successfully completed{}".format(
site, " with files" if with_files else ""
),
fg="green",
@@ -831,8 +831,8 @@ def _drop_site(
else:
messages = [
"=" * 80,
"Error: The operation has stopped because backup of {0}'s database failed.".format(site),
"Reason: {0}\n".format(str(err)),
f"Error: The operation has stopped because backup of {site}'s database failed.",
f"Reason: {str(err)}\n",
"Fix the issue and try again.",
"Hint: Use 'bench drop-site {0} --force' to force the removal of {0}".format(site),
]
@@ -1082,7 +1082,7 @@ def build_search_index(context):
if not site:
raise SiteNotSpecifiedError
print("Building search index for {}".format(site))
print(f"Building search index for {site}")
frappe.init(site=site)
frappe.connect()
try:
@@ -387,7 +387,7 @@ def import_doc(context, path, force=False):
if not os.path.exists(path):
path = os.path.join("..", path)
if not os.path.exists(path):
print("Invalid path {0}".format(path))
print(f"Invalid path {path}")
sys.exit(1)
for site in context.sites:
@@ -471,7 +471,7 @@ def bulk_rename(context, doctype, path):
site = get_site(context)
with open(path, "r") as csvfile:
with open(path) as csvfile:
rows = read_csv_content(csvfile.read())
frappe.init(site=site)
@@ -566,7 +566,7 @@ def jupyter(context):
try:
os.stat(jupyter_notebooks_path)
except OSError:
print("Creating folder to keep jupyter notebooks at {}".format(jupyter_notebooks_path))
print(f"Creating folder to keep jupyter notebooks at {jupyter_notebooks_path}")
os.mkdir(jupyter_notebooks_path)
bin_path = os.path.abspath("../env/bin")
print(
@@ -585,9 +585,9 @@ frappe.db.connect()
)
)
os.execv(
"{0}/jupyter".format(bin_path),
f"{bin_path}/jupyter",
[
"{0}/jupyter".format(bin_path),
f"{bin_path}/jupyter",
"notebook",
jupyter_notebooks_path,
],
@@ -780,7 +780,7 @@ def run_tests(
if not (allow_tests or os.environ.get("CI")):
click.secho("Testing is disabled for the site!", bold=True)
click.secho("You can enable tests by entering following command:") | click.secho("You can enable tests by entering following command:") | ||||
click.secho("bench --site {0} set-config allow_tests true".format(site), fg="green") | |||||
click.secho(f"bench --site {site} set-config allow_tests true", fg="green") | |||||
return | return | ||||
frappe.init(site=site) | frappe.init(site=site) | ||||
@@ -963,7 +963,7 @@ def request(context, args=None, path=None): | |||||
if args.startswith("/api/method"): | if args.startswith("/api/method"): | ||||
frappe.local.form_dict.cmd = args.split("?")[0].split("/")[-1] | frappe.local.form_dict.cmd = args.split("?")[0].split("/")[-1] | ||||
elif path: | elif path: | ||||
with open(os.path.join("..", path), "r") as f: | |||||
with open(os.path.join("..", path)) as f: | |||||
args = json.loads(f.read()) | args = json.loads(f.read()) | ||||
frappe.local.form_dict = frappe._dict(args) | frappe.local.form_dict = frappe._dict(args) | ||||
@@ -3,7 +3,6 @@ | |||||
import functools | import functools | ||||
import re | import re | ||||
from typing import Dict, List | |||||
import frappe | import frappe | ||||
from frappe import _ | from frappe import _ | ||||
@@ -117,9 +116,7 @@ def get_permission_query_conditions(doctype): | |||||
# when everything is not permitted | # when everything is not permitted | ||||
for df in links.get("not_permitted_links"): | for df in links.get("not_permitted_links"): | ||||
# like ifnull(customer, '')='' and ifnull(supplier, '')='' | # like ifnull(customer, '')='' and ifnull(supplier, '')='' | ||||
conditions.append( | |||||
"ifnull(`tab{doctype}`.`{fieldname}`, '')=''".format(doctype=doctype, fieldname=df.fieldname) | |||||
) | |||||
conditions.append(f"ifnull(`tab{doctype}`.`{df.fieldname}`, '')=''") | |||||
return "( " + " and ".join(conditions) + " )" | return "( " + " and ".join(conditions) + " )" | ||||
@@ -128,9 +125,7 @@ def get_permission_query_conditions(doctype): | |||||
for df in links.get("permitted_links"): | for df in links.get("permitted_links"): | ||||
# like ifnull(customer, '')!='' or ifnull(supplier, '')!='' | # like ifnull(customer, '')!='' or ifnull(supplier, '')!='' | ||||
conditions.append( | |||||
"ifnull(`tab{doctype}`.`{fieldname}`, '')!=''".format(doctype=doctype, fieldname=df.fieldname) | |||||
) | |||||
conditions.append(f"ifnull(`tab{doctype}`.`{df.fieldname}`, '')!=''") | |||||
return "( " + " or ".join(conditions) + " )" | return "( " + " or ".join(conditions) + " )" | ||||
@@ -171,8 +166,8 @@ def delete_contact_and_address(doctype, docname): | |||||
@frappe.whitelist() | @frappe.whitelist() | ||||
@frappe.validate_and_sanitize_search_inputs | @frappe.validate_and_sanitize_search_inputs | ||||
def filter_dynamic_link_doctypes( | def filter_dynamic_link_doctypes( | ||||
doctype, txt: str, searchfield, start, page_len, filters: Dict | |||||
) -> List[List[str]]: | |||||
doctype, txt: str, searchfield, start, page_len, filters: dict | |||||
) -> list[list[str]]: | |||||
from frappe.permissions import get_doctypes_with_read | from frappe.permissions import get_doctypes_with_read | ||||
txt = txt or "" | txt = txt or "" | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and contributors | # Copyright (c) 2015, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -236,7 +235,7 @@ def address_query(doctype, txt, searchfield, start, page_len, filters): | |||||
meta = frappe.get_meta("Address") | meta = frappe.get_meta("Address") | ||||
for fieldname, value in filters.items(): | for fieldname, value in filters.items(): | ||||
if meta.get_field(fieldname) or fieldname in frappe.db.DEFAULT_COLUMNS: | if meta.get_field(fieldname) or fieldname in frappe.db.DEFAULT_COLUMNS: | ||||
condition += " and {field}={value}".format(field=fieldname, value=frappe.db.escape(value)) | |||||
condition += f" and {fieldname}={frappe.db.escape(value)}" | |||||
searchfields = meta.get_search_fields() | searchfields = meta.get_search_fields() | ||||
@@ -246,9 +245,9 @@ def address_query(doctype, txt, searchfield, start, page_len, filters): | |||||
search_condition = "" | search_condition = "" | ||||
for field in searchfields: | for field in searchfields: | ||||
if search_condition == "": | if search_condition == "": | ||||
search_condition += "`tabAddress`.`{field}` like %(txt)s".format(field=field) | |||||
search_condition += f"`tabAddress`.`{field}` like %(txt)s" | |||||
else: | else: | ||||
search_condition += " or `tabAddress`.`{field}` like %(txt)s".format(field=field) | |||||
search_condition += f" or `tabAddress`.`{field}` like %(txt)s" | |||||
return frappe.db.sql( | return frappe.db.sql( | ||||
"""select | """select | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and Contributors | # Copyright (c) 2015, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and contributors | # Copyright (c) 2015, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and Contributors | # Copyright (c) 2015, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -290,7 +290,7 @@ def get_contact_with_phone_number(number): | |||||
return | return | ||||
contacts = frappe.get_all( | contacts = frappe.get_all( | ||||
"Contact Phone", filters=[["phone", "like", "%{0}".format(number)]], fields=["parent"], limit=1 | |||||
"Contact Phone", filters=[["phone", "like", f"%{number}"]], fields=["parent"], limit=1 | |||||
) | ) | ||||
return contacts[0].parent if contacts else None | return contacts[0].parent if contacts else None | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2017, Frappe Technologies and Contributors | # Copyright (c) 2017, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and contributors | # Copyright (c) 2019, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and contributors | # Copyright (c) 2019, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2017, Frappe Technologies and contributors | # Copyright (c) 2017, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2017, Frappe Technologies and Contributors | # Copyright (c) 2017, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2017, Frappe Technologies and contributors | # Copyright (c) 2017, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2017, Frappe Technologies and Contributors | # Copyright (c) 2017, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,5 +1,4 @@ | |||||
import json | import json | ||||
from typing import Dict, List | |||||
import frappe | import frappe | ||||
from frappe.core.doctype.file.file import File, setup_folder_path | from frappe.core.doctype.file.file import File, setup_folder_path | ||||
@@ -14,7 +13,7 @@ def unzip_file(name: str): | |||||
@frappe.whitelist() | @frappe.whitelist() | ||||
def get_attached_images(doctype: str, names: List[str]) -> frappe._dict: | |||||
def get_attached_images(doctype: str, names: list[str]) -> frappe._dict: | |||||
"""get list of image urls attached in form | """get list of image urls attached in form | ||||
returns {name: ['image.jpg', 'image.png']}""" | returns {name: ['image.jpg', 'image.png']}""" | ||||
@@ -40,7 +39,7 @@ def get_attached_images(doctype: str, names: List[str]) -> frappe._dict: | |||||
@frappe.whitelist() | @frappe.whitelist() | ||||
def get_files_in_folder(folder: str, start: int = 0, page_length: int = 20) -> Dict: | |||||
def get_files_in_folder(folder: str, start: int = 0, page_length: int = 20) -> dict: | |||||
start = cint(start) | start = cint(start) | ||||
page_length = cint(page_length) | page_length = cint(page_length) | ||||
@@ -66,7 +65,7 @@ def get_files_in_folder(folder: str, start: int = 0, page_length: int = 20) -> Dict: | |||||
@frappe.whitelist() | @frappe.whitelist() | ||||
def get_files_by_search_text(text: str) -> List[Dict]: | |||||
def get_files_by_search_text(text: str) -> list[dict]: | |||||
if not text: | if not text: | ||||
return [] | return [] | ||||
@@ -102,7 +101,7 @@ def create_new_folder(file_name: str, folder: str) -> File: | |||||
@frappe.whitelist() | @frappe.whitelist() | ||||
def move_file(file_list: List[File], new_parent: str, old_parent: str) -> None: | |||||
def move_file(file_list: list[File], new_parent: str, old_parent: str) -> None: | |||||
if isinstance(file_list, str): | if isinstance(file_list, str): | ||||
file_list = json.loads(file_list) | file_list = json.loads(file_list) | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and Contributors | # Copyright (c) 2019, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -28,7 +27,7 @@ class TestAccessLog(unittest.TestCase): | |||||
"User", frappe.session.user, fieldname="api_secret" | "User", frappe.session.user, fieldname="api_secret" | ||||
) | ) | ||||
api_key = frappe.db.get_value("User", "Administrator", "api_key") | api_key = frappe.db.get_value("User", "Administrator", "api_key") | ||||
self.header = {"Authorization": "token {}:{}".format(api_key, generated_secret)} | |||||
self.header = {"Authorization": f"token {api_key}:{generated_secret}"} | |||||
self.test_html_template = """ | self.test_html_template = """ | ||||
<!DOCTYPE html> | <!DOCTYPE html> | ||||
@@ -74,9 +74,7 @@ def get_feed_match_conditions(user=None, doctype="Comment"): | |||||
user_permissions = frappe.permissions.get_user_permissions(user) | user_permissions = frappe.permissions.get_user_permissions(user) | ||||
can_read = frappe.get_user().get_can_read() | can_read = frappe.get_user().get_can_read() | ||||
can_read_doctypes = [ | |||||
"'{}'".format(dt) for dt in list(set(can_read) - set(list(user_permissions))) | |||||
] | |||||
can_read_doctypes = [f"'{dt}'" for dt in list(set(can_read) - set(list(user_permissions)))] | |||||
if can_read_doctypes: | if can_read_doctypes: | ||||
conditions += [ | conditions += [ | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and Contributors | # Copyright (c) 2015, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import time | import time | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and contributors | # Copyright (c) 2019, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import json | import json | ||||
@@ -51,7 +50,7 @@ class Comment(Document): | |||||
return | return | ||||
frappe.publish_realtime( | frappe.publish_realtime( | ||||
"update_docinfo_for_{}_{}".format(self.reference_doctype, self.reference_name), | |||||
f"update_docinfo_for_{self.reference_doctype}_{self.reference_name}", | |||||
{"doc": self.as_dict(), "key": key, "action": action}, | {"doc": self.as_dict(), "key": key, "action": action}, | ||||
after_commit=True, | after_commit=True, | ||||
) | ) | ||||
@@ -183,7 +182,7 @@ def update_comments_in_parent(reference_doctype, reference_name, _comments): | |||||
try: | try: | ||||
# use sql, so that we do not mess with the timestamp | # use sql, so that we do not mess with the timestamp | ||||
frappe.db.sql( | frappe.db.sql( | ||||
"""update `tab{0}` set `_comments`=%s where name=%s""".format(reference_doctype), # nosec | |||||
f"""update `tab{reference_doctype}` set `_comments`=%s where name=%s""", # nosec | |||||
(json.dumps(_comments[-100:]), reference_name), | (json.dumps(_comments[-100:]), reference_name), | ||||
) | ) | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and Contributors | # Copyright (c) 2019, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import json | import json | ||||
@@ -3,7 +3,6 @@ | |||||
from collections import Counter | from collections import Counter | ||||
from email.utils import getaddresses | from email.utils import getaddresses | ||||
from typing import List | |||||
from urllib.parse import unquote | from urllib.parse import unquote | ||||
from parse import compile | from parse import compile | ||||
@@ -204,7 +203,7 @@ class Communication(Document, CommunicationEmailMixin): | |||||
""" | """ | ||||
emails = split_emails(emails) if isinstance(emails, str) else (emails or []) | emails = split_emails(emails) if isinstance(emails, str) else (emails or []) | ||||
if exclude_displayname: | if exclude_displayname: | ||||
return [email.lower() for email in set([parse_addr(email)[1] for email in emails]) if email] | |||||
return [email.lower() for email in {parse_addr(email)[1] for email in emails} if email] | |||||
return [email.lower() for email in set(emails) if email] | return [email.lower() for email in set(emails) if email] | ||||
def to_list(self, exclude_displayname=True): | def to_list(self, exclude_displayname=True): | ||||
@@ -229,7 +228,7 @@ class Communication(Document, CommunicationEmailMixin): | |||||
def notify_change(self, action): | def notify_change(self, action): | ||||
frappe.publish_realtime( | frappe.publish_realtime( | ||||
"update_docinfo_for_{}_{}".format(self.reference_doctype, self.reference_name), | |||||
f"update_docinfo_for_{self.reference_doctype}_{self.reference_name}", | |||||
{"doc": self.as_dict(), "key": "communications", "action": action}, | {"doc": self.as_dict(), "key": "communications", "action": action}, | ||||
after_commit=True, | after_commit=True, | ||||
) | ) | ||||
@@ -425,7 +424,7 @@ def get_permission_query_conditions_for_communication(user): | |||||
) | ) | ||||
def get_contacts(email_strings: List[str], auto_create_contact=False) -> List[str]: | |||||
def get_contacts(email_strings: list[str], auto_create_contact=False) -> list[str]: | |||||
email_addrs = get_emails(email_strings) | email_addrs = get_emails(email_strings) | ||||
contacts = [] | contacts = [] | ||||
for email in email_addrs: | for email in email_addrs: | ||||
@@ -437,9 +436,7 @@ def get_contacts(email_strings: List[str], auto_create_contact=False) -> List[str]: | |||||
first_name = frappe.unscrub(email_parts[0]) | first_name = frappe.unscrub(email_parts[0]) | ||||
try: | try: | ||||
contact_name = ( | |||||
"{0}-{1}".format(first_name, email_parts[1]) if first_name == "Contact" else first_name | |||||
) | |||||
contact_name = f"{first_name}-{email_parts[1]}" if first_name == "Contact" else first_name | |||||
contact = frappe.get_doc( | contact = frappe.get_doc( | ||||
{"doctype": "Contact", "first_name": contact_name, "name": contact_name} | {"doctype": "Contact", "first_name": contact_name, "name": contact_name} | ||||
) | ) | ||||
@@ -455,7 +452,7 @@ def get_contacts(email_strings: List[str], auto_create_contact=False) -> List[str]: | |||||
return contacts | return contacts | ||||
def get_emails(email_strings: List[str]) -> List[str]: | |||||
def get_emails(email_strings: list[str]) -> list[str]: | |||||
email_addrs = [] | email_addrs = [] | ||||
for email_string in email_strings: | for email_string in email_strings: | ||||
@@ -522,7 +519,7 @@ def get_email_without_link(email): | |||||
except IndexError: | except IndexError: | ||||
return email | return email | ||||
return "{0}@{1}".format(email_id, email_host) | |||||
return f"{email_id}@{email_host}" | |||||
def update_parent_document_on_communication(doc): | def update_parent_document_on_communication(doc): | ||||
@@ -2,7 +2,7 @@ | |||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import json | import json | ||||
from typing import TYPE_CHECKING, Dict | |||||
from typing import TYPE_CHECKING | |||||
import frappe | import frappe | ||||
import frappe.email.smtp | import frappe.email.smtp | ||||
@@ -45,7 +45,7 @@ def make( | |||||
email_template=None, | email_template=None, | ||||
communication_type=None, | communication_type=None, | ||||
**kwargs, | **kwargs, | ||||
) -> Dict[str, str]: | |||||
) -> dict[str, str]: | |||||
"""Make a new communication. Checks for email permissions for specified Document. | """Make a new communication. Checks for email permissions for specified Document. | ||||
:param doctype: Reference DocType. | :param doctype: Reference DocType. | ||||
@@ -122,7 +122,7 @@ def _make( | |||||
email_template=None, | email_template=None, | ||||
communication_type=None, | communication_type=None, | ||||
add_signature=True, | add_signature=True, | ||||
) -> Dict[str, str]: | |||||
) -> dict[str, str]: | |||||
"""Internal method to make a new communication that ignores Permission checks.""" | """Internal method to make a new communication that ignores Permission checks.""" | ||||
sender = sender or get_formatted_email(frappe.session.user) | sender = sender or get_formatted_email(frappe.session.user) | ||||
@@ -1,5 +1,3 @@ | |||||
from typing import List | |||||
import frappe | import frappe | ||||
from frappe import _ | from frappe import _ | ||||
from frappe.core.utils import get_parent_doc | from frappe.core.utils import get_parent_doc | ||||
@@ -201,7 +199,7 @@ class CommunicationEmailMixin: | |||||
return _("Leave this conversation") | return _("Leave this conversation") | ||||
return "" | return "" | ||||
def exclude_emails_list(self, is_inbound_mail_communcation=False, include_sender=False) -> List: | |||||
def exclude_emails_list(self, is_inbound_mail_communcation=False, include_sender=False) -> list: | |||||
"""List of mail id's excluded while sending mail.""" | """List of mail id's excluded while sending mail.""" | ||||
all_ids = self.get_all_email_addresses(exclude_displayname=True) | all_ids = self.get_all_email_addresses(exclude_displayname=True) | ||||
@@ -236,7 +236,7 @@ class TestCommunication(unittest.TestCase): | |||||
"communication_medium": "Email", | "communication_medium": "Email", | ||||
"subject": "Document Link in Email", | "subject": "Document Link in Email", | ||||
"sender": "comm_sender@example.com", | "sender": "comm_sender@example.com", | ||||
"recipients": "comm_recipient+{0}+{1}@example.com".format(quote("Note"), quote(note.name)), | |||||
"recipients": "comm_recipient+{}+{}@example.com".format(quote("Note"), quote(note.name)), | |||||
} | } | ||||
).insert(ignore_permissions=True) | ).insert(ignore_permissions=True) | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and contributors | # Copyright (c) 2019, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and contributors | # Copyright (c) 2015, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and Contributors | # Copyright (c) 2015, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and contributors | # Copyright (c) 2015, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and Contributors | # Copyright (c) 2015, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -331,7 +331,7 @@ class DataExporter: | |||||
order_by = None | order_by = None | ||||
table_columns = frappe.db.get_table_columns(self.parent_doctype) | table_columns = frappe.db.get_table_columns(self.parent_doctype) | ||||
if "lft" in table_columns and "rgt" in table_columns: | if "lft" in table_columns and "rgt" in table_columns: | ||||
order_by = "`tab{doctype}`.`lft` asc".format(doctype=self.parent_doctype) | |||||
order_by = f"`tab{self.parent_doctype}`.`lft` asc" | |||||
# get permitted data only | # get permitted data only | ||||
self.data = frappe.get_list( | self.data = frappe.get_list( | ||||
self.doctype, fields=["*"], filters=self.filters, limit_page_length=None, order_by=order_by | self.doctype, fields=["*"], filters=self.filters, limit_page_length=None, order_by=order_by | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and Contributors | # Copyright (c) 2019, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and contributors | # Copyright (c) 2019, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors | # Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -132,8 +131,7 @@ class Exporter: | |||||
child_doctype = table_df.options | child_doctype = table_df.options | ||||
rows = self.add_data_row(child_doctype, child_row.parentfield, child_row, rows, i) | rows = self.add_data_row(child_doctype, child_row.parentfield, child_row, rows, i) | ||||
for row in rows: | |||||
yield row | |||||
yield from rows | |||||
def add_data_row(self, doctype, parentfield, doc, rows, row_idx): | def add_data_row(self, doctype, parentfield, doc, rows, row_idx): | ||||
if len(rows) < row_idx + 1: | if len(rows) < row_idx + 1: | ||||
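As a side note, replacing the element-by-element loop with yield from is behaviour-preserving for a plain iterable. A small illustrative sketch with made-up data:

    def loop_version(rows):
        for row in rows:
            yield row

    def delegating_version(rows):
        yield from rows

    assert list(loop_version([1, 2, 3])) == list(delegating_version([1, 2, 3]))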
@@ -156,14 +154,14 @@ class Exporter: | |||||
def get_data_as_docs(self): | def get_data_as_docs(self): | ||||
def format_column_name(df): | def format_column_name(df): | ||||
return "`tab{0}`.`{1}`".format(df.parent, df.fieldname) | |||||
return f"`tab{df.parent}`.`{df.fieldname}`" | |||||
filters = self.export_filters | filters = self.export_filters | ||||
if self.meta.is_nested_set(): | if self.meta.is_nested_set(): | ||||
order_by = "`tab{0}`.`lft` ASC".format(self.doctype) | |||||
order_by = f"`tab{self.doctype}`.`lft` ASC" | |||||
else: | else: | ||||
order_by = "`tab{0}`.`creation` DESC".format(self.doctype) | |||||
order_by = f"`tab{self.doctype}`.`creation` DESC" | |||||
parent_fields = [format_column_name(df) for df in self.fields if df.parent == self.doctype] | parent_fields = [format_column_name(df) for df in self.fields if df.parent == self.doctype] | ||||
parent_data = frappe.db.get_list( | parent_data = frappe.db.get_list( | ||||
@@ -183,7 +181,7 @@ class Exporter: | |||||
child_table_df = self.meta.get_field(key) | child_table_df = self.meta.get_field(key) | ||||
child_table_doctype = child_table_df.options | child_table_doctype = child_table_df.options | ||||
child_fields = ["name", "idx", "parent", "parentfield"] + list( | child_fields = ["name", "idx", "parent", "parentfield"] + list( | ||||
set([format_column_name(df) for df in self.fields if df.parent == child_table_doctype]) | |||||
{format_column_name(df) for df in self.fields if df.parent == child_table_doctype} | |||||
) | ) | ||||
data = frappe.db.get_all( | data = frappe.db.get_all( | ||||
child_table_doctype, | child_table_doctype, | ||||
@@ -211,16 +209,16 @@ class Exporter: | |||||
if is_parent: | if is_parent: | ||||
label = _(df.label) | label = _(df.label) | ||||
else: | else: | ||||
label = "{0} ({1})".format(_(df.label), _(df.child_table_df.label)) | |||||
label = f"{_(df.label)} ({_(df.child_table_df.label)})" | |||||
if label in header: | if label in header: | ||||
# this label is already in the header, | # this label is already in the header, | ||||
# which means two fields with the same label | # which means two fields with the same label | ||||
# add the fieldname to avoid clash | # add the fieldname to avoid clash | ||||
if is_parent: | if is_parent: | ||||
label = "{0}".format(df.fieldname) | |||||
label = f"{df.fieldname}" | |||||
else: | else: | ||||
label = "{0}.{1}".format(df.child_table_df.fieldname, df.fieldname) | |||||
label = f"{df.child_table_df.fieldname}.{df.fieldname}" | |||||
header.append(label) | header.append(label) | ||||
@@ -253,5 +251,5 @@ class Exporter: | |||||
def build_xlsx_response(self): | def build_xlsx_response(self): | ||||
build_xlsx_response(self.get_csv_array_for_export(), _(self.doctype)) | build_xlsx_response(self.get_csv_array_for_export(), _(self.doctype)) | ||||
def group_children_data_by_parent(self, children_data: typing.Dict[str, list]): | |||||
def group_children_data_by_parent(self, children_data: dict[str, list]): | |||||
return groupby_metric(children_data, key="parent") | return groupby_metric(children_data, key="parent") |
@@ -150,7 +150,7 @@ class Importer: | |||||
if self.console: | if self.console: | ||||
update_progress_bar( | update_progress_bar( | ||||
"Importing {0} records".format(total_payload_count), | |||||
f"Importing {total_payload_count} records", | |||||
current_index, | current_index, | ||||
total_payload_count, | total_payload_count, | ||||
) | ) | ||||
@@ -342,7 +342,7 @@ class Importer: | |||||
row_number = json.loads(log.get("row_indexes"))[0] | row_number = json.loads(log.get("row_indexes"))[0] | ||||
status = "Success" if log.get("success") else "Failure" | status = "Success" if log.get("success") else "Failure" | ||||
message = ( | message = ( | ||||
"Successfully Imported {0}".format(log.get("docname")) | |||||
"Successfully Imported {}".format(log.get("docname")) | |||||
if log.get("success") | if log.get("success") | ||||
else log.get("messages") | else log.get("messages") | ||||
) | ) | ||||
@@ -357,19 +357,17 @@ class Importer: | |||||
if successful_records: | if successful_records: | ||||
print() | print() | ||||
print( | |||||
"Successfully imported {0} records out of {1}".format(len(successful_records), len(import_log)) | |||||
) | |||||
print(f"Successfully imported {len(successful_records)} records out of {len(import_log)}") | |||||
if failed_records: | if failed_records: | ||||
print("Failed to import {0} records".format(len(failed_records))) | |||||
file_name = "{0}_import_on_{1}.txt".format(self.doctype, frappe.utils.now()) | |||||
print("Check {0} for errors".format(os.path.join("sites", file_name))) | |||||
print(f"Failed to import {len(failed_records)} records") | |||||
file_name = f"{self.doctype}_import_on_{frappe.utils.now()}.txt" | |||||
print("Check {} for errors".format(os.path.join("sites", file_name))) | |||||
text = "" | text = "" | ||||
for w in failed_records: | for w in failed_records: | ||||
text += "Row Indexes: {0}\n".format(str(w.get("row_indexes", []))) | |||||
text += "Messages:\n{0}\n".format("\n".join(w.get("messages", []))) | |||||
text += "Traceback:\n{0}\n\n".format(w.get("exception")) | |||||
text += "Row Indexes: {}\n".format(str(w.get("row_indexes", []))) | |||||
text += "Messages:\n{}\n".format("\n".join(w.get("messages", []))) | |||||
text += "Traceback:\n{}\n\n".format(w.get("exception")) | |||||
with open(file_name, "w") as f: | with open(file_name, "w") as f: | ||||
f.write(text) | f.write(text) | ||||
@@ -384,7 +382,7 @@ class Importer: | |||||
other_warnings.append(w) | other_warnings.append(w) | ||||
for row_number, warnings in warnings_by_row.items(): | for row_number, warnings in warnings_by_row.items(): | ||||
print("Row {0}".format(row_number)) | |||||
print(f"Row {row_number}") | |||||
for w in warnings: | for w in warnings: | ||||
print(w.get("message")) | print(w.get("message")) | ||||
@@ -578,7 +576,7 @@ class ImportFile: | |||||
extn = os.path.splitext(file_path)[1][1:] | extn = os.path.splitext(file_path)[1][1:] | ||||
file_content = None | file_content = None | ||||
with io.open(file_path, mode="rb") as f: | |||||
with open(file_path, mode="rb") as f: | |||||
file_content = f.read() | file_content = f.read() | ||||
return file_content, extn | return file_content, extn | ||||
@@ -991,9 +989,7 @@ class Column: | |||||
self.warnings.append( | self.warnings.append( | ||||
{ | { | ||||
"col": self.column_number, | "col": self.column_number, | ||||
"message": ( | |||||
"The following values do not exist for {}: {}".format(self.df.options, missing_values) | |||||
), | |||||
"message": (f"The following values do not exist for {self.df.options}: {missing_values}"), | |||||
"type": "warning", | "type": "warning", | ||||
} | } | ||||
) | ) | ||||
@@ -1023,8 +1019,8 @@ class Column: | |||||
{ | { | ||||
"col": self.column_number, | "col": self.column_number, | ||||
"message": ( | "message": ( | ||||
"The following values are invalid: {0}. Values must be" | |||||
" one of {1}".format(invalid_values, valid_values) | |||||
"The following values are invalid: {}. Values must be" | |||||
" one of {}".format(invalid_values, valid_values) | |||||
), | ), | ||||
} | } | ||||
) | ) | ||||
@@ -1110,9 +1106,9 @@ def build_fields_dict_for_column_matching(parent_doctype): | |||||
) | ) | ||||
else: | else: | ||||
name_headers = ( | name_headers = ( | ||||
"{0}.name".format(table_df.fieldname), # fieldname | |||||
"ID ({0})".format(table_df.label), # label | |||||
"{0} ({1})".format(_("ID"), translated_table_label), # translated label | |||||
f"{table_df.fieldname}.name", # fieldname | |||||
f"ID ({table_df.label})", # label | |||||
"{} ({})".format(_("ID"), translated_table_label), # translated label | |||||
) | ) | ||||
name_df.is_child_table_field = True | name_df.is_child_table_field = True | ||||
@@ -1164,11 +1160,11 @@ def build_fields_dict_for_column_matching(parent_doctype): | |||||
for header in ( | for header in ( | ||||
# fieldname | # fieldname | ||||
"{0}.{1}".format(table_df.fieldname, df.fieldname), | |||||
f"{table_df.fieldname}.{df.fieldname}", | |||||
# label | # label | ||||
"{0} ({1})".format(label, table_df.label), | |||||
f"{label} ({table_df.label})", | |||||
# translated label | # translated label | ||||
"{0} ({1})".format(translated_label, translated_table_label), | |||||
f"{translated_label} ({translated_table_label})", | |||||
): | ): | ||||
out[header] = new_df | out[header] = new_df | ||||
@@ -1177,8 +1173,8 @@ def build_fields_dict_for_column_matching(parent_doctype): | |||||
autoname_field = get_autoname_field(parent_doctype) | autoname_field = get_autoname_field(parent_doctype) | ||||
if autoname_field: | if autoname_field: | ||||
for header in ( | for header in ( | ||||
"ID ({})".format(autoname_field.label), # label | |||||
"{0} ({1})".format(_("ID"), _(autoname_field.label)), # translated label | |||||
f"ID ({autoname_field.label})", # label | |||||
"{} ({})".format(_("ID"), _(autoname_field.label)), # translated label | |||||
# ID field should also map to the autoname field | # ID field should also map to the autoname field | ||||
"ID", | "ID", | ||||
_("ID"), | _("ID"), | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and Contributors | # Copyright (c) 2020, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
# import frappe | # import frappe | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and Contributors | # Copyright (c) 2019, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and Contributors | # Copyright (c) 2019, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and contributors | # Copyright (c) 2015, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and Contributors | # Copyright (c) 2015, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -248,7 +248,7 @@ class DocType(Document): | |||||
self.flags.update_fields_to_fetch_queries = [] | self.flags.update_fields_to_fetch_queries = [] | ||||
if set(old_fields_to_fetch) != set(df.fieldname for df in new_meta.get_fields_to_fetch()): | |||||
if set(old_fields_to_fetch) != {df.fieldname for df in new_meta.get_fields_to_fetch()}: | |||||
for df in new_meta.get_fields_to_fetch(): | for df in new_meta.get_fields_to_fetch(): | ||||
if df.fieldname not in old_fields_to_fetch: | if df.fieldname not in old_fields_to_fetch: | ||||
link_fieldname, source_fieldname = df.fetch_from.split(".", 1) | link_fieldname, source_fieldname = df.fetch_from.split(".", 1) | ||||
@@ -385,7 +385,7 @@ class DocType(Document): | |||||
try: | try: | ||||
frappe.db.updatedb(self.name, Meta(self)) | frappe.db.updatedb(self.name, Meta(self)) | ||||
except Exception as e: | except Exception as e: | ||||
print("\n\nThere was an issue while migrating the DocType: {}\n".format(self.name)) | |||||
print(f"\n\nThere was an issue while migrating the DocType: {self.name}\n") | |||||
raise e | raise e | ||||
self.change_modified_of_parent() | self.change_modified_of_parent() | ||||
@@ -552,7 +552,7 @@ class DocType(Document): | |||||
for fname in ("{}.js", "{}.py", "{}_list.js", "{}_calendar.js", "test_{}.py", "test_{}.js"): | for fname in ("{}.js", "{}.py", "{}_list.js", "{}_calendar.js", "test_{}.py", "test_{}.js"): | ||||
fname = os.path.join(new_path, fname.format(frappe.scrub(new))) | fname = os.path.join(new_path, fname.format(frappe.scrub(new))) | ||||
if os.path.exists(fname): | if os.path.exists(fname): | ||||
with open(fname, "r") as f: | |||||
with open(fname) as f: | |||||
code = f.read() | code = f.read() | ||||
with open(fname, "w") as f: | with open(fname, "w") as f: | ||||
if fname.endswith(".js"): | if fname.endswith(".js"): | ||||
@@ -569,7 +569,7 @@ class DocType(Document): | |||||
f.write(file_content) | f.write(file_content) | ||||
# updating json file with new name | # updating json file with new name | ||||
doctype_json_path = os.path.join(new_path, "{}.json".format(frappe.scrub(new))) | |||||
doctype_json_path = os.path.join(new_path, f"{frappe.scrub(new)}.json") | |||||
current_data = frappe.get_file_json(doctype_json_path) | current_data = frappe.get_file_json(doctype_json_path) | ||||
current_data["name"] = new | current_data["name"] = new | ||||
@@ -643,7 +643,7 @@ class DocType(Document): | |||||
path = get_file_path(self.module, "DocType", self.name) | path = get_file_path(self.module, "DocType", self.name) | ||||
if os.path.exists(path): | if os.path.exists(path): | ||||
try: | try: | ||||
with open(path, "r") as txtfile: | |||||
with open(path) as txtfile: | |||||
olddoc = json.loads(txtfile.read()) | olddoc = json.loads(txtfile.read()) | ||||
old_field_names = [f["fieldname"] for f in olddoc.get("fields", [])] | old_field_names = [f["fieldname"] for f in olddoc.get("fields", [])] | ||||
@@ -652,14 +652,14 @@ class DocType(Document): | |||||
remaining_field_names = [f.fieldname for f in self.fields] | remaining_field_names = [f.fieldname for f in self.fields] | ||||
for fieldname in old_field_names: | for fieldname in old_field_names: | ||||
field_dict = list(filter(lambda d: d["fieldname"] == fieldname, docdict["fields"])) | |||||
field_dict = [f for f in docdict["fields"] if f["fieldname"] == fieldname] | |||||
if field_dict: | if field_dict: | ||||
new_field_dicts.append(field_dict[0]) | new_field_dicts.append(field_dict[0]) | ||||
if fieldname in remaining_field_names: | if fieldname in remaining_field_names: | ||||
remaining_field_names.remove(fieldname) | remaining_field_names.remove(fieldname) | ||||
for fieldname in remaining_field_names: | for fieldname in remaining_field_names: | ||||
field_dict = list(filter(lambda d: d["fieldname"] == fieldname, docdict["fields"])) | |||||
field_dict = [f for f in docdict["fields"] if f["fieldname"] == fieldname] | |||||
new_field_dicts.append(field_dict[0]) | new_field_dicts.append(field_dict[0]) | ||||
docdict["fields"] = new_field_dicts | docdict["fields"] = new_field_dicts | ||||
@@ -674,14 +674,14 @@ class DocType(Document): | |||||
remaining_field_names = [f["fieldname"] for f in docdict.get("fields", [])] | remaining_field_names = [f["fieldname"] for f in docdict.get("fields", [])] | ||||
for fieldname in docdict.get("field_order"): | for fieldname in docdict.get("field_order"): | ||||
field_dict = list(filter(lambda d: d["fieldname"] == fieldname, docdict.get("fields", []))) | |||||
field_dict = [f for f in docdict.get("fields", []) if f["fieldname"] == fieldname] | |||||
if field_dict: | if field_dict: | ||||
new_field_dicts.append(field_dict[0]) | new_field_dicts.append(field_dict[0]) | ||||
if fieldname in remaining_field_names: | if fieldname in remaining_field_names: | ||||
remaining_field_names.remove(fieldname) | remaining_field_names.remove(fieldname) | ||||
for fieldname in remaining_field_names: | for fieldname in remaining_field_names: | ||||
field_dict = list(filter(lambda d: d["fieldname"] == fieldname, docdict.get("fields", []))) | |||||
field_dict = [f for f in docdict.get("fields", []) if f["fieldname"] == fieldname] | |||||
new_field_dicts.append(field_dict[0]) | new_field_dicts.append(field_dict[0]) | ||||
docdict["fields"] = new_field_dicts | docdict["fields"] = new_field_dicts | ||||
@@ -804,7 +804,7 @@ class DocType(Document): | |||||
{"label": "Old Parent", "fieldtype": "Link", "options": self.name, "fieldname": "old_parent"}, | {"label": "Old Parent", "fieldtype": "Link", "options": self.name, "fieldname": "old_parent"}, | ||||
) | ) | ||||
parent_field_label = "Parent {}".format(self.name) | |||||
parent_field_label = f"Parent {self.name}" | |||||
parent_field_name = frappe.scrub(parent_field_label) | parent_field_name = frappe.scrub(parent_field_label) | ||||
self.append( | self.append( | ||||
"fields", | "fields", | ||||
@@ -1417,7 +1417,7 @@ def validate_fields(meta): | |||||
def check_max_height(docfield): | def check_max_height(docfield): | ||||
if getattr(docfield, "max_height", None) and (docfield.max_height[-2:] not in ("px", "em")): | if getattr(docfield, "max_height", None) and (docfield.max_height[-2:] not in ("px", "em")): | ||||
frappe.throw("Max for {} height must be in px, em, rem".format(frappe.bold(docfield.fieldname))) | |||||
frappe.throw(f"Max for {frappe.bold(docfield.fieldname)} height must be in px, em, rem") | |||||
def check_no_of_ratings(docfield): | def check_no_of_ratings(docfield): | ||||
if docfield.fieldtype == "Rating": | if docfield.fieldtype == "Rating": | ||||
@@ -1,10 +1,8 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import random | import random | ||||
import string | import string | ||||
import unittest | import unittest | ||||
from typing import Dict, List, Optional | |||||
from unittest.mock import patch | from unittest.mock import patch | ||||
import frappe | import frappe | ||||
@@ -187,7 +185,7 @@ class TestDocType(unittest.TestCase): | |||||
"module": "Core", | "module": "Core", | ||||
"custom": 1, | "custom": 1, | ||||
"fields": [ | "fields": [ | ||||
{"fieldname": "{0}_field".format(field_option), "fieldtype": "Data", "options": field_option} | |||||
{"fieldname": f"{field_option}_field", "fieldtype": "Data", "options": field_option} | |||||
], | ], | ||||
} | } | ||||
) | ) | ||||
@@ -711,10 +709,10 @@ class TestDocType(unittest.TestCase): | |||||
def new_doctype( | def new_doctype( | ||||
name: Optional[str] = None, | |||||
name: str | None = None, | |||||
unique: bool = False, | unique: bool = False, | ||||
depends_on: str = "", | depends_on: str = "", | ||||
fields: Optional[List[Dict]] = None, | |||||
fields: list[dict] | None = None, | |||||
**kwargs, | **kwargs, | ||||
): | ): | ||||
if not name: | if not name: | ||||
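For context, the annotation rewrites in these hunks depend on two language additions: builtin generics such as list[dict] (PEP 585, Python 3.9) and the X | None union syntax (PEP 604, Python 3.10). A minimal sketch of the before and after forms, using hypothetical functions:

    from typing import Dict, List, Optional

    def old_style(names: List[str], fields: Optional[List[Dict]] = None) -> Dict[str, str]:
        return {}

    def new_style(names: list[str], fields: list[dict] | None = None) -> dict[str, str]:
        return {}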
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and contributors | # Copyright (c) 2019, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2019, Frappe Technologies and contributors | # Copyright (c) 2019, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and contributors | # Copyright (c) 2020, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and Contributors | # Copyright (c) 2020, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and contributors | # Copyright (c) 2020, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and Contributors | # Copyright (c) 2020, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
# import frappe | # import frappe | ||||
@@ -1,7 +1,6 @@ | |||||
# Copyright (c) 2022, Frappe Technologies and contributors | # Copyright (c) 2022, Frappe Technologies and contributors | ||||
# For license information, please see license.txt | # For license information, please see license.txt | ||||
from typing import List, Set | |||||
import frappe | import frappe | ||||
from frappe import _ | from frappe import _ | ||||
@@ -25,7 +24,7 @@ class DocumentNamingSettings(Document): | |||||
return {"transactions": transactions, "prefixes": prefixes} | return {"transactions": transactions, "prefixes": prefixes} | ||||
def _get_transactions(self) -> List[str]: | |||||
def _get_transactions(self) -> list[str]: | |||||
readable_doctypes = set(get_doctypes_with_read()) | readable_doctypes = set(get_doctypes_with_read()) | ||||
@@ -34,7 +33,7 @@ class DocumentNamingSettings(Document): | |||||
return sorted(readable_doctypes.intersection(standard + custom)) | return sorted(readable_doctypes.intersection(standard + custom)) | ||||
def _get_prefixes(self, doctypes) -> List[str]: | |||||
def _get_prefixes(self, doctypes) -> list[str]: | |||||
"""Get all prefixes for naming series. | """Get all prefixes for naming series. | ||||
- For all templates prefix is evaluated considering today's date | - For all templates prefix is evaluated considering today's date | ||||
@@ -63,7 +62,7 @@ class DocumentNamingSettings(Document): | |||||
return self._evaluate_and_clean_templates(series_templates) | return self._evaluate_and_clean_templates(series_templates) | ||||
def _evaluate_and_clean_templates(self, series_templates: Set[str]) -> List[str]: | |||||
def _evaluate_and_clean_templates(self, series_templates: set[str]) -> list[str]: | |||||
evalauted_prefix = set() | evalauted_prefix = set() | ||||
series = frappe.qb.DocType("Series") | series = frappe.qb.DocType("Series") | ||||
@@ -79,7 +78,7 @@ class DocumentNamingSettings(Document): | |||||
return sorted(evalauted_prefix) | return sorted(evalauted_prefix) | ||||
def get_options_list(self, options: str) -> List[str]: | |||||
def get_options_list(self, options: str) -> list[str]: | |||||
return [op.strip() for op in options.split("\n") if op.strip()] | return [op.strip() for op in options.split("\n") if op.strip()] | ||||
@frappe.whitelist() | @frappe.whitelist() | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2017, Frappe Technologies and contributors | # Copyright (c) 2017, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -113,8 +112,8 @@ class Domain(Document): | |||||
# enable | # enable | ||||
frappe.db.sql( | frappe.db.sql( | ||||
"""update `tabPortal Menu Item` set enabled=1 | """update `tabPortal Menu Item` set enabled=1 | ||||
where route in ({0})""".format( | |||||
", ".join('"{0}"'.format(d) for d in self.data.allow_sidebar_items) | |||||
where route in ({})""".format( | |||||
", ".join(f'"{d}"' for d in self.data.allow_sidebar_items) | |||||
) | ) | ||||
) | ) | ||||
@@ -125,7 +124,7 @@ class Domain(Document): | |||||
# enable | # enable | ||||
frappe.db.sql( | frappe.db.sql( | ||||
"""update `tabPortal Menu Item` set enabled=0 | """update `tabPortal Menu Item` set enabled=0 | ||||
where route in ({0})""".format( | |||||
", ".join('"{0}"'.format(d) for d in self.data.remove_sidebar_items) | |||||
where route in ({})""".format( | |||||
", ".join(f'"{d}"' for d in self.data.remove_sidebar_items) | |||||
) | ) | ||||
) | ) |
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2017, Frappe Technologies and Contributors | # Copyright (c) 2017, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2017, Frappe Technologies and contributors | # Copyright (c) 2017, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -32,7 +31,7 @@ class DomainSettings(Document): | |||||
def restrict_roles_and_modules(self): | def restrict_roles_and_modules(self): | ||||
"""Disable all restricted roles and set `restrict_to_domain` property in Module Def""" | """Disable all restricted roles and set `restrict_to_domain` property in Module Def""" | ||||
active_domains = frappe.get_active_domains() | active_domains = frappe.get_active_domains() | ||||
all_domains = list((frappe.get_hooks("domains") or {})) | |||||
all_domains = list(frappe.get_hooks("domains") or {}) | |||||
def remove_role(role): | def remove_role(role): | ||||
frappe.db.delete("Has Role", {"role": role}) | frappe.db.delete("Has Role", {"role": role}) | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and contributors | # Copyright (c) 2015, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and contributors | # Copyright (c) 2015, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and Contributors | # Copyright (c) 2015, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -7,7 +7,6 @@ import os | |||||
import re | import re | ||||
import shutil | import shutil | ||||
import zipfile | import zipfile | ||||
from typing import List, Optional, Union | |||||
from urllib.parse import quote, unquote | from urllib.parse import quote, unquote | ||||
from PIL import Image, ImageFile, ImageOps | from PIL import Image, ImageFile, ImageOps | ||||
@@ -134,7 +133,7 @@ class File(Document): | |||||
shutil.move(source, target) | shutil.move(source, target) | ||||
self.flags.pop("original_path") | self.flags.pop("original_path") | ||||
def get_name_based_on_parent_folder(self) -> Union[str, None]: | |||||
def get_name_based_on_parent_folder(self) -> str | None: | |||||
if self.folder: | if self.folder: | ||||
return os.path.join(self.folder, self.file_name) | return os.path.join(self.folder, self.file_name) | ||||
@@ -328,7 +327,7 @@ class File(Document): | |||||
file_path = get_files_path(file_name, is_private=self.is_private) | file_path = get_files_path(file_name, is_private=self.is_private) | ||||
with open(file_path, "rb") as f: | with open(file_path, "rb") as f: | ||||
self.content_hash = get_content_hash(f.read()) | self.content_hash = get_content_hash(f.read()) | ||||
except IOError: | |||||
except OSError: | |||||
frappe.throw(_("File {0} does not exist").format(file_path)) | frappe.throw(_("File {0} does not exist").format(file_path)) | ||||
def make_thumbnail( | def make_thumbnail( | ||||
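A short note on the exception changes: since Python 3.3, IOError has been an alias of OSError, so catching OSError is equivalent and simply uses the canonical name. An illustrative check with a hypothetical path:

    # IOError and OSError are the same class on Python 3.3+.
    assert IOError is OSError
    try:
        open("/path/that/does/not/exist")
    except OSError as exc:  # also catches errors historically raised as IOError
        print(type(exc).__name__)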
@@ -347,7 +346,7 @@ class File(Document): | |||||
image, filename, extn = get_local_image(self.file_url) | image, filename, extn = get_local_image(self.file_url) | ||||
else: | else: | ||||
image, filename, extn = get_web_image(self.file_url) | image, filename, extn = get_web_image(self.file_url) | ||||
except (HTTPError, SSLError, IOError, TypeError): | |||||
except (HTTPError, SSLError, OSError, TypeError): | |||||
return | return | ||||
size = width, height | size = width, height | ||||
@@ -364,7 +363,7 @@ class File(Document): | |||||
if set_as_thumbnail: | if set_as_thumbnail: | ||||
self.db_set("thumbnail_url", thumbnail_url) | self.db_set("thumbnail_url", thumbnail_url) | ||||
except IOError: | |||||
except OSError: | |||||
frappe.msgprint(_("Unable to write file format for {0}").format(path)) | frappe.msgprint(_("Unable to write file format for {0}").format(path)) | ||||
return | return | ||||
@@ -387,7 +386,7 @@ class File(Document): | |||||
else: | else: | ||||
self.delete_file_data_content(only_thumbnail=True) | self.delete_file_data_content(only_thumbnail=True) | ||||
def unzip(self) -> List["File"]: | |||||
def unzip(self) -> list["File"]: | |||||
"""Unzip current file and replace it by its children""" | """Unzip current file and replace it by its children""" | ||||
if not self.file_url.endswith(".zip"): | if not self.file_url.endswith(".zip"): | ||||
frappe.throw(_("{0} is not a zip file").format(self.file_name)) | frappe.throw(_("{0} is not a zip file").format(self.file_name)) | ||||
@@ -506,7 +505,7 @@ class File(Document): | |||||
def save_file( | def save_file( | ||||
self, | self, | ||||
content: Optional[Union[bytes, str]] = None, | |||||
content: bytes | str | None = None, | |||||
decode=False, | decode=False, | ||||
ignore_existing_file_check=False, | ignore_existing_file_check=False, | ||||
overwrite=False, | overwrite=False, | ||||
@@ -4,7 +4,7 @@ import mimetypes | |||||
import os | import os | ||||
import re | import re | ||||
from io import BytesIO | from io import BytesIO | ||||
from typing import TYPE_CHECKING, Optional, Tuple, Union | |||||
from typing import TYPE_CHECKING, Optional | |||||
from urllib.parse import unquote | from urllib.parse import unquote | ||||
import requests | import requests | ||||
@@ -55,8 +55,8 @@ def setup_folder_path(filename: str, new_parent: str) -> None: | |||||
def get_extension( | def get_extension( | ||||
filename, | filename, | ||||
extn: Optional[str] = None, | |||||
content: Optional[bytes] = None, | |||||
extn: str | None = None, | |||||
content: bytes | None = None, | |||||
response: Optional["Response"] = None, | response: Optional["Response"] = None, | ||||
) -> str: | ) -> str: | ||||
mimetype = None | mimetype = None | ||||
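Note that `response: Optional["Response"] = None` is left unchanged even though the neighbouring parameters switch to `X | None`. Presumably this is because `Response` is only imported for type checkers, so the annotation is a string forward reference: evaluating `"Response" | None` at function-definition time would raise a `TypeError`, whereas `Optional["Response"]` works because `typing` wraps the string in a ForwardRef without evaluating it. A hedged sketch of the pattern (the `requests` import guard here is an assumption, not taken from the patch):

from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
    from requests import Response  # visible to type checkers only, never imported at runtime

# Works: typing.Optional stores the string as a ForwardRef.
def handle(response: Optional["Response"] = None) -> None: ...

# Would fail at import time: "Response" | None calls str.__or__(None) -> TypeError.
# def handle(response: "Response" | None = None) -> None: ...

# Alternatives: quote the whole annotation, or add
# "from __future__ import annotations" at the top of the module.
def handle_quoted(response: "Response | None" = None) -> None: ...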
@@ -83,7 +83,7 @@ def get_extension( | |||||
return extn | return extn | ||||
def get_local_image(file_url: str) -> Tuple["ImageFile", str, str]: | |||||
def get_local_image(file_url: str) -> tuple["ImageFile", str, str]: | |||||
if file_url.startswith("/private"): | if file_url.startswith("/private"): | ||||
file_url_path = (file_url.lstrip("/"),) | file_url_path = (file_url.lstrip("/"),) | ||||
else: | else: | ||||
@@ -93,7 +93,7 @@ def get_local_image(file_url: str) -> Tuple["ImageFile", str, str]: | |||||
try: | try: | ||||
image = Image.open(file_path) | image = Image.open(file_path) | ||||
except IOError: | |||||
except OSError: | |||||
frappe.throw(_("Unable to read file format for {0}").format(file_url)) | frappe.throw(_("Unable to read file format for {0}").format(file_url)) | ||||
content = None | content = None | ||||
@@ -102,7 +102,7 @@ def get_local_image(file_url: str) -> Tuple["ImageFile", str, str]: | |||||
filename, extn = file_url.rsplit(".", 1) | filename, extn = file_url.rsplit(".", 1) | ||||
except ValueError: | except ValueError: | ||||
# no extn | # no extn | ||||
with open(file_path, "r") as f: | |||||
with open(file_path) as f: | |||||
content = f.read() | content = f.read() | ||||
filename = file_url | filename = file_url | ||||
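Dropping the explicit `"r"` argument is purely cosmetic: `open()` defaults to read-only text mode, so `open(path)` and `open(path, "r")` behave identically. A minimal self-contained sketch using a hypothetical temporary file:

import os
import tempfile

# open() defaults to mode "r" (read-only text); the explicit flag is redundant.
with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False) as tmp:
    tmp.write("hello")

with open(tmp.name) as f:        # same as open(tmp.name, "r")
    print(f.read())              # hello

os.unlink(tmp.name)              # clean up the temporary file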
@@ -113,7 +113,7 @@ def get_local_image(file_url: str) -> Tuple["ImageFile", str, str]: | |||||
return image, filename, extn | return image, filename, extn | ||||
def get_web_image(file_url: str) -> Tuple["ImageFile", str, str]: | |||||
def get_web_image(file_url: str) -> tuple["ImageFile", str, str]: | |||||
# download | # download | ||||
file_url = frappe.utils.get_url(file_url) | file_url = frappe.utils.get_url(file_url) | ||||
r = requests.get(file_url, stream=True) | r = requests.get(file_url, stream=True) | ||||
@@ -179,13 +179,13 @@ def remove_file_by_url(file_url: str, doctype: str = None, name: str = None) -> | |||||
return remove_file(fid=fid) | return remove_file(fid=fid) | ||||
def get_content_hash(content: Union[bytes, str]) -> str: | |||||
def get_content_hash(content: bytes | str) -> str: | |||||
if isinstance(content, str): | if isinstance(content, str): | ||||
content = content.encode() | content = content.encode() | ||||
return hashlib.md5(content).hexdigest() # nosec | return hashlib.md5(content).hexdigest() # nosec | ||||
def generate_file_name(name: str, suffix: Optional[str] = None, is_private: bool = False) -> str: | |||||
def generate_file_name(name: str, suffix: str | None = None, is_private: bool = False) -> str: | |||||
"""Generate conflict-free file name. Suffix will be ignored if name available. If the | """Generate conflict-free file name. Suffix will be ignored if name available. If the | ||||
provided suffix doesn't result in an available path, a random suffix will be picked. | provided suffix doesn't result in an available path, a random suffix will be picked. | ||||
""" | """ | ||||
@@ -203,7 +203,7 @@ def generate_file_name(name: str, suffix: Optional[str] = None, is_private: bool | |||||
return candidate_path | return candidate_path | ||||
def get_file_name(fname: str, optional_suffix: Optional[str] = None) -> str: | |||||
def get_file_name(fname: str, optional_suffix: str | None = None) -> str: | |||||
# convert to unicode | # convert to unicode | ||||
fname = cstr(fname) | fname = cstr(fname) | ||||
partial, extn = os.path.splitext(fname) | partial, extn = os.path.splitext(fname) | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2017, Frappe Technologies and contributors | # Copyright (c) 2017, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and contributors | # Copyright (c) 2015, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and contributors | # Copyright (c) 2020, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and contributors | # Copyright (c) 2020, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and Contributors | # Copyright (c) 2020, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
# import frappe | # import frappe | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and contributors | # Copyright (c) 2015, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -42,7 +41,7 @@ def export_languages_json(): | |||||
def sync_languages(): | def sync_languages(): | ||||
"""Sync frappe/geo/languages.json with Language""" | """Sync frappe/geo/languages.json with Language""" | ||||
with open(frappe.get_app_path("frappe", "geo", "languages.json"), "r") as f: | |||||
with open(frappe.get_app_path("frappe", "geo", "languages.json")) as f: | |||||
data = json.loads(f.read()) | data = json.loads(f.read()) | ||||
for l in data: | for l in data: | ||||
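The only change in this hunk is dropping the redundant `"r"` mode; the read-then-parse pattern is untouched, although `json.load(f)` would do the same thing in one step. A self-contained sketch of that equivalent (the sample payload is hypothetical):

import io
import json

# json.load(fp) parses straight from the file object; json.loads(fp.read()) is equivalent.
sample = io.StringIO('[{"language_code": "en", "language_name": "English"}]')
for lang in json.load(sample):
    print(lang["language_code"], lang["language_name"])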
@@ -59,7 +58,7 @@ def sync_languages(): | |||||
def update_language_names(): | def update_language_names(): | ||||
"""Update frappe/geo/languages.json names (for use via patch)""" | """Update frappe/geo/languages.json names (for use via patch)""" | ||||
with open(frappe.get_app_path("frappe", "geo", "languages.json"), "r") as f: | |||||
with open(frappe.get_app_path("frappe", "geo", "languages.json")) as f: | |||||
data = json.loads(f.read()) | data = json.loads(f.read()) | ||||
for l in data: | for l in data: | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2015, Frappe Technologies and Contributors | # Copyright (c) 2015, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
import unittest | import unittest | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and contributors | # Copyright (c) 2020, Frappe Technologies and contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
@@ -1,4 +1,3 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2020, Frappe Technologies and Contributors | # Copyright (c) 2020, Frappe Technologies and Contributors | ||||
# License: MIT. See LICENSE | # License: MIT. See LICENSE | ||||
# import frappe | # import frappe | ||||