
Replaced urllib.quote imports with six.moves.urllib.parse.quote (#3837)

version-14
Aditya Hase, 8 years ago
committed by Rushabh Mehta
commit 9d7d384a63
7 changed files with 14 additions and 9 deletions
  1. frappe/auth.py (+1, -1)
  2. frappe/integrations/oauth2.py (+2, -1)
  3. frappe/utils/__init__.py (+3, -2)
  4. frappe/utils/data.py (+2, -1)
  5. frappe/website/doctype/website_settings/website_settings.py (+1, -1)
  6. frappe/www/rss.py (+2, -1)
  7. frappe/www/sitemap.py (+3, -2)
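
For readers less familiar with six: on Python 2, quote lives in the top-level urllib module, while on Python 3 it moved to urllib.parse; six.moves.urllib.parse simply aliases whichever is available, so one import line covers both interpreters. A minimal sketch of the equivalence (the printed value is only an illustration):

# What six.moves.urllib.parse.quote resolves to, sketched without six:
try:
    from urllib.parse import quote  # Python 3
except ImportError:
    from urllib import quote        # Python 2

print(quote("hello world/"))  # 'hello%20world/'  ('/' is safe by default)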

frappe/auth.py (+1, -1)

@@ -17,7 +17,7 @@ from frappe.translate import get_lang_code
 from frappe.utils.password import check_password
 from frappe.core.doctype.authentication_log.authentication_log import add_authentication_log
 
-from urllib import quote
+from six.moves.urllib.parse import quote
 
 class HTTPRequest:
 	def __init__(self):


frappe/integrations/oauth2.py (+2, -1)

@@ -2,7 +2,8 @@ from __future__ import unicode_literals
 import frappe, json
 from frappe.oauth import OAuthWebRequestValidator, WebApplicationServer
 from oauthlib.oauth2 import FatalClientError, OAuth2Error
-from urllib import quote, urlencode
+from urllib import urlencode
+from six.moves.urllib.parse import quote
 from werkzeug import url_fix
 from urlparse import urlparse
 from frappe.integrations.doctype.oauth_provider_settings.oauth_provider_settings import get_oauth_settings
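
Worth noting in passing (unchanged by this commit): the remaining "from urllib import urlencode" and "from urlparse import urlparse" are themselves Python-2-only imports. A rough sketch of how they would look through six.moves as well; the sample values are purely illustrative:

# Illustrative sketch only, not part of this commit: urlencode, urlparse and
# quote all resolve on both Python 2 and 3 via six.moves.urllib.parse.
from six.moves.urllib.parse import quote, urlencode, urlparse

query = urlencode({"next": "/app/page one"})       # 'next=%2Fapp%2Fpage+one'
parts = urlparse("https://example.com/callback")   # works on 2 and 3
path = quote(parts.path)                           # '/callback'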


frappe/utils/__init__.py (+3, -2)

@@ -5,7 +5,7 @@
 
 from __future__ import unicode_literals, print_function
 from werkzeug.test import Client
-import os, re, urllib, sys, json, hashlib, requests, traceback
+import os, re, sys, json, hashlib, requests, traceback
 from markdown2 import markdown as _markdown
 from .html_utils import sanitize_html
 import frappe
@@ -13,6 +13,7 @@ from frappe.utils.identicon import Identicon
 from email.utils import parseaddr, formataddr
 # utility functions like cint, int, flt, etc.
 from frappe.utils.data import *
+from six.moves.urllib.parse import quote
 from six import text_type
 
 default_fields = ['doctype', 'name', 'owner', 'creation', 'modified', 'modified_by',
@@ -178,7 +179,7 @@ def dict_to_str(args, sep='&'):
 	"""
 	t = []
 	for k in args.keys():
-		t.append(str(k)+'='+urllib.quote(str(args[k] or '')))
+		t.append(str(k)+'='+quote(str(args[k] or '')))
 	return sep.join(t)
 
 # Get Defaults
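
For context, dict_to_str builds a query string by hand; a rough illustration of what the updated line produces, with made-up values:

# Illustrative only: the same key=value joining that dict_to_str does,
# now using the six-based quote.
from six.moves.urllib.parse import quote

args = {"cmd": "frappe.client.get_list", "filters": '{"name": "Note 1"}'}
pairs = [str(k) + '=' + quote(str(v or '')) for k, v in args.items()]
print('&'.join(pairs))
# cmd=frappe.client.get_list&filters=%7B%22name%22%3A%20%22Note%201%22%7D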


frappe/utils/data.py (+2, -1)

@@ -12,6 +12,7 @@ from babel.core import UnknownLocaleError
 from dateutil import parser
 from num2words import num2words
 from six.moves import html_parser as HTMLParser
+from six.moves.urllib.parse import quote
 from html2text import html2text
 from six import iteritems, text_type
 
@@ -795,7 +796,7 @@ def expand_relative_urls(html):
 	return html
 
 def quoted(url):
-	return cstr(urllib.quote(encode(url), safe=b"~@#$&()*!+=:;,.?/'"))
+	return cstr(quote(encode(url), safe=b"~@#$&()*!+=:;,.?/'"))
 
 def quote_urls(html):
 	def _quote_url(match):
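
A small illustration of what quoted() relies on: with that safe set, common URL punctuation is left alone while spaces and non-ASCII bytes are escaped. The path below is made up for the example:

# Illustrative only: quote with the same safe characters as quoted() above.
from six.moves.urllib.parse import quote

url = u"/files/My Photo (1).jpg".encode("utf-8")
print(quote(url, safe=b"~@#$&()*!+=:;,.?/'"))
# /files/My%20Photo%20(1).jpg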


frappe/website/doctype/website_settings/website_settings.py (+1, -1)

@@ -6,7 +6,7 @@ import frappe
 from frappe import _
 from frappe.utils import get_request_site_address, encode
 from frappe.model.document import Document
-from urllib import quote
+from six.moves.urllib.parse import quote
 from frappe.website.router import resolve_route
 from frappe.website.doctype.website_theme.website_theme import add_website_theme



frappe/www/rss.py (+2, -1)

@@ -5,6 +5,7 @@ from __future__ import unicode_literals
 import frappe
 import urllib
 from frappe.utils import escape_html, get_request_site_address, now, cstr
+from six.moves.urllib.parse import quote
 
 no_cache = 1
 base_template_path = "templates/www/rss.xml"
@@ -20,7 +21,7 @@ def get_context(context):
 		order by published_on desc limit 20""", as_dict=1)
 
 	for blog in blog_list:
-		blog_page = cstr(urllib.quote(blog.name.encode("utf-8")))
+		blog_page = cstr(quote(blog.name.encode("utf-8")))
 		blog.link = urllib.basejoin(host, blog_page)
 		blog.content = escape_html(blog.content or "")
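
Note that "import urllib" stays in this file because urllib.basejoin is still used on the next line, and that helper exists only on Python 2. A hypothetical Python-3-ready version of those two lines, not part of this commit, might look like:

# Illustrative sketch only: urljoin from six.moves replaces the
# Python-2-only urllib.basejoin.
from six.moves.urllib.parse import quote, urljoin

host = "https://example.com"
blog_page = quote(u"my-first-post".encode("utf-8"))
link = urljoin(host, blog_page)  # 'https://example.com/my-first-post'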



frappe/www/sitemap.py (+3, -2)

@@ -8,6 +8,7 @@ import frappe
 from frappe.utils import get_request_site_address, get_datetime, nowdate
 from frappe.website.router import get_pages, get_all_page_context_from_doctypes
 from six import iteritems
+from six.moves.urllib.parse import quote
 
 no_cache = 1
 no_sitemap = 1
@@ -20,13 +21,13 @@ def get_context(context):
 	for route, page in iteritems(get_pages()):
 		if not page.no_sitemap:
 			links.append({
-				"loc": urllib.basejoin(host, urllib.quote(page.name.encode("utf-8"))),
+				"loc": urllib.basejoin(host, quote(page.name.encode("utf-8"))),
 				"lastmod": nowdate()
 			})
 
 	for route, data in iteritems(get_all_page_context_from_doctypes()):
 		links.append({
-			"loc": urllib.basejoin(host, urllib.quote((route or "").encode("utf-8"))),
+			"loc": urllib.basejoin(host, quote((route or "").encode("utf-8"))),
 			"lastmod": get_datetime(data.get("modified")).strftime("%Y-%m-%d")
 		})


