Browse Source

Replaced urllib.quote imports with six.moves.urllib.parse.quote (#3837)

version-14
Aditya Hase 8 years ago
committed by Rushabh Mehta
parent
commit
9d7d384a63
7 changed files with 14 additions and 9 deletions
  1. +1
    -1
      frappe/auth.py
  2. +2
    -1
      frappe/integrations/oauth2.py
  3. +3
    -2
      frappe/utils/__init__.py
  4. +2
    -1
      frappe/utils/data.py
  5. +1
    -1
      frappe/website/doctype/website_settings/website_settings.py
  6. +2
    -1
      frappe/www/rss.py
  7. +3
    -2
      frappe/www/sitemap.py

+ 1
- 1
frappe/auth.py View File

@@ -17,7 +17,7 @@ from frappe.translate import get_lang_code
 from frappe.utils.password import check_password
 from frappe.core.doctype.authentication_log.authentication_log import add_authentication_log

-from urllib import quote
+from six.moves.urllib.parse import quote

 class HTTPRequest:
 	def __init__(self):


+ 2
- 1
frappe/integrations/oauth2.py View File

@@ -2,7 +2,8 @@ from __future__ import unicode_literals
 import frappe, json
 from frappe.oauth import OAuthWebRequestValidator, WebApplicationServer
 from oauthlib.oauth2 import FatalClientError, OAuth2Error
-from urllib import quote, urlencode
+from urllib import urlencode
+from six.moves.urllib.parse import quote
 from werkzeug import url_fix
 from urlparse import urlparse
 from frappe.integrations.doctype.oauth_provider_settings.oauth_provider_settings import get_oauth_settings


+ 3
- 2
frappe/utils/__init__.py View File

@@ -5,7 +5,7 @@


 from __future__ import unicode_literals, print_function
 from werkzeug.test import Client
-import os, re, urllib, sys, json, hashlib, requests, traceback
+import os, re, sys, json, hashlib, requests, traceback
 from markdown2 import markdown as _markdown
 from .html_utils import sanitize_html
 import frappe
@@ -13,6 +13,7 @@ from frappe.utils.identicon import Identicon
 from email.utils import parseaddr, formataddr
 # utility functions like cint, int, flt, etc.
 from frappe.utils.data import *
+from six.moves.urllib.parse import quote
 from six import text_type

 default_fields = ['doctype', 'name', 'owner', 'creation', 'modified', 'modified_by',
@@ -178,7 +179,7 @@ def dict_to_str(args, sep='&'):
""" """
t = [] t = []
for k in args.keys(): for k in args.keys():
t.append(str(k)+'='+urllib.quote(str(args[k] or '')))
t.append(str(k)+'='+quote(str(args[k] or '')))
return sep.join(t) return sep.join(t)


# Get Defaults # Get Defaults


+ 2
- 1
frappe/utils/data.py View File

@@ -12,6 +12,7 @@ from babel.core import UnknownLocaleError
 from dateutil import parser
 from num2words import num2words
 from six.moves import html_parser as HTMLParser
+from six.moves.urllib.parse import quote
 from html2text import html2text
 from six import iteritems, text_type


@@ -795,7 +796,7 @@ def expand_relative_urls(html):
 	return html

 def quoted(url):
-	return cstr(urllib.quote(encode(url), safe=b"~@#$&()*!+=:;,.?/'"))
+	return cstr(quote(encode(url), safe=b"~@#$&()*!+=:;,.?/'"))

 def quote_urls(html):
 	def _quote_url(match):


+ 1
- 1
frappe/website/doctype/website_settings/website_settings.py View File

@@ -6,7 +6,7 @@ import frappe
 from frappe import _
 from frappe.utils import get_request_site_address, encode
 from frappe.model.document import Document
-from urllib import quote
+from six.moves.urllib.parse import quote
 from frappe.website.router import resolve_route
 from frappe.website.doctype.website_theme.website_theme import add_website_theme




+ 2
- 1
frappe/www/rss.py View File

@@ -5,6 +5,7 @@ from __future__ import unicode_literals
 import frappe
 import urllib
 from frappe.utils import escape_html, get_request_site_address, now, cstr
+from six.moves.urllib.parse import quote

 no_cache = 1
 base_template_path = "templates/www/rss.xml"
@@ -20,7 +21,7 @@ def get_context(context):
 		order by published_on desc limit 20""", as_dict=1)

 	for blog in blog_list:
-		blog_page = cstr(urllib.quote(blog.name.encode("utf-8")))
+		blog_page = cstr(quote(blog.name.encode("utf-8")))
 		blog.link = urllib.basejoin(host, blog_page)
 		blog.content = escape_html(blog.content or "")




+ 3
- 2
frappe/www/sitemap.py View File

@@ -8,6 +8,7 @@ import frappe
 from frappe.utils import get_request_site_address, get_datetime, nowdate
 from frappe.website.router import get_pages, get_all_page_context_from_doctypes
 from six import iteritems
+from six.moves.urllib.parse import quote

 no_cache = 1
 no_sitemap = 1
@@ -20,13 +21,13 @@ def get_context(context):
 	for route, page in iteritems(get_pages()):
 		if not page.no_sitemap:
 			links.append({
-				"loc": urllib.basejoin(host, urllib.quote(page.name.encode("utf-8"))),
+				"loc": urllib.basejoin(host, quote(page.name.encode("utf-8"))),
 				"lastmod": nowdate()
 			})

 	for route, data in iteritems(get_all_page_context_from_doctypes()):
 		links.append({
-			"loc": urllib.basejoin(host, urllib.quote((route or "").encode("utf-8"))),
+			"loc": urllib.basejoin(host, quote((route or "").encode("utf-8"))),
 			"lastmod": get_datetime(data.get("modified")).strftime("%Y-%m-%d")
 		})




Loading…
Cancel
Save