From a016155f19fcecba08475ce123e02fba1a5b9ed9 Mon Sep 17 00:00:00 2001 From: Brahma K Date: Tue, 23 Aug 2011 10:36:20 +0530 Subject: [PATCH 01/22] query_builder modified to take care of sub queries --- cgi-bin/webnotes/widgets/query_builder.py | 96 +++++++++++------------ cgi-bin/webnotes/widgets/search.py | 18 ++--- js/webpage/search.js | 63 ++++++++------- js/wnf.compressed.js | 4 +- 4 files changed, 94 insertions(+), 87 deletions(-) diff --git a/cgi-bin/webnotes/widgets/query_builder.py b/cgi-bin/webnotes/widgets/query_builder.py index 72e320cd70..6b4196b6d7 100644 --- a/cgi-bin/webnotes/widgets/query_builder.py +++ b/cgi-bin/webnotes/widgets/query_builder.py @@ -14,7 +14,7 @@ def get_search_criteria_list(dt): def load_report_list(): webnotes.response['rep_list'] = get_search_criteria_list(form.getvalue('dt')) - + # Get, scrub metadata # ==================================================================== @@ -37,20 +37,20 @@ def get_parent_dt(dt): def get_sql_meta(tl): std_columns = { - 'owner':('Owner', '', '', '100'), - 'creation':('Created on', 'Date', '', '100'), - 'modified':('Last modified on', 'Date', '', '100'), + 'owner':('Owner', '', '', '100'), + 'creation':('Created on', 'Date', '', '100'), + 'modified':('Last modified on', 'Date', '', '100'), 'modified_by':('Modified By', '', '', '100') } - + meta = {} - + for dt in tl: meta[dt] = std_columns.copy() # for table doctype, the ID is the parent id pdt = get_parent_dt(dt) - if pdt: + if pdt: meta[dt]['parent'] = ('ID', 'Link', pdt, '200') # get the field properties from DocField @@ -58,10 +58,10 @@ def get_sql_meta(tl): for r in res: if r[0]: meta[dt][r[0]] = (r[1], r[2], r[3], r[4]); - + # name meta[dt]['name'] = ('ID', 'Link', dt, '200') - + return meta # Additional conditions to fulfill match permission rules @@ -80,12 +80,12 @@ def getmatchcondition(dt, ud, ur): return '' return ' OR '.join(cond) - + def add_match_conditions(q, tl, ur, ud): sl = [] for dt in tl: s = getmatchcondition(dt, ud, ur) - if s: + if s: sl.append(s) # insert the conditions @@ -94,13 +94,13 @@ def add_match_conditions(q, tl, ur, ud): condition_end = q.find('ORDER BY')!=-1 and 'ORDER BY' or 'LIMIT' condition_end = q.find('GROUP BY')!=-1 and 'GROUP BY' or condition_end - + if q.find('ORDER BY')!=-1 or q.find('LIMIT')!=-1 or q.find('GROUP BY')!=-1: # if query continues beyond conditions q = q.split(condition_end) q = q[0] + condition_st + '(' + ' OR '.join(sl) + ') ' + condition_end + q[1] else: q = q + condition_st + '(' + ' OR '.join(sl) + ')' - + return q # execute server-side script from Search Criteria @@ -111,7 +111,7 @@ def exec_report(code, res, colnames=[], colwidths=[], coltypes=[], coloptions=[] for c in colnames: col_idx[c] = i i+=1 - + # load globals (api) from webnotes import * from webnotes.utils import * @@ -127,12 +127,12 @@ def exec_report(code, res, colnames=[], colwidths=[], coltypes=[], coloptions=[] NEWLINE = '\n' exec str(code) - + if out!=None: res = out return res, style, header_html, footer_html, page_template - + # ==================================================================== def guess_type(m): @@ -146,7 +146,7 @@ def guess_type(m): return 'Date' else: return 'Data' - + def build_description_simple(): colnames, coltypes, coloptions, colwidths = [], [], [], [] @@ -155,7 +155,7 @@ def build_description_simple(): coltypes.append(guess_type[m[0]]) coloptions.append('') colwidths.append('100') - + return colnames, coltypes, coloptions, colwidths # ==================================================================== @@ -180,27 
+180,27 @@ def build_description_standard(meta, tl): if (not dt) and merged_meta.get(fn): # no "AS" given, find type from merged description - + desc = merged_meta[fn] colnames.append(desc[0] or fn) coltypes.append(desc[1] or '') coloptions.append(desc[2] or '') colwidths.append(desc[3] or '100') - + elif meta.get(dt,{}).has_key(fn): # type specified for a multi-table join # usually from Report Builder - + desc = meta[dt][fn] colnames.append(desc[0] or fn) coltypes.append(desc[1] or '') coloptions.append(desc[2] or '') colwidths.append(desc[3] or '100') - + else: # nothing found # guess - + colnames.append(fn) coltypes.append(guess_type(f[1])) coloptions.append('') @@ -214,21 +214,21 @@ def build_description_standard(meta, tl): def runquery(q='', ret=0, from_export=0): import webnotes.utils - formatted = cint(form.getvalue('formatted')) - + formatted = cint(form.getvalue('formatted')) + # CASE A: Simple Query # -------------------- if form.getvalue('simple_query') or form.getvalue('is_simple'): - q = form.getvalue('simple_query') or form.getvalue('query') + if not q: q = form.getvalue('simple_query') or form.getvalue('query') if q.split()[0].lower() != 'select': raise Exception, 'Query must be a SELECT' - + as_dict = cint(form.getvalue('as_dict')) res = sql(q, as_dict = as_dict, as_list = not as_dict, formatted=formatted) - + # build colnames etc from metadata colnames, coltypes, coloptions, colwidths = [], [], [], [] - + # CASE B: Standard Query # ----------------------- else: @@ -236,17 +236,17 @@ def runquery(q='', ret=0, from_export=0): tl = get_sql_tables(q) meta = get_sql_meta(tl) - + q = add_match_conditions(q, tl, webnotes.user.roles, webnotes.user.get_defaults()) - + # replace special variables q = q.replace('__user', session['user']) q = q.replace('__today', webnotes.utils.nowdate()) - + res = sql(q, as_list=1, formatted=formatted) colnames, coltypes, coloptions, colwidths = build_description_standard(meta, tl) - + # run server script # ----------------- style, header_html, footer_html, page_template = '', '', '', '' @@ -254,15 +254,15 @@ def runquery(q='', ret=0, from_export=0): sc_id = form.getvalue('sc_id') from webnotes.model.code import get_code sc_details = webnotes.conn.sql("select module, standard, server_script from `tabSearch Criteria` where name=%s", sc_id)[0] - if sc_details[1]!='No': + if sc_details[1]!='No': code = get_code(sc_details[0], 'Search Criteria', sc_id, 'py') else: code = sc_details[2] - + if code: filter_values = form.has_key('filter_values') and eval(form.getvalue('filter_values','')) or {} res, style, header_html, footer_html, page_template = exec_report(code, res, colnames, colwidths, coltypes, coloptions, filter_values, q, from_export) - + out['colnames'] = colnames out['coltypes'] = coltypes out['coloptions'] = coloptions @@ -270,17 +270,17 @@ def runquery(q='', ret=0, from_export=0): out['header_html'] = header_html out['footer_html'] = footer_html out['page_template'] = page_template - + if style: out['style'] = style - + # just the data - return if ret==1: - return res + return res out['values'] = res - # return num of entries + # return num of entries qm = form.has_key('query_max') and form.getvalue('query_max') or '' if qm and qm.strip(): if qm.split()[0].lower() != 'select': @@ -298,31 +298,31 @@ def runquery_csv(): # run query res = runquery(from_export = 1) - + q = form.getvalue('query') - + rep_name = form.getvalue('report_name') if not form.has_key('simple_query'): # Report Name if not rep_name: rep_name = get_sql_tables(q)[0] - + if not 
rep_name: rep_name = 'DataExport' - + # Headings heads = [] - + rows = [[rep_name], out['colnames']] + out['values'] - + from cStringIO import StringIO import csv - + f = StringIO() writer = csv.writer(f) for r in rows: writer.writerow(r) - + f.seek(0) out['result'] = f.read() out['type'] = 'csv' diff --git a/cgi-bin/webnotes/widgets/search.py b/cgi-bin/webnotes/widgets/search.py index 9d8da2b790..bfd1c08486 100644 --- a/cgi-bin/webnotes/widgets/search.py +++ b/cgi-bin/webnotes/widgets/search.py @@ -22,16 +22,16 @@ def getsearchfields(): webnotes.response['searchfields'] = [['name', 'ID', 'Data', '']] + res def make_query(fields, dt, key, txt, start, length): - return """SELECT %(fields)s - FROM `tab%(dt)s` + return """SELECT %(fields)s + FROM `tab%(dt)s` WHERE `tab%(dt)s`.`%(key)s` LIKE '%(txt)s' AND `tab%(dt)s`.docstatus != 2 - ORDER BY `tab%(dt)s`.`%(key)s` + ORDER BY `tab%(dt)s`.`%(key)s` DESC LIMIT %(start)s, %(len)s """ % { 'fields': fields, 'dt': dt, 'key': key, 'txt': txt + '%', - 'start': start, + 'start': start, 'len': length } @@ -48,7 +48,7 @@ def get_std_fields_list(dt, key): def build_for_autosuggest(res): from webnotes.utils import cstr - + results = [] for r in res: info = '' @@ -56,10 +56,10 @@ def build_for_autosuggest(res): info = ','.join([cstr(t) for t in r[1:]]) if len(info) > 30: info = info[:30] + '...' - + results.append({'id':r[0], 'value':r[0], 'info':info}) return results - + def scrub_custom_query(query, key, txt): if '%(key)s' in query: query = query.replace('%(key)s', key) @@ -74,7 +74,7 @@ def search_link(): txt = webnotes.form.getvalue('txt') dt = webnotes.form.getvalue('dt') query = webnotes.form.getvalue('query') - + if query: res = webnotes.conn.sql(scrub_custom_query(query, 'name', txt)) else: @@ -97,5 +97,5 @@ def search_widget(): query = scrub_custom_query(user_query, key, txt) else: query = make_query(', '.join(get_std_fields_list(dt, key)), dt, key, txt, webnotes.form.getvalue('start') or 0, webnotes.form.getvalue('page_len') or 50) - + webnotes.widgets.query_builder.runquery(query) diff --git a/js/webpage/search.js b/js/webpage/search.js index b0098ea80a..d921e4c310 100644 --- a/js/webpage/search.js +++ b/js/webpage/search.js @@ -21,8 +21,8 @@ function makeselector() { ['Button', 'Search'], ['HTML', 'Help'], ['HTML', 'Result'] - ]); - + ]); + // search with var inp = d.widgets['Beginning With']; var field_sel = d.widgets['Search By']; @@ -39,7 +39,7 @@ function makeselector() { } d.style = 'Link'; d.set_query_description() - + if(!d.sel_type)d.sel_type = 'Value'; d.set_title('Select a "'+ d.sel_type +'" for field "'+label+'"'); } @@ -47,18 +47,18 @@ function makeselector() { if(d.style!='Search') { d.rows['Result'].innerHTML =''; d.values_len = 0; - } + } d.style = 'Search'; if(d.input) { d.input = null; sel_type = null; } d.sel_type = get_label_doctype(dt); d.set_title('Quick Search for ' + dt); } - - inp.onkeydown = function(e) { + + inp.onkeydown = function(e) { if(isIE)var kc = window.event.keyCode; else var kc = e.keyCode; - if(kc==13) if(!btn.disabled)btn.onclick(); + if(kc==13) if(!btn.disabled)btn.onclick(); } d.set_query_description = function() { @@ -68,18 +68,18 @@ function makeselector() { d.rows['Help'].innerHTML ='' } } - d.onshow = function() { + d.onshow = function() { if(d.set_doctype!=d.sel_type) { d.rows['Result'].innerHTML =''; d.values_len = 0; } - - inp.value = ''; + + inp.value = ''; if(d.input && d.input.txt.value) { inp.value = d.input.txt.value; } try{inp.focus();} catch(e){} - + if(d.input) d.input.set_get_query(); // 
temp function to strip labels from search fields @@ -88,10 +88,10 @@ function makeselector() { for(var i=0; i Date: Tue, 23 Aug 2011 11:49:28 +0530 Subject: [PATCH 02/22] receive encoding error --- cgi-bin/webnotes/utils/email_lib/receive.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/cgi-bin/webnotes/utils/email_lib/receive.py b/cgi-bin/webnotes/utils/email_lib/receive.py index 7207d836f0..73d5e66c8c 100644 --- a/cgi-bin/webnotes/utils/email_lib/receive.py +++ b/cgi-bin/webnotes/utils/email_lib/receive.py @@ -39,8 +39,11 @@ class IncomingMail: """ get utf-8 encoded part content """ - return unicode(part.get_payload(decode=True),str(charset),"ignore").encode('utf8','replace') - + try: + return unicode(part.get_payload(decode=True),str(charset),"ignore").encode('utf8','replace') + except LookupError, e: + return part.get_payload() + def get_attachment(self, part, charset): """ Extracts an attachment From c7514a451afa984f96e9d58d992cc469fe28e0e6 Mon Sep 17 00:00:00 2001 From: Brahma K Date: Tue, 23 Aug 2011 11:51:15 +0530 Subject: [PATCH 03/22] utils.getlist returns sorted list based on idx --- cgi-bin/webnotes/model/doclist.py | 56 +++++++++++------------ cgi-bin/webnotes/model/utils.py | 75 ++++++++++++++++--------------- 2 files changed, 67 insertions(+), 64 deletions(-) diff --git a/cgi-bin/webnotes/model/doclist.py b/cgi-bin/webnotes/model/doclist.py index 6e0b816974..d58ca43026 100644 --- a/cgi-bin/webnotes/model/doclist.py +++ b/cgi-bin/webnotes/model/doclist.py @@ -18,7 +18,7 @@ class DocList: self.to_docstatus = 0 if dt and dn: self.load_from_db(dt, dn) - + def load_from_db(self, dt, dn): """ Load doclist from dt @@ -34,15 +34,15 @@ class DocList: doclist = [doc,] for t in tablefields: doclist += getchildren(doc.name, t[0], t[1], dt, prefix=prefix) - + self.docs = docs - + def __iter__(self): """ Make this iterable """ return self.docs.__iter__() - + def from_compressed(self, data, docname): """ Expand called from client @@ -50,13 +50,13 @@ class DocList: from webnotes.model.utils import expand self.docs = expand(data) self.objectify(docname) - + def objectify(self, docname=None): """ Converts self.docs from a list of dicts to list of Documents """ from webnotes.model.doc import Document - + self.docs = [Document(fielddata=d) for d in self.docs] if not docname: self.doc, self.children = self.docs[0], self.docs[1:] @@ -68,17 +68,17 @@ class DocList: self.doc = d else: self.children.append(d) - + def make_obj(self): """ Create a DocType object """ if self.obj: return self.obj - + from webnotes.model.code import get_obj self.obj = get_obj(doc=self.doc, doclist=self.children) return self.obj - + def next(self): """ Next doc @@ -99,13 +99,13 @@ class DocList: if (not is_single(self.doc.doctype)) and (not self.doc.fields.get('__islocal')): tmp = webnotes.conn.sql(""" - SELECT modified FROM `tab%s` WHERE name="%s" for update""" + SELECT modified FROM `tab%s` WHERE name="%s" for update""" % (self.doc.doctype, self.doc.name)) if tmp and str(tmp[0][0]) != str(self.doc.modified): webnotes.msgprint(""" - Document has been modified after you have opened it. - To maintain the integrity of the data, you will not be able to save your changes. + Document has been modified after you have opened it. + To maintain the integrity of the data, you will not be able to save your changes. Please refresh this document. 
[%s/%s]""" % (tmp[0][0], self.doc.modified), raise_exception=1) def check_permission(self): @@ -114,7 +114,7 @@ class DocList: """ if not self.doc.check_perm(verbose=1): webnotes.msgprint("Not enough permission to save %s" % self.doc.doctype, raise_exception=1) - + def check_links(self): """ Checks integrity of links (throws exception if links are invalid) @@ -125,11 +125,11 @@ class DocList: ref[d.doctype] = d.make_link_list() err_list += d.validate_links(ref[d.doctype]) - + if err_list: - webnotes.msgprint("""[Link Validation] Could not find the following values: %s. + webnotes.msgprint("""[Link Validation] Could not find the following values: %s. Please correct and resave. Document Not Saved.""" % ', '.join(err_list), raise_exception=1) - + def update_timestamps(self): """ Update owner, creation, modified_by, modified, docstatus @@ -137,17 +137,17 @@ class DocList: from webnotes.utils import now ts = now() user = webnotes.__dict__.get('session', {}).get('user') or 'Administrator' - + for d in self.docs: if self.doc.__islocal: d.owner = user d.creation = ts - + d.modified_by = user d.modified = ts if d.docstatus != 2: # don't update deleted d.docstatus = self.to_docstatus - + def prepare_for_save(self, check_links): """ Set owner, modified etc before saving @@ -170,7 +170,7 @@ class DocList: from webnotes.model.triggers import fire_event fire_event(self.doc, method) - + def save_main(self): """ Save the main doc @@ -179,7 +179,7 @@ class DocList: self.doc.save(cint(self.doc.__islocal)) except NameError, e: webnotes.msgprint('%s "%s" already exists' % (self.doc.doctype, self.doc.name)) - + # prompt if cancelled if webnotes.conn.get_value(self.doc.doctype, self.doc.name, 'docstatus')==2: webnotes.msgprint('[%s "%s" has been cancelled]' % (self.doc.doctype, self.doc.name)) @@ -192,7 +192,7 @@ class DocList: """ for d in self.children: deleted, local = d.fields.get('__deleted',0), d.fields.get('__islocal',0) - + if cint(local) and cint(deleted): pass @@ -201,7 +201,7 @@ class DocList: d.parent = self.doc.name # rename if reqd d.parenttype = self.doc.doctype - d.save(new = cint(local)) + d.save(new = cint(local)) def save(self, check_links=1): """ @@ -212,7 +212,7 @@ class DocList: self.save_main() self.save_children() self.run_method('on_update') - + def submit(self): """ Save & Submit - set docstatus = 1, run "on_submit" @@ -222,7 +222,7 @@ class DocList: self.to_docstatus = 1 self.save() self.run_method('on_submit') - + def cancel(self): """ Cancel - set docstatus 2, run "on_cancel" @@ -234,7 +234,7 @@ class DocList: self.save_main() self.save_children() self.run_method('on_cancel') - + def update_after_submit(self): """ Update after submit - some values changed after submit @@ -252,11 +252,11 @@ def getlist(doclist, parentfield): """ import webnotes.model.utils return webnotes.model.utils.getlist(doclist, parentfield) - + def copy_doclist(doclist, no_copy = []): """ Make a copy of the doclist """ import webnotes.model.utils return webnotes.model.utils.copy_doclist(doclist, no_copy) - + diff --git a/cgi-bin/webnotes/model/utils.py b/cgi-bin/webnotes/model/utils.py index 2d019273f1..b9892067d3 100644 --- a/cgi-bin/webnotes/model/utils.py +++ b/cgi-bin/webnotes/model/utils.py @@ -1,7 +1,7 @@ """ Model utilities, unclassified functions """ - + def expand(docs): """ Expand a doclist sent from the client side. (Internally used by the request handler) @@ -25,12 +25,12 @@ def compress(doclist): """ Compress a doclist before sending it to the client side. 
(Internally used by the request handler) - """ + """ if doclist and hasattr(doclist[0],'fields'): docs = [d.fields for d in doclist] else: docs = doclist - + kl, vl = {}, [] for d in docs: dt = d['doctype'] @@ -38,10 +38,10 @@ def compress(doclist): fl = d.keys() forbidden = ['server_code_compiled'] nl = ['doctype','localname','__oldparent','__unsaved'] - + # add client script for doctype, doctype due to ambiguity if dt=='DocType': nl.append('__client_script') - + for f in fl: if not (f in nl) and not (f in forbidden): nl.append(f) @@ -64,21 +64,24 @@ def compress(doclist): def getlist(doclist, field): """ Filter a list of records for a specific field from the full doclist - + Example:: - - # find all phone call details + + # find all phone call details dl = getlist(self.doclist, 'contact_updates') pl = [] for d in dl: if d.type=='Phone': pl.append(d) """ - + l = [] for d in doclist: if d.parent and (not d.parent.lower().startswith('old_parent:')) and d.parentfield == field: l.append(d) + + l.sort(lambda a, b: a.idx - b.idx) + return l # Copy doclist @@ -90,31 +93,31 @@ def copy_doclist(doclist, no_copy = []): Pass fields that are not to be copied in `no_copy` """ from webnotes.model.doc import Document - + cl = [] - + # main doc c = Document(fielddata = doclist[0].fields.copy()) - + # clear no_copy fields - for f in no_copy: + for f in no_copy: if c.fields.has_key(f): c.fields[f] = None - + c.name = None c.save(1) cl.append(c) - + # new parent name parent = c.name - + # children for d in doclist[1:]: c = Document(fielddata = d.fields.copy()) c.name = None - + # clear no_copy fields - for f in no_copy: + for f in no_copy: if c.fields.has_key(f): c.fields[f] = None @@ -138,18 +141,18 @@ def _make_html(doc, link_list): from webnotes.utils import cstr out = '' for k in doc.fields.keys(): - if k!='server_code_compiled': + if k!='server_code_compiled': v = cstr(doc.fields[k]) - + # link field if v and (k in link_list.keys()): dt = link_list[k] if type(dt)==str and dt.startswith('link:'): dt = dt[5:] - v = '%s' % (dt, v, v) - + v = '%s' % (dt, v, v) + out += '\t\n' % (cstr(k), v) - + out += '
</table>
' return out @@ -159,13 +162,13 @@ def to_html(doclist): """ out = '' link_lists = {} - + for d in doclist: if not link_lists.get(d.doctype): link_lists[d.doctype] = d.make_link_list() out += _make_html(d, link_lists[d.doctype]) - + return out def commonify_doclist(doclist, with_comments=1): @@ -184,14 +187,14 @@ def commonify_doclist(doclist, with_comments=1): return c def strip_common(d): - for k in common_keys: + for k in common_keys: if k in d: del d[k] return d def make_common_dicts(doclist): - + common_dict = {} # one per doctype - + # make common dicts for all records for d in doclist: if not d['doctype'] in common_dict: @@ -206,15 +209,15 @@ def commonify_doclist(doclist, with_comments=1): common_dict = make_common_dicts(doclist) # make docs - final = [] + final = [] for d in doclist: f = strip_common(get_diff_dict(common_dict[d['doctype']], d)) f['doctype'] = d['doctype'] # keep doctype! - + # strip name for child records (only an auto generated number!) if f['doctype'] != doclist[0]['doctype']: del f['name'] - + if with_comments: f['##comment'] = d['doctype'] + ('name' in f and (', ' + f['name']) or '') final.append(f) @@ -226,10 +229,10 @@ def commonify_doclist(doclist, with_comments=1): if with_comments: d['##comment'] = 'These values are common for all ' + d['doctype'] commons.append(strip_common(d)) - + common_values = make_common(doclist) return [common_values]+commons+final - + def uncommonify_doclist(dl): """ Expands an commonified doclist @@ -249,13 +252,13 @@ def uncommonify_doclist(dl): final.append(d1) return final - + def pprint_doclist(doclist, with_comments = 1): """ Pretty Prints a doclist with common keys separated and comments """ from webnotes.utils import pprint_dict - + dictlist =[pprint_dict(d) for d in commonify_doclist(doclist, with_comments)] title = '# '+doclist[0]['doctype']+', '+doclist[0]['name'] return title + '\n[\n' + ',\n'.join(dictlist) + '\n]' @@ -268,5 +271,5 @@ def peval_doclist(txt): return uncommonify_doclist(eval(txt)) else: return eval(txt) - + return uncommonify_doclist(eval(txt)) From 89abceaa0d17bbdded3472946c2eb25024679b64 Mon Sep 17 00:00:00 2001 From: Rushabh Mehta Date: Thu, 25 Aug 2011 11:34:01 +0530 Subject: [PATCH 04/22] added ability to show / hide columns --- cgi-bin/webnotes/utils/email_lib/receive.py | 5 +- js/form.compressed.js | 26 +++---- js/widgets/form/form_grid.js | 6 -- js/widgets/form/grid.js | 77 +++++++++++---------- 4 files changed, 57 insertions(+), 57 deletions(-) diff --git a/cgi-bin/webnotes/utils/email_lib/receive.py b/cgi-bin/webnotes/utils/email_lib/receive.py index 73d5e66c8c..d6cc347998 100644 --- a/cgi-bin/webnotes/utils/email_lib/receive.py +++ b/cgi-bin/webnotes/utils/email_lib/receive.py @@ -131,7 +131,10 @@ class POP3Mailbox: num = len(self.pop.list()[1]) for m in range(num): msg = self.pop.retr(m+1) - self.process_message(IncomingMail('\n'.join(msg[1]))) + try: + self.process_message(IncomingMail('\n'.join(msg[1]))) + except: + pass self.pop.dele(m+1) self.pop.quit() diff --git a/js/form.compressed.js b/js/form.compressed.js index 64ea3993ac..d4c58ca5b8 100644 --- a/js/form.compressed.js +++ b/js/form.compressed.js @@ -256,18 +256,21 @@ _f.CodeField.prototype.init_editor=function(){var me=this;this.editor=tinymce.ge _f.CodeField.prototype.set_disp=function(val){$y(this.disp_area,{width:'90%'}) if(this.df.fieldtype=='Text Editor'){this.disp_area.innerHTML=val;}else{this.disp_area.innerHTML='';}} _f.cur_grid_cell=null;_f.Grid=function(parent){} 
-_f.Grid.prototype.init=function(parent,row_height){this.alt_row_bg='#F2F2FF';this.row_height=row_height;if(!row_height)this.row_height='26px';this.make_ui(parent);this.insert_column('','','Int','Sr','50px','',[1,0,0]);this.total_width=50;if(this.oninit)this.oninit();keypress_observers.push(this)} +_f.Grid.prototype.init=function(parent,row_height){this.col_idx_by_name={} +this.alt_row_bg='#F2F2FF';this.row_height=row_height;if(!row_height)this.row_height='26px';this.make_ui(parent);this.insert_column('','','Int','Sr','50px','',[1,0,0]);if(this.oninit)this.oninit();keypress_observers.push(this);} _f.Grid.prototype.make_ui=function(parent){var ht=make_table($a(parent,'div'),1,2,'100%',['60%','40%']);this.main_title=$td(ht,0,0);this.main_title.className='columnHeading';$td(ht,0,1).style.textAlign='right';this.tbar_div=$a($td(ht,0,1),'div','grid_tbarlinks');if(isIE)$y(this.tbar_div,{width:'200px'});this.tbar_tab=make_table(this.tbar_div,1,4,'100%',['25%','25%','25%','25%']);this.wrapper=$a(parent,'div','grid_wrapper');$h(this.wrapper,cint(screen.width*0.5)+'px');this.head_wrapper=$a(this.wrapper,'div','grid_head_wrapper');this.head_tab=$a(this.head_wrapper,'table','grid_head_table');this.head_row=this.head_tab.insertRow(0);this.tab_wrapper=$a(this.wrapper,'div','grid_tab_wrapper');this.tab=$a(this.tab_wrapper,'table','grid_table');var me=this;this.wrapper.onscroll=function(){me.head_wrapper.style.top=me.wrapper.scrollTop+'px';}} _f.Grid.prototype.show=function(){if(this.can_add_rows){$ds(this.tbar_div);}else{$dh(this.tbar_div);} $ds(this.wrapper);} _f.Grid.prototype.hide=function(){$dh(this.wrapper);$dh(this.tbar_div);} -_f.Grid.prototype.insert_column=function(doctype,fieldname,fieldtype,label,width,options,perm,reqd){var idx=this.head_row.cells.length;if(!width)width='100px';var col=this.head_row.insertCell(idx);col.doctype=doctype;col.fieldname=fieldname;col.fieldtype=fieldtype;col.innerHTML='
<div>'+label+'</div>
';col.label=label;if(reqd) -col.childNodes[0].style.color="#D22";this.total_width+=cint(width);$w(col,width);col.orig_width=col.style.width;col.options=options;col.perm=perm;} -_f.Grid.prototype.set_column_disp=function(label,show){for(var i=0;i'+label+'';col.label=label;if(reqd) +col.childNodes[0].style.color="#D22";col.style.width=width;col.options=options;col.perm=perm;this.col_idx_by_name[fieldname]=idx;} +_f.Grid.prototype.reset_table_width=function(){var w=0;for(var i=0,len=this.head_row.cells.length;ithis.tab.rows.length) +_f.Grid.prototype.set_data=function(data){this.cell_deselect();this.reset_table_width();if(data.length>this.tab.rows.length) this.append_rows(data.length-this.tab.rows.length);if(data.length'+label+'';c.cur_label=label;break;}}} _f.FormGrid.prototype.refresh=function(){var docset=getchildren(this.doctype,this.field.frm.docname,this.field.df.fieldname,this.field.frm.doctype);var data=[];for(var i=0;i this.tab.rows.length) From bdeecb59fe31b564b54765b26c2cf7e6ee228a10 Mon Sep 17 00:00:00 2001 From: Rushabh Mehta Date: Thu, 25 Aug 2011 11:38:02 +0530 Subject: [PATCH 05/22] removed msgprint --- js/form.compressed.js | 2 +- js/widgets/form/grid.js | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/js/form.compressed.js b/js/form.compressed.js index d4c58ca5b8..c6f5ff2489 100644 --- a/js/form.compressed.js +++ b/js/form.compressed.js @@ -263,7 +263,7 @@ _f.Grid.prototype.show=function(){if(this.can_add_rows){$ds(this.tbar_div);}else $ds(this.wrapper);} _f.Grid.prototype.hide=function(){$dh(this.wrapper);$dh(this.tbar_div);} _f.Grid.prototype.insert_column=function(doctype,fieldname,fieldtype,label,width,options,perm,reqd){var idx=this.head_row.cells.length;if(!width)width='140px';if((width+'').slice(-2)!='px'){width=width+'px';} -msgprint(width);var col=this.head_row.insertCell(idx);col.doctype=doctype;col.fieldname=fieldname;col.fieldtype=fieldtype;col.innerHTML='
<div>'+label+'</div>
';col.label=label;if(reqd) +var col=this.head_row.insertCell(idx);col.doctype=doctype;col.fieldname=fieldname;col.fieldtype=fieldtype;col.innerHTML='
<div>'+label+'</div>
';col.label=label;if(reqd) col.childNodes[0].style.color="#D22";col.style.width=width;col.options=options;col.perm=perm;this.col_idx_by_name[fieldname]=idx;} _f.Grid.prototype.reset_table_width=function(){var w=0;for(var i=0,len=this.head_row.cells.length;i Date: Thu, 25 Aug 2011 15:08:52 +0530 Subject: [PATCH 06/22] feature to restrict ip, by hours, cleanup of profile doctype --- cgi-bin/core/doctype/profile/profile.txt | 526 ++++-------------- .../search_criteria/search_criteria.txt | 74 +-- cgi-bin/webnotes/auth.py | 41 +- cgi-bin/webnotes/db.py | 11 +- cgi-bin/webnotes/model/utils.py | 24 +- js/widgets/report_builder/report_builder.js | 8 +- 6 files changed, 178 insertions(+), 506 deletions(-) diff --git a/cgi-bin/core/doctype/profile/profile.txt b/cgi-bin/core/doctype/profile/profile.txt index fd5cd3722f..3f117123e0 100644 --- a/cgi-bin/core/doctype/profile/profile.txt +++ b/cgi-bin/core/doctype/profile/profile.txt @@ -5,25 +5,22 @@ { 'creation': '2009-05-12 11:19:11', 'docstatus': 0, - 'modified': '2010-12-21 11:07:20', - 'modified_by': 'sneha@webnotestech.com', + 'modified': '2011-08-25 14:02:26', + 'modified_by': 'Administrator', 'owner': 'Administrator' }, # These values are common for all DocType { - '_last_update': '1303708853', + '_last_update': '1311340897', 'allow_attach': 1, 'allow_copy': 0, 'allow_email': 0, 'allow_print': 0, - 'client_script': 'cur_frm.cscript[\'Change Password\']= function(doc, cdt, cdn) {\n var error = false;\n if ((!doc.new_password)||(!doc.retype_new_password)){\n alert("Both fields are required!");\n error = true;\n }\n if (doc.new_password.length<4) {\n alert("Password must be atleast 4 characters long");\n error = true;\n }\n if(doc.new_password!=doc.retype_new_password) {\n alert("Passwords must match");\n error = true;\n }\n if(!/[A-Z]/.test(doc.new_password) || !/[0-9]/.test(doc.new_password) || !/[\\W_]/.test(doc.new_password)) {\n msgprint(\'New password must contain atleast 1 capital letter, 1 numeric and 1 special character.\');\n error = true;\n doc.new_password = \'\';\n refresh_field(\'new_password\');\n }\n if(!error) {\n cur_frm.runscript(\'update_password\', \'\', function(r,t) {\n\tdoc.new_password = \'\';\n\tdoc.retype_new_password = \'\';\n refresh_many([\'new_password\',\'retype_new_password\']);\n });\n }\n}\n\ncur_frm.cscript.validate = function(doc, cdt, cdn) {\n doc.new_password = \'\';\n doc.retype_new_password = \'\';\n}', 'colour': 'White:FFF', 'doctype': 'DocType', 'hide_heading': 0, 'hide_toolbar': 0, - 'idx': 0, - 'in_create': 1, 'issingle': 0, 'istable': 0, 'max_attachments': 1, @@ -31,10 +28,9 @@ 'name': '__common__', 'print_outline': 'Yes', 'read_only': 0, - 'section_style': 'Tray', - 'server_code_error': ' ', + 'search_fields': 'first_name, last_name', 'show_in_menu': 0, - 'version': 25 + 'version': 32 }, # These values are common for all DocField @@ -69,7 +65,6 @@ 'create': 1, 'doctype': 'DocPerm', 'execute': 0, - 'idx': 1, 'permlevel': 0, 'role': 'Administrator', 'submit': 0 @@ -79,7 +74,6 @@ { 'create': 1, 'doctype': 'DocPerm', - 'idx': 2, 'permlevel': 0, 'role': 'System Manager' }, @@ -87,7 +81,6 @@ # DocPerm { 'doctype': 'DocPerm', - 'idx': 3, 'permlevel': 1, 'role': 'Administrator' }, @@ -95,7 +88,6 @@ # DocPerm { 'doctype': 'DocPerm', - 'idx': 4, 'match': 'owner', 'permlevel': 0, 'role': 'All' @@ -103,85 +95,52 @@ # DocField { + 'default': '1', 'doctype': 'DocField', - 'fieldtype': 'Section Break', - 'hidden': 0, - 'idx': 1, - 'label': 'Details', - 'oldfieldtype': 'Section Break', - 'permlevel': 0, - 'reqd': 0, 
- 'search_index': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldtype': 'Column Break', - 'hidden': 0, - 'idx': 2, - 'label': 'Picture', - 'oldfieldtype': 'Column Break', - 'permlevel': 0, - 'reqd': 0, - 'search_index': 0, - 'width': '50%' - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldtype': 'Image', - 'idx': 3, - 'label': 'Profile Picture', - 'oldfieldtype': 'Image', - 'permlevel': 0 + 'fieldname': 'enabled', + 'fieldtype': 'Check', + 'label': 'Enabled', + 'oldfieldname': 'enabled', + 'oldfieldtype': 'Check', + 'permlevel': 1 }, # DocField { 'doctype': 'DocField', - 'fieldtype': 'Column Break', - 'idx': 4, - 'label': 'Contact', - 'oldfieldtype': 'Column Break', - 'permlevel': 0, - 'width': '50%' + 'fieldname': 'password', + 'fieldtype': 'Password', + 'label': 'Password', + 'permlevel': 1, + 'hidden': 1 }, - + # DocField { - 'default': '1', 'doctype': 'DocField', - 'fieldname': 'enabled', + 'fieldname': 'registered', 'fieldtype': 'Check', - 'idx': 5, - 'label': 'Enabled', - 'oldfieldname': 'enabled', - 'oldfieldtype': 'Check', - 'permlevel': 1 + 'label': 'Registered', + 'permlevel': 0, + 'hidden': 1 }, # DocField { - 'default': '1', 'doctype': 'DocField', - 'fieldname': 'send_email_invite', + 'fieldname': 'unsubscribed', 'fieldtype': 'Check', - 'idx': 6, - 'label': 'Send Email Invite', - 'oldfieldname': 'send_email_invite', - 'oldfieldtype': 'Check', - 'permlevel': 1 + 'label': 'Unsubscribed', + 'permlevel': 0, + 'hidden': 1 }, - + # DocField { 'doctype': 'DocField', 'fieldname': 'recent_documents', 'fieldtype': 'Text', 'hidden': 1, - 'idx': 8, 'label': 'Recent Documents', 'oldfieldname': 'recent_documents', 'oldfieldtype': 'Text', @@ -195,7 +154,6 @@ 'doctype': 'DocField', 'fieldname': 'first_name', 'fieldtype': 'Data', - 'idx': 9, 'label': 'First Name', 'oldfieldname': 'first_name', 'oldfieldtype': 'Data', @@ -208,7 +166,6 @@ 'doctype': 'DocField', 'fieldname': 'middle_name', 'fieldtype': 'Data', - 'idx': 10, 'label': 'Middle Name (Optional)', 'oldfieldname': 'middle_name', 'oldfieldtype': 'Data', @@ -220,46 +177,17 @@ 'doctype': 'DocField', 'fieldname': 'last_name', 'fieldtype': 'Data', - 'idx': 11, 'label': 'Last Name', 'oldfieldname': 'last_name', 'oldfieldtype': 'Data', 'permlevel': 0 }, - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'email', - 'fieldtype': 'Data', - 'hidden': 0, - 'idx': 12, - 'label': 'Email', - 'oldfieldname': 'email', - 'oldfieldtype': 'Data', - 'permlevel': 0, - 'reqd': 1, - 'search_index': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'birth_date', - 'fieldtype': 'Date', - 'idx': 13, - 'label': 'Birth Date', - 'oldfieldname': 'birth_date', - 'oldfieldtype': 'Date', - 'permlevel': 0 - }, - # DocField { 'doctype': 'DocField', 'fieldname': 'gender', 'fieldtype': 'Select', - 'idx': 14, 'label': 'Gender', 'oldfieldname': 'gender', 'oldfieldtype': 'Select', @@ -271,51 +199,74 @@ # DocField { 'doctype': 'DocField', - 'fieldname': 'occupation', + 'fieldtype': 'Column Break', + 'oldfieldtype': 'Column Break', + 'permlevel': 1, + 'width': '50%' + }, + + # DocField + { + 'doctype': 'DocField', + 'fieldname': 'email', 'fieldtype': 'Data', - 'idx': 15, - 'label': 'Designation', - 'oldfieldname': 'occupation', + 'hidden': 0, + 'label': 'Email', + 'oldfieldname': 'email', 'oldfieldtype': 'Data', 'permlevel': 0, + 'reqd': 1, 'search_index': 0 }, - + # DocField { 'doctype': 'DocField', 'fieldname': 'bio', 'fieldtype': 'Text', - 'idx': 16, 'label': 'Bio', 'oldfieldname': 'bio', 'oldfieldtype': 'Text', 'permlevel': 0, - 
'search_index': 0 + 'search_index': 0, + 'hidden': 1 }, + # DocField { 'doctype': 'DocField', 'fieldname': 'interests', 'fieldtype': 'Text', - 'idx': 17, 'label': 'Interests', 'oldfieldname': 'interests', 'oldfieldtype': 'Text', + 'permlevel': 0, + 'hidden': 1 + }, + + # DocField + { + 'doctype': 'DocField', + 'fieldname': 'birth_date', + 'fieldtype': 'Date', + 'label': 'Birth Date', + 'oldfieldname': 'birth_date', + 'oldfieldtype': 'Date', 'permlevel': 0 }, + # DocField { 'doctype': 'DocField', 'fieldname': 'activities', 'fieldtype': 'Text', - 'idx': 18, 'label': 'Activities', 'oldfieldname': 'activities', 'oldfieldtype': 'Text', - 'permlevel': 0 + 'permlevel': 0, + 'hidden': 1 }, # DocField @@ -323,7 +274,6 @@ 'doctype': 'DocField', 'fieldname': 'messanger_status', 'fieldtype': 'Data', - 'idx': 19, 'label': 'Messanger Status', 'oldfieldname': 'messanger_status', 'oldfieldtype': 'Data', @@ -331,60 +281,11 @@ 'search_index': 0 }, - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'home_phone', - 'fieldtype': 'Data', - 'idx': 20, - 'label': 'Home Phone', - 'oldfieldname': 'home_phone', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'office_phone', - 'fieldtype': 'Data', - 'idx': 21, - 'label': 'Office Phone', - 'oldfieldname': 'office_phone', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'extension', - 'fieldtype': 'Data', - 'idx': 22, - 'label': 'Extension', - 'oldfieldname': 'extension', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'cell_no', - 'fieldtype': 'Data', - 'idx': 23, - 'label': 'Cell No', - 'oldfieldname': 'cell_no', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - # DocField { 'doctype': 'DocField', 'fieldname': 'user_type', 'fieldtype': 'Select', - 'idx': 24, 'label': 'User Type', 'oldfieldname': 'user_type', 'oldfieldtype': 'Select', @@ -392,150 +293,27 @@ 'permlevel': 0 }, - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'last_login', - 'fieldtype': 'Read Only', - 'hidden': 0, - 'idx': 25, - 'label': 'Last Login', - 'oldfieldname': 'last_login', - 'oldfieldtype': 'Read Only', - 'permlevel': 0, - 'reqd': 0, - 'search_index': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'last_ip', - 'fieldtype': 'Read Only', - 'idx': 26, - 'label': 'Last IP', - 'oldfieldname': 'last_ip', - 'oldfieldtype': 'Read Only', - 'permlevel': 0 - }, # DocField { 'doctype': 'DocField', 'fieldtype': 'Section Break', - 'idx': 27, - 'label': 'Address', - 'oldfieldtype': 'Section Break', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'line_1', - 'fieldtype': 'Data', - 'idx': 28, - 'label': 'Line 1', - 'oldfieldname': 'line_1', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'line_2', - 'fieldtype': 'Data', - 'idx': 29, - 'label': 'Line 2', - 'oldfieldname': 'line_2', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'city', - 'fieldtype': 'Data', - 'idx': 30, - 'label': 'City / Town', - 'oldfieldname': 'city', - 'oldfieldtype': 'Data', - 'permlevel': 0, - 'reqd': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'district', - 'fieldtype': 'Data', - 'idx': 31, - 'label': 'District', - 'oldfieldname': 'district', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'state', - 
'fieldtype': 'Data', - 'idx': 32, - 'label': 'State', - 'oldfieldname': 'state', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'country', - 'fieldtype': 'Data', - 'idx': 33, - 'label': 'Country', - 'oldfieldname': 'country', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'pin', - 'fieldtype': 'Data', - 'idx': 34, - 'label': 'Pin', - 'oldfieldname': 'pin', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldtype': 'Section Break', - 'idx': 35, - 'label': 'User Role', + 'hidden': 0, 'oldfieldtype': 'Section Break', - 'permlevel': 1 + 'permlevel': 1, + 'reqd': 0, + 'search_index': 0 }, - + # DocField { 'doctype': 'DocField', - 'fieldtype': 'Section Break', - 'hidden': 0, - 'idx': 36, + 'fieldtype': 'Column Break', 'label': 'Roles', - 'oldfieldtype': 'Section Break', + 'oldfieldtype': 'Column Break', 'permlevel': 1, - 'reqd': 0, - 'search_index': 0 - }, + 'width': '50%' + }, # DocField { @@ -545,7 +323,6 @@ 'fieldname': 'userroles', 'fieldtype': 'Table', 'hidden': 0, - 'idx': 37, 'label': 'User Roles', 'oldfieldname': 'userroles', 'oldfieldtype': 'Table', @@ -558,10 +335,9 @@ # DocField { 'doctype': 'DocField', - 'fieldtype': 'Section Break', - 'idx': 38, + 'fieldtype': 'Column Break', 'label': 'System Defaults', - 'oldfieldtype': 'Section Break', + 'oldfieldtype': 'Column Break', 'permlevel': 1, 'width': '50%' }, @@ -574,7 +350,6 @@ 'fieldname': 'defaults', 'fieldtype': 'Table', 'hidden': 0, - 'idx': 39, 'label': 'Defaults', 'oldfieldname': 'defaults', 'oldfieldtype': 'Table', @@ -588,145 +363,69 @@ { 'doctype': 'DocField', 'fieldtype': 'Section Break', - 'idx': 40, - 'label': 'Password', + 'label': 'Login Details', 'oldfieldtype': 'Section Break', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldtype': 'Column Break', - 'idx': 41, - 'label': 'Change Your Password', - 'oldfieldtype': 'Column Break', - 'permlevel': 1, - 'width': '50%' - }, - - # DocField - { - 'colour': 'Pink:FEF2EA', - 'doctype': 'DocField', - 'fieldname': 'password', - 'fieldtype': 'Data', - 'hidden': 1, - 'idx': 42, - 'label': 'Current Password', - 'oldfieldname': 'password', - 'oldfieldtype': 'Data', - 'permlevel': 1, - 'reqd': 0, - 'search_index': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'new_password', - 'fieldtype': 'Password', - 'idx': 43, - 'label': 'New Password', - 'oldfieldname': 'new_password', - 'oldfieldtype': 'Password', - 'permlevel': 1 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'retype_new_password', - 'fieldtype': 'Password', - 'idx': 44, - 'label': 'Retype New Password', - 'oldfieldname': 'retype_new_password', - 'oldfieldtype': 'Password', - 'permlevel': 1 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'password_last_updated', - 'fieldtype': 'Date', - 'hidden': 1, - 'idx': 45, - 'label': 'Password Last Updated', - 'oldfieldname': 'password_last_updated', - 'oldfieldtype': 'Date', - 'permlevel': 1, - 'print_hide': 1 + 'permlevel': 0, }, # DocField { 'doctype': 'DocField', - 'fieldtype': 'Button', - 'idx': 46, - 'label': 'Change Password', - 'oldfieldtype': 'Button', - 'permlevel': 1, - 'trigger': 'Client', - 'width': '120px' + 'fieldname': 'login_before', + 'fieldtype': 'Int', + 'label': 'Login Before', + 'permlevel': 0 }, # DocField { 'doctype': 'DocField', - 'fieldtype': 'Section Break', - 'idx': 47, - 'label': 'Attachment', - 'oldfieldtype': 'Section 
Break', - 'permlevel': 1 + 'fieldname': 'login_after', + 'fieldtype': 'Int', + 'label': 'Login After', + 'permlevel': 0 }, # DocField { - 'default': '0', 'doctype': 'DocField', - 'fieldname': 'social_points', - 'fieldtype': 'Int', - 'idx': 48, - 'label': 'Social Points', - 'oldfieldname': 'social_points', - 'oldfieldtype': 'Int', + 'fieldname': 'restrict_ip', + 'fieldtype': 'Data', + 'label': 'Restrict IP', 'permlevel': 0 }, # DocField { 'doctype': 'DocField', - 'fieldname': 'social_badge', - 'fieldtype': 'Data', - 'idx': 49, - 'label': 'Social Badge', - 'oldfieldname': 'social_badge', - 'oldfieldtype': 'Data', - 'permlevel': 0 + 'fieldtype': 'Column Break', + 'oldfieldtype': 'Column Break', + 'width': '50%' }, # DocField { 'doctype': 'DocField', - 'fieldname': 'avatar', - 'fieldtype': 'Data', - 'idx': 50, - 'label': 'Avatar', - 'oldfieldname': 'avatar', - 'oldfieldtype': 'Data', - 'permlevel': 0 + 'fieldname': 'last_login', + 'fieldtype': 'Read Only', + 'hidden': 0, + 'label': 'Last Login', + 'oldfieldname': 'last_login', + 'oldfieldtype': 'Read Only', + 'permlevel': 0, + 'reqd': 0, + 'search_index': 0 }, # DocField { 'doctype': 'DocField', - 'fieldtype': 'HTML', - 'idx': 51, - 'label': 'Attachment HTML', - 'oldfieldtype': 'HTML', - 'options': 'First attachment must be the picture', - 'permlevel': 1 + 'fieldname': 'last_ip', + 'fieldtype': 'Read Only', + 'label': 'Last IP', + 'oldfieldname': 'last_ip', + 'oldfieldtype': 'Read Only', + 'permlevel': 0 }, # DocField @@ -735,28 +434,9 @@ 'fieldname': 'file_list', 'fieldtype': 'Text', 'hidden': 1, - 'idx': 52, 'label': 'File List', 'oldfieldname': 'file_list', 'oldfieldtype': 'Text', 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'fiscal_year', - 'fieldtype': 'Select', - 'hidden': 1, - 'idx': 53, - 'in_filter': 1, - 'label': 'Fiscal Year', - 'no_copy': 1, - 'oldfieldname': 'fiscal_year', - 'oldfieldtype': 'Select', - 'options': 'link:Fiscal Year', - 'permlevel': 0, - 'print_hide': 1, - 'report_hide': 1 - } + } ] \ No newline at end of file diff --git a/cgi-bin/core/doctype/search_criteria/search_criteria.txt b/cgi-bin/core/doctype/search_criteria/search_criteria.txt index 0a4f09e55d..a3ca3ae369 100644 --- a/cgi-bin/core/doctype/search_criteria/search_criteria.txt +++ b/cgi-bin/core/doctype/search_criteria/search_criteria.txt @@ -87,10 +87,13 @@ # DocField { 'doctype': 'DocField', - 'fieldtype': 'Section Break', - 'idx': 1, - 'label': 'Details', - 'oldfieldtype': 'Section Break', + 'fieldname': 'criteria_name', + 'fieldtype': 'Data', + 'hidden': 0, + 'idx': 5, + 'label': 'Criteria Name', + 'oldfieldname': 'criteria_name', + 'oldfieldtype': 'Data', 'permlevel': 0 }, @@ -135,41 +138,6 @@ 'search_index': 1 }, - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'criteria_name', - 'fieldtype': 'Data', - 'hidden': 0, - 'idx': 5, - 'label': 'Criteria Name', - 'oldfieldname': 'criteria_name', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'description', - 'fieldtype': 'Text', - 'idx': 6, - 'label': 'Description', - 'oldfieldname': 'description', - 'oldfieldtype': 'Text', - 'permlevel': 0, - 'width': '300px' - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldtype': 'Section Break', - 'idx': 7, - 'label': 'Query Details', - 'oldfieldtype': 'Section Break', - 'permlevel': 0 - }, # DocField { @@ -189,7 +157,7 @@ 'doctype': 'DocField', 'fieldname': 'filters', 'fieldtype': 'Text', - 'hidden': 0, + 'hidden': 1, 'idx': 9, 'label': 'Filters', 'oldfieldname': 
'filters', @@ -288,32 +256,6 @@ 'permlevel': 0 }, - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'graph_series', - 'fieldtype': 'Data', - 'hidden': 0, - 'idx': 17, - 'label': 'Graph Series', - 'oldfieldname': 'graph_series', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'graph_values', - 'fieldtype': 'Data', - 'hidden': 0, - 'idx': 18, - 'label': 'Graph Values', - 'oldfieldname': 'graph_values', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - # DocField { 'doctype': 'DocField', diff --git a/cgi-bin/webnotes/auth.py b/cgi-bin/webnotes/auth.py index a3f04a1a1b..6858151560 100644 --- a/cgi-bin/webnotes/auth.py +++ b/cgi-bin/webnotes/auth.py @@ -127,8 +127,9 @@ class LoginManager: # --------------------------- def post_login(self): - self.validate_ip_address() self.run_trigger() + self.validate_ip_address() + self.validate_hour() # check password # -------------- @@ -186,16 +187,38 @@ class LoginManager: # ------------- def validate_ip_address(self): - try: - ip = webnotes.conn.sql("select ip_address from tabProfile where name = '%s'" % self.user)[0][0] or '' - except: return + ip_list = webnotes.conn.get_value('Profile', self.user, 'restrict_ip', ignore=True) + + if not ip_list: + return + + ip_list = ip_list.replace(",", "\n").split('\n') + ip_list = [i.strip() for i in ip_list] + + for ip in ip_list: + if webnotes.remote_ip.startswith(ip): + return - ip = ip.replace(",", "\n").split('\n') - ip = [i.strip() for i in ip] + webnotes.msgprint('Not allowed from this IP Address', raise_exception=1) + + def validate_hour(self): + """ + check if user is logging in during restricted hours + """ + login_before = int(webnotes.conn.get_value('Profile', self.user, 'login_before', ignore=True) or 0) + login_after = int(webnotes.conn.get_value('Profile', self.user, 'login_after', ignore=True) or 0) + + if not (login_before or login_after): + return - if ret and ip: - if not (webnotes.remote_ip.startswith(ip[0]) or (webnotes.remote_ip in ip)): - raise Exception, 'Not allowed from this IP Address' + from webnotes.utils import now_datetime + current_hour = int(now_datetime().strftime('%H')) + + if login_before and current_hour > login_before: + webnotes.msgprint('Not allowed to login after restricted hour', raise_exception=1) + + if login_after and current_hour < login_after: + webnotes.msgprint('Not allowed to login before restricted hour', raise_exception=1) # login as guest # -------------- diff --git a/cgi-bin/webnotes/db.py b/cgi-bin/webnotes/db.py index 45837a2051..f288714d47 100644 --- a/cgi-bin/webnotes/db.py +++ b/cgi-bin/webnotes/db.py @@ -255,7 +255,7 @@ class Database: # ====================================================================================== # get a single value from a record - def get_value(self, doctype, docname, fieldname): + def get_value(self, doctype, docname, fieldname, ignore=None): """ Get a single / multiple value from a record. 
@@ -266,8 +266,13 @@ class Database: if docname and (docname!=doctype or docname=='DocType'): if type(fieldname) in (list, tuple): fl = '`, `'.join(fieldname) - - r = self.sql("select `%s` from `tab%s` where name='%s'" % (fl, doctype, docname)) + try: + r = self.sql("select `%s` from `tab%s` where name='%s'" % (fl, doctype, docname)) + except Exception, e: + if e.args[0]==1054 and ignore: + return None + else: + raise e return r and (len(r[0]) > 1 and r[0] or r[0][0]) or None else: if type(fieldname) in (list, tuple): diff --git a/cgi-bin/webnotes/model/utils.py b/cgi-bin/webnotes/model/utils.py index b9892067d3..b8c45507f0 100644 --- a/cgi-bin/webnotes/model/utils.py +++ b/cgi-bin/webnotes/model/utils.py @@ -186,9 +186,11 @@ def commonify_doclist(doclist, with_comments=1): c[k] = doclist[0][k] return c - def strip_common(d): + def strip_common_and_idx(d): for k in common_keys: if k in d: del d[k] + + if 'idx' in d: del d['idx'] return d def make_common_dicts(doclist): @@ -211,7 +213,7 @@ def commonify_doclist(doclist, with_comments=1): # make docs final = [] for d in doclist: - f = strip_common(get_diff_dict(common_dict[d['doctype']], d)) + f = strip_common_and_idx(get_diff_dict(common_dict[d['doctype']], d)) f['doctype'] = d['doctype'] # keep doctype! # strip name for child records (only an auto generated number!) @@ -228,7 +230,7 @@ def commonify_doclist(doclist, with_comments=1): d['name']='__common__' if with_comments: d['##comment'] = 'These values are common for all ' + d['doctype'] - commons.append(strip_common(d)) + commons.append(strip_common_and_idx(d)) common_values = make_common(doclist) return [common_values]+commons+final @@ -237,18 +239,32 @@ def uncommonify_doclist(dl): """ Expands an commonified doclist """ + # first one has common values common_values = dl[0] common_dict = {} final = [] + idx_dict = {} for d in dl[1:]: if 'name' in d and d['name']=='__common__': + # common for a doctype - del d['name'] common_dict[d['doctype']] = d else: + dt = d['doctype'] + if not dt in idx_dict: idx_dict[dt] = 0; d1 = common_values.copy() - d1.update(common_dict[d['doctype']]) + + # update from common and global + d1.update(common_dict[dt]) d1.update(d) + + # idx by sequence + d1['idx'] = idx_dict[dt] + + # increment idx + idx_dict[dt] += 1 + final.append(d1) return final diff --git a/js/widgets/report_builder/report_builder.js b/js/widgets/report_builder/report_builder.js index f10144bb25..379c8c6cbd 100644 --- a/js/widgets/report_builder/report_builder.js +++ b/js/widgets/report_builder/report_builder.js @@ -507,7 +507,13 @@ _r.ReportBuilder.prototype.set_sort_options = function(l) { empty_select(this.dt.sort_sel); - if(l) sl = add_lists(l, this.orig_sort_list) + if(l) sl = add_lists(l, this.orig_sort_list); + + // no sorts, add one + if(!l.length) { + l.push(['ID', 'name']) + } + for(var i=0; i Date: Thu, 25 Aug 2011 16:10:50 +0530 Subject: [PATCH 07/22] trigger form_refresh --- js/form.compressed.js | 3 ++- js/widgets/form/form.js | 5 +++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/js/form.compressed.js b/js/form.compressed.js index c6f5ff2489..f4a7be5b96 100644 --- a/js/form.compressed.js +++ b/js/form.compressed.js @@ -121,7 +121,8 @@ if(this.docname){if(!this.check_doc_perm())return;if(!this.setup_done)this.setup if(this.doc.__islocal) 
this.is_editable[this.docname]=1;this.editable=this.is_editable[this.docname];if(!this.doc.__archived&&(this.editable||(!this.editable&&this.meta.istable))){if(this.print_wrapper){$dh(this.print_wrapper);$ds(this.page_layout.wrapper);} if(!this.meta.istable){this.refresh_header();this.sidebar&&this.sidebar.refresh();} -this.runclientscript('refresh');this.refresh_tabs();this.refresh_fields();this.refresh_dependency();this.refresh_footer();if(this.layout)this.layout.show();if(is_onload) +this.runclientscript('refresh');$(document).trigger('form_refresh') +this.refresh_tabs();this.refresh_fields();this.refresh_dependency();this.refresh_footer();if(this.layout)this.layout.show();if(is_onload) this.runclientscript('onload_post_render',this.doctype,this.docname);}else{this.refresh_header();if(this.print_wrapper){this.refresh_print_layout();} this.runclientscript('edit_status_changed');} if(!this.display)this.show_the_frm();if(!this.meta.in_dialog)page_body.change_to('Forms');}} diff --git a/js/widgets/form/form.js b/js/widgets/form/form.js index 0202f92e03..fd6034e83c 100644 --- a/js/widgets/form/form.js +++ b/js/widgets/form/form.js @@ -622,6 +622,11 @@ _f.Frm.prototype.refresh = function(docname) { // call trigger this.runclientscript('refresh'); + // trigger global trigger + // to use this + // $(docuemnt).bind('form_refresh', function() { }) + $(document).trigger('form_refresh') + // tabs this.refresh_tabs(); From c1a0b2323bcfaf2beb934e60d253a6c41b72d798 Mon Sep 17 00:00:00 2001 From: Nabin Hait Date: Thu, 25 Aug 2011 16:46:59 +0530 Subject: [PATCH 08/22] idx converted to integer --- cgi-bin/webnotes/model/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cgi-bin/webnotes/model/utils.py b/cgi-bin/webnotes/model/utils.py index b9892067d3..07504093ca 100644 --- a/cgi-bin/webnotes/model/utils.py +++ b/cgi-bin/webnotes/model/utils.py @@ -74,13 +74,13 @@ def getlist(doclist, field): if d.type=='Phone': pl.append(d) """ - + from webnotes.utils import cint l = [] for d in doclist: if d.parent and (not d.parent.lower().startswith('old_parent:')) and d.parentfield == field: l.append(d) - l.sort(lambda a, b: a.idx - b.idx) + l.sort(lambda a, b: cint(a.idx) - cint(b.idx)) return l From eaea1d53898d25015d4cdd90d9b75c738dbfa84e Mon Sep 17 00:00:00 2001 From: Rushabh Mehta Date: Thu, 25 Aug 2011 19:17:20 +0530 Subject: [PATCH 09/22] minor fixes --- cgi-bin/webnotes/utils/__init__.py | 8 ++++++-- js/webpage/page.js | 2 +- js/wnf.compressed.js | 2 +- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/cgi-bin/webnotes/utils/__init__.py b/cgi-bin/webnotes/utils/__init__.py index 1376ed7d41..6e342f1802 100644 --- a/cgi-bin/webnotes/utils/__init__.py +++ b/cgi-bin/webnotes/utils/__init__.py @@ -119,7 +119,7 @@ def getdate(string_date): else: return '' -def add_days(date, days): +def add_days(date, days, format='string'): """ Adds `days` to the given `string_date` """ @@ -130,7 +130,11 @@ def add_days(date, days): if type(date) not in (datetime.datetime, datetime.date): date = getdate(date) - return (date + datetime.timedelta(days)).strftime('%Y-%m-%d') + dt = date + datetime.timedelta(days) + if format=='string': + return dt.strftime('%Y-%m-%d') + else: + return dt def add_months(string_date, months): import datetime diff --git a/js/webpage/page.js b/js/webpage/page.js index 9a54138b01..836bc74aac 100644 --- a/js/webpage/page.js +++ b/js/webpage/page.js @@ -56,7 +56,7 @@ function render_page(page_name, menuitem) { p.doc = pdoc; if(script) { - try { eval(script); } 
catch(e) { submit_error(e); } + eval(script); } // change diff --git a/js/wnf.compressed.js b/js/wnf.compressed.js index 2848ab2bd2..86fc63b145 100644 --- a/js/wnf.compressed.js +++ b/js/wnf.compressed.js @@ -1130,7 +1130,7 @@ pages['_home']=this;return this;} function render_page(page_name,menuitem){if(!page_name)return;if((!locals['Page'])||(!locals['Page'][page_name])){loadpage('_home');return;} var pdoc=locals['Page'][page_name];if(pdoc.style)set_style(pdoc.style) if(pdoc.stylesheet){set_style(locals.Stylesheet[pdoc.stylesheet].stylesheet);stylesheets.push(pdoc.stylesheet);} -var p=new Page(page_name,pdoc._Page__content?pdoc._Page__content:pdoc.content);var script=pdoc.__script?pdoc.__script:pdoc.script;p.doc=pdoc;if(script){try{eval(script);}catch(e){submit_error(e);}} +var p=new Page(page_name,pdoc._Page__content?pdoc._Page__content:pdoc.content);var script=pdoc.__script?pdoc.__script:pdoc.script;p.doc=pdoc;if(script){eval(script);} page_body.change_to(page_name);try{if(pscript['onload_'+page_name])pscript['onload_'+page_name]();}catch(e){submit_error(e);} return p;} function refresh_page(page_name){var fn=function(r,rt){render_page(page_name)} From 6b9022140ce91703fdf800bc7499264f83d69486 Mon Sep 17 00:00:00 2001 From: Nabin Hait Date: Thu, 25 Aug 2011 19:42:35 +0530 Subject: [PATCH 10/22] dt mapper: flds with same name will map automatically --- .../doctype/doctype_mapper/doctype_mapper.py | 482 +++++++++--------- 1 file changed, 255 insertions(+), 227 deletions(-) diff --git a/cgi-bin/core/doctype/doctype_mapper/doctype_mapper.py b/cgi-bin/core/doctype/doctype_mapper/doctype_mapper.py index 7fb0f61766..1e64c6c4e3 100644 --- a/cgi-bin/core/doctype/doctype_mapper/doctype_mapper.py +++ b/cgi-bin/core/doctype/doctype_mapper/doctype_mapper.py @@ -18,230 +18,258 @@ convert_to_lists = webnotes.conn.convert_to_lists class DocType: - def __init__(self, doc, doclist=[]): - self.doc = doc - self.doclist = doclist - self.prefix = is_testing and 'test' or 'tab' - self.ref_doc = '' - # Autoname - #--------- - def autoname(self): - self.doc.name = make_autoname(self.doc.from_doctype + '-' + self.doc.to_doctype) - - # Map Custom Fields - # ------------------ - def map_custom_fields(self, from_doctype, to_doctype, from_doc, to_doc): - fld_list = [] - for d in sql("select fieldname from `tabCustom Field` where dt = %s and docstatus != 2",from_doctype): - if sql("select fieldname from `tabCustom Field` where dt = %s and fieldname = %s and docstatus != 2",(to_doctype, d[0])): - fld_list.append([d[0], d[0]]) - self.set_value(fld_list, from_doc, to_doc) - - # Maps the fields in 'To DocType' - #-------------------------------- - def dt_map(self, from_doctype, to_doctype, from_docname, to_doc, doclist, from_to_list = '[]'): - - # definition of arguments - ''' - String : contains the name of DocType initiating the function - String : contains the name of DocType created by the function - String : contains ID(name) of 'from_doctype' - String : contains doc of 'to_doctype' - String : contains doclist of 'to_doctype' - String : contains list of tables which will be mapped - ''' - # Validate reference doc docstatus - self.ref_doc = from_docname - self.check_ref_docstatus() - - if not doclist: - doclist.append(to_doc) - - tbl_list = sql("select from_table, to_table, from_field, to_field, match_id, validation_logic from `tabTable Mapper Detail` where parent ='%s' order by match_id" % (from_doctype + "-" + to_doctype)) - - for t in tbl_list: - from_table_name = t[0] - to_table_name = t[1] - from_table_fname = 
t[2] - to_table_fname = t[3] - match_id = t[4] - validation_logic = t[5] - - - from_to = [from_table_name, to_table_name] - - if from_to in eval(from_to_list): - fld_list = sql("select from_field, to_field from `tabField Mapper Detail` where parent = '%s' and match_id = %s and map = 'Yes'" % (from_doctype + "-" + to_doctype, match_id)) - if fld_list: - if not from_docname: - msgprint(from_doctype + " not selected for mapping") - raise Exception - - # Parent to parent mapping - if from_table_name == self.doc.from_doctype and to_table_name == self.doc.to_doctype: - - # Check validation - nm = sql("select name from `tab%s` where name = '%s' and %s" % (from_doctype, from_docname, validation_logic)) - nm = nm and nm[0][0] or '' - - # If validation failed raise exception - if not nm: - msgprint("Validation failed in doctype mapper. Please contact Administrator.") - raise Exception - - from_doc = Document(from_doctype, nm) - # Maps field in parent - self.set_value(fld_list, from_doc, to_doc) - # Map custom fields - self.map_custom_fields(from_doctype, to_doctype, from_doc, to_doc) - - # Parent to child OR child to child mapping - else: - dnlist = () - if from_table_name == self.doc.from_doctype: - dnlist = ((from_docname,),) - else: - dnlist = sql("select name from `tab%s` where parent='%s' and parenttype = '%s' and %s order by idx" % (from_table_name, from_docname, self.doc.from_doctype, validation_logic)) - - for dn in dnlist: - # Add a row in target table in 'To DocType' and returns obj - ch = addchild(to_doc, t[3], t[1], 1, doclist) - # Creates object for 'From DocType', it can be parent or child - d = Document(t[0], dn[0]) - # Map values - self.set_value(fld_list, d, ch) - # Map custom fields - self.map_custom_fields(from_table_name, t[1], d, ch) - - # Required when called from server side for refreshing table - return doclist - - # Assigns value to "To Doctype" - #------------------------------ - def set_value(self, fld_list, obj, to_doc): - for f in fld_list: - if f[0].startswith('eval:'): - to_doc.fields[f[1]] = eval(f[0][5:]) - else: - to_doc.fields[f[1]] = obj.fields.get(f[0]) - - # Validate - #--------- - def validate(self): - for d in getlist(self.doclist, 'field_mapper_details'): - # Automatically assigns default value if not entered - if not d.match_id: - d.match_id = 0 - if not d.map: - d.map = 'Yes' - for d in getlist(self.doclist, 'table_mapper_details'): - if not d.reference_doctype_key: - d.reference_doctype_key = '' - if not d.reference_key: - d.reference_key = '' - - # Check wrong field name - self.check_fields_in_dt() - - # Check if any wrong fieldname entered - #-------------------------------------- - def check_fields_in_dt(self): - for d in getlist(self.doclist, 'field_mapper_details'): - table_name = sql("select from_table, to_table from `tabTable Mapper Detail` where parent ='%s' and match_id = '%s'" % (self.doc.name, d.match_id)) - - if table_name: - exists1 = sql("select name from tabDocField where parent = '%s' and fieldname = '%s'" % (table_name[0][0], d.from_field)) - exists2 = sql("select name from tabDocField where parent = '%s' and fieldname = '%s'" % (table_name[0][1], d.to_field)) - - # Default fields like name, parent, owner does not exists in DocField - if not exists1 and d.from_field not in default_fields: - msgprint('"' + cstr(d.from_field) + '" does not exists in DocType "' + cstr(table_name[0][0]) + '"') - if not exists2 and d.to_field not in default_fields: - msgprint('"' + cstr(d.to_field) + '" does not exists in DocType "' + cstr(table_name[0][1]) + 
'"') - - # Check consistency of value with reference document - #--------------------------------------------------- - def validate_reference_value(self, obj, to_docname): - for t in getlist(self.doclist, 'table_mapper_details'): - # Reference key is the fieldname which will relate to the from_table - if t.reference_doctype_key: - for d in getlist(obj.doclist, t.to_field): - if d.fields[t.reference_doctype_key] == self.doc.from_doctype: - self.check_consistency(obj.doc, d, to_docname) - self.check_ref_docstatus() - - # Make list of fields whose value will be consistent with prevdoc - #----------------------------------------------------------------- - def get_checklist(self): - checklist = [] - for f in getlist(self.doclist, 'field_mapper_details'): - - # Check which field's value will be compared - if f.checking_operator: - checklist.append([f.from_field, f.to_field, f.checking_operator, f.match_id]) - return checklist - - def check_fld_type(self, tbl, fld, cur_val): - ft = sql("select fieldtype from tabDocField where fieldname = '%s' and parent = '%s'" % (fld,tbl)) - ft = ft and ft[0][0] or '' - if ft == 'Currency' or ft == 'Float': - cur_val = '%.2f' % cur_val - return cur_val, ft - - # Check consistency - #------------------- - def check_consistency(self, par_obj, child_obj, to_docname): - checklist = self.get_checklist() - self.ref_doc = '' - for t in getlist(self.doclist, 'table_mapper_details'): - if t.reference_key and child_obj.fields[t.reference_key]: - for cl in checklist: - if cl[3] == t.match_id: - if t.to_field: - cur_val = child_obj.fields[cl[1]] - else: - cur_val = par_obj.fields[cl[1]] - - ft = self.check_fld_type(t.to_table, cl[1], cur_val) - cur_val = ft[0] - - if cl[2] == '=' and (ft[1] == 'Currency' or ft[1] == 'Float'): - consistent = sql("select name, %s from `tab%s` where name = '%s' and '%s' - %s <= 0.5" % (cl[0], t.from_table, child_obj.fields[t.reference_key], flt(cur_val), cl[0])) - else: - #consistent = sql("select name, %s from `tab%s` where name = '%s' and '%s' %s %s" % (cl[0], t.from_table, child_obj.fields[t.reference_key], cur_val, cl[2], cl[0])) - consistent = sql("select name, %s from `tab%s` where name = '%s' and '%s' %s ifnull(%s, '')" % (cl[0], t.from_table, child_obj.fields[t.reference_key], ft[1] in ('Currency', 'Float', 'Int') and flt(cur_val) or cstr(cur_val), cl[2], cl[0])) - - if not self.ref_doc: - det = sql("select name, parent from `tab%s` where name = '%s'" % (t.from_table, child_obj.fields[t.reference_key])) - self.ref_doc = det[0][1] and det[0][1] or det[0][0] - - if not consistent: - self.give_message(t.from_table, t.to_table, cl[0], cl[1], child_obj.fields[t.reference_key], cl[2]) - - # Gives message and raise exception - #----------------------------------- - def give_message(self, from_table, to_table, from_field, to_field, ref_value, operator): - # Select label of the field - to_fld_label = sql("select label from tabDocField where parent = '%s' and fieldname = '%s'" % (to_table, to_field)) - from_fld_label = sql("select label from tabDocField where parent = '%s' and fieldname = '%s'" % (from_table, from_field)) - - op_in_words = {'=':'equal to ', '>=':'greater than equal to ', '>':'greater than ', '<=':'less than equal to ', '<':'less than '} - msgprint(to_fld_label[0][0] + " should be " + op_in_words[operator] + from_fld_label[0][0] + " of " + self.doc.from_doctype + ": " + self.ref_doc) - raise Exception, "Validation Error." 
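A note on the consistency validation in this hunk: check_consistency() compares a field on the mapped document against the corresponding field on the source row it was mapped from, using the operator stored in Field Mapper Detail, and give_message() reports any failure in words. A rough standalone sketch of that rule with the SQL stripped away (the function and argument names below are illustrative, not the mapper's API):

import operator

# OPS mirrors op_in_words above; the '=' branch for Currency/Float mirrors
# the one-sided "<= 0.5" rounding clause used in check_consistency().
OPS = {'=': operator.eq, '>=': operator.ge, '>': operator.gt,
       '<=': operator.le, '<': operator.lt}

def is_consistent(mapped_value, source_value, op, numeric=False):
    if numeric:
        mapped_value, source_value = float(mapped_value), float(source_value or 0)
        if op == '=':
            # Currency/Float equality tolerates a small rounding difference
            return mapped_value - source_value <= 0.5
    return OPS[op](mapped_value, source_value)

# e.g. a mapped qty must not exceed the qty on the source row
print(is_consistent(8, 10, '<='))                       # True
print(is_consistent(12, 10, '<='))                      # False
print(is_consistent(99.99, 100.0, '=', numeric=True))   # True, within rounding tolerance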
- - def check_ref_docstatus(self): - if self.ref_doc: - det = sql("select name, docstatus from `tab%s` where name = '%s'" % (self.doc.from_doctype, self.ref_doc)) - if not det: - msgprint(self.doc.from_doctype + ": " + self.ref_doc + " does not exists in the system") - raise Exception, "Validation Error." - elif self.doc.ref_doc_submitted and det[0][1] != 1: - msgprint(self.doc.from_doctype + ": " + self.ref_doc + " is not Submitted Document.") - raise Exception, "Validation Error." - - def on_update(self): - import webnotes.defs - if hasattr(webnotes.defs, 'developer_mode') and webnotes.defs.developer_mode: - from webnotes.modules.export_module import export_to_files - export_to_files(record_list=[[self.doc.doctype, self.doc.name]]) - + def __init__(self, doc, doclist=[]): + self.doc = doc + self.doclist = doclist + self.prefix = is_testing and 'test' or 'tab' + self.ref_doc = '' + # Autoname + #--------- + def autoname(self): + self.doc.name = make_autoname(self.doc.from_doctype + '-' + self.doc.to_doctype) + + # Map Custom Fields + # ------------------ + def map_custom_fields(self, from_doctype, to_doctype, from_doc, to_doc): + fld_list = [] + for d in sql("select fieldname from `tabCustom Field` where dt = %s and docstatus != 2",from_doctype): + if sql("select fieldname from `tabCustom Field` where dt = %s and fieldname = %s and docstatus != 2",(to_doctype, d[0])): + fld_list.append([d[0], d[0], 'Yes']) + self.set_value(fld_list, from_doc, to_doc) + + def map_fields_with_same_name(self, from_doctype, to_doctype, from_doc, to_doc, fld_list): + """ + Returns field list with same name in from and to doctype + """ + exception_flds = [f[0] for f in fld_list if f[2] == 'No'] + + map_fld_list = [ + [d[0], d[0], 'Yes'] for d in sql(""" + select t1.fieldname + from `tabDocField` t1, `tabDocField` t2 + where t1.parent = %s and t2.parent = %s + and t1.fieldname = t2.fieldname + and t1.docstatus != 2 and t2.docstatus != 2 + and ifnull(t1.fieldname, '') != '' + """,(from_doctype, to_doctype)) if d[0] not in exception_flds + ] + + self.set_value(map_fld_list, from_doc, to_doc) + + + # Maps the fields in 'To DocType' + #-------------------------------- + def dt_map(self, from_doctype, to_doctype, from_docname, to_doc, doclist, from_to_list = '[]'): + + # definition of arguments + ''' + String : contains the name of DocType initiating the function + String : contains the name of DocType created by the function + String : contains ID(name) of 'from_doctype' + String : contains doc of 'to_doctype' + String : contains doclist of 'to_doctype' + String : contains list of tables which will be mapped + ''' + # Validate reference doc docstatus + self.ref_doc = from_docname + self.check_ref_docstatus() + + if not doclist: + doclist.append(to_doc) + + tbl_list = sql("select from_table, to_table, from_field, to_field, match_id, validation_logic from `tabTable Mapper Detail` where parent ='%s' order by match_id" % (from_doctype + "-" + to_doctype)) + + for t in tbl_list: + from_table_name = t[0] + to_table_name = t[1] + from_table_fname = t[2] + to_table_fname = t[3] + match_id = t[4] + validation_logic = t[5] + + + from_to = [from_table_name, to_table_name] + + if from_to in eval(from_to_list): + fld_list = sql("select from_field, to_field, map from `tabField Mapper Detail` where parent = '%s' and match_id = %s" % (from_doctype + "-" + to_doctype, match_id)) + if fld_list: + if not from_docname: + msgprint(from_doctype + " not selected for mapping") + raise Exception + + # Parent to parent mapping + if 
from_table_name == self.doc.from_doctype and to_table_name == self.doc.to_doctype: + + # Check validation + nm = sql("select name from `tab%s` where name = '%s' and %s" % (from_doctype, from_docname, validation_logic)) + nm = nm and nm[0][0] or '' + + # If validation failed raise exception + if not nm: + msgprint("Validation failed in doctype mapper. Please contact Administrator.") + raise Exception + + from_doc = Document(from_doctype, nm) + + # Map fields with same name + self.map_fields_with_same_name(from_doctype, to_doctype, from_doc, to_doc, fld_list) + # Maps field in parent + self.set_value(fld_list, from_doc, to_doc) + # Map custom fields + self.map_custom_fields(from_doctype, to_doctype, from_doc, to_doc) + + # Parent to child OR child to child mapping + else: + dnlist = () + if from_table_name == self.doc.from_doctype: + dnlist = ((from_docname,),) + else: + dnlist = sql("select name from `tab%s` where parent='%s' and parenttype = '%s' and %s order by idx" % (from_table_name, from_docname, self.doc.from_doctype, validation_logic)) + + for dn in dnlist: + # Add a row in target table in 'To DocType' and returns obj + ch = addchild(to_doc, t[3], t[1], 1, doclist) + # Creates object for 'From DocType', it can be parent or child + d = Document(t[0], dn[0]) + # Map fields with same name + self.map_fields_with_same_name(from_table_name, t[1], d, ch, fld_list) + # Map values + self.set_value(fld_list, d, ch) + # Map custom fields + self.map_custom_fields(from_table_name, t[1], d, ch) + + + # Required when called from server side for refreshing table + return doclist + + # Assigns value to "To Doctype" + #------------------------------ + def set_value(self, fld_list, obj, to_doc): + for f in fld_list: + if f[2] == 'Yes': + if f[0].startswith('eval:'): + to_doc.fields[f[1]] = eval(f[0][5:]) + else: + to_doc.fields[f[1]] = obj.fields.get(f[0]) + + + # Validate + #--------- + def validate(self): + for d in getlist(self.doclist, 'field_mapper_details'): + # Automatically assigns default value if not entered + if not d.match_id: + d.match_id = 0 + if not d.map: + d.map = 'Yes' + for d in getlist(self.doclist, 'table_mapper_details'): + if not d.reference_doctype_key: + d.reference_doctype_key = '' + if not d.reference_key: + d.reference_key = '' + + # Check wrong field name + self.check_fields_in_dt() + + # Check if any wrong fieldname entered + #-------------------------------------- + def check_fields_in_dt(self): + for d in getlist(self.doclist, 'field_mapper_details'): + table_name = sql("select from_table, to_table from `tabTable Mapper Detail` where parent ='%s' and match_id = '%s'" % (self.doc.name, d.match_id)) + + if table_name: + exists1 = sql("select name from tabDocField where parent = '%s' and fieldname = '%s'" % (table_name[0][0], d.from_field)) + exists2 = sql("select name from tabDocField where parent = '%s' and fieldname = '%s'" % (table_name[0][1], d.to_field)) + + # Default fields like name, parent, owner does not exists in DocField + if not exists1 and d.from_field not in default_fields: + msgprint('"' + cstr(d.from_field) + '" does not exists in DocType "' + cstr(table_name[0][0]) + '"') + if not exists2 and d.to_field not in default_fields: + msgprint('"' + cstr(d.to_field) + '" does not exists in DocType "' + cstr(table_name[0][1]) + '"') + + # Check consistency of value with reference document + #--------------------------------------------------- + def validate_reference_value(self, obj, to_docname): + for t in getlist(self.doclist, 'table_mapper_details'): + # 
Reference key is the fieldname which will relate to the from_table + if t.reference_doctype_key: + for d in getlist(obj.doclist, t.to_field): + if d.fields[t.reference_doctype_key] == self.doc.from_doctype: + self.check_consistency(obj.doc, d, to_docname) + self.check_ref_docstatus() + + # Make list of fields whose value will be consistent with prevdoc + #----------------------------------------------------------------- + def get_checklist(self): + checklist = [] + for f in getlist(self.doclist, 'field_mapper_details'): + + # Check which field's value will be compared + if f.checking_operator: + checklist.append([f.from_field, f.to_field, f.checking_operator, f.match_id]) + return checklist + + def check_fld_type(self, tbl, fld, cur_val): + ft = sql("select fieldtype from tabDocField where fieldname = '%s' and parent = '%s'" % (fld,tbl)) + ft = ft and ft[0][0] or '' + if ft == 'Currency' or ft == 'Float': + cur_val = '%.2f' % cur_val + return cur_val, ft + + # Check consistency + #------------------- + def check_consistency(self, par_obj, child_obj, to_docname): + checklist = self.get_checklist() + self.ref_doc = '' + for t in getlist(self.doclist, 'table_mapper_details'): + if t.reference_key and child_obj.fields[t.reference_key]: + for cl in checklist: + if cl[3] == t.match_id: + if t.to_field: + cur_val = child_obj.fields[cl[1]] + else: + cur_val = par_obj.fields[cl[1]] + + ft = self.check_fld_type(t.to_table, cl[1], cur_val) + cur_val = ft[0] + + if cl[2] == '=' and (ft[1] == 'Currency' or ft[1] == 'Float'): + consistent = sql("select name, %s from `tab%s` where name = '%s' and '%s' - %s <= 0.5" % (cl[0], t.from_table, child_obj.fields[t.reference_key], flt(cur_val), cl[0])) + else: + #consistent = sql("select name, %s from `tab%s` where name = '%s' and '%s' %s %s" % (cl[0], t.from_table, child_obj.fields[t.reference_key], cur_val, cl[2], cl[0])) + consistent = sql("select name, %s from `tab%s` where name = '%s' and '%s' %s ifnull(%s, '')" % (cl[0], t.from_table, child_obj.fields[t.reference_key], ft[1] in ('Currency', 'Float', 'Int') and flt(cur_val) or cstr(cur_val), cl[2], cl[0])) + + if not self.ref_doc: + det = sql("select name, parent from `tab%s` where name = '%s'" % (t.from_table, child_obj.fields[t.reference_key])) + self.ref_doc = det[0][1] and det[0][1] or det[0][0] + + if not consistent: + self.give_message(t.from_table, t.to_table, cl[0], cl[1], child_obj.fields[t.reference_key], cl[2]) + + # Gives message and raise exception + #----------------------------------- + def give_message(self, from_table, to_table, from_field, to_field, ref_value, operator): + # Select label of the field + to_fld_label = sql("select label from tabDocField where parent = '%s' and fieldname = '%s'" % (to_table, to_field)) + from_fld_label = sql("select label from tabDocField where parent = '%s' and fieldname = '%s'" % (from_table, from_field)) + + op_in_words = {'=':'equal to ', '>=':'greater than equal to ', '>':'greater than ', '<=':'less than equal to ', '<':'less than '} + msgprint(to_fld_label[0][0] + " should be " + op_in_words[operator] + from_fld_label[0][0] + " of " + self.doc.from_doctype + ": " + self.ref_doc) + raise Exception, "Validation Error." + + def check_ref_docstatus(self): + if self.ref_doc: + det = sql("select name, docstatus from `tab%s` where name = '%s'" % (self.doc.from_doctype, self.ref_doc)) + if not det: + msgprint(self.doc.from_doctype + ": " + self.ref_doc + " does not exists in the system") + raise Exception, "Validation Error." 
+ elif self.doc.ref_doc_submitted and det[0][1] != 1: + msgprint(self.doc.from_doctype + ": " + self.ref_doc + " is not Submitted Document.") + raise Exception, "Validation Error." + + def on_update(self): + import webnotes.defs + if hasattr(webnotes.defs, 'developer_mode') and webnotes.defs.developer_mode: + from webnotes.modules.export_module import export_to_files + export_to_files(record_list=[[self.doc.doctype, self.doc.name]]) + From 567e4e5b643de026f62b31a03c19b4ac908b236c Mon Sep 17 00:00:00 2001 From: Nabin Hait Date: Mon, 29 Aug 2011 14:31:35 +0530 Subject: [PATCH 11/22] code shifted from custom script to py file --- cgi-bin/core/doctype/event/event.js | 9 +++++++++ cgi-bin/core/doctype/file/file.py | 14 ++++++++++++++ 2 files changed, 23 insertions(+) create mode 100644 cgi-bin/core/doctype/event/event.js create mode 100644 cgi-bin/core/doctype/file/file.py diff --git a/cgi-bin/core/doctype/event/event.js b/cgi-bin/core/doctype/event/event.js new file mode 100644 index 0000000000..dc229bd7b3 --- /dev/null +++ b/cgi-bin/core/doctype/event/event.js @@ -0,0 +1,9 @@ +cur_frm.cscript.onload = function(doc, cdt, cdn) { + var df = get_field('Event', 'Intro HTML', doc.name); + if(doc.ref_type) { + ref = repl(cur_frm.cstring.ref_html, {'dt': doc.ref_type, 'dn':doc.ref_name}); + } else var ref = ''; + + df.options = repl(cur_frm.cstring.intro_html, {'ref': ref}); + refresh_fields('Intro HTML'); +} diff --git a/cgi-bin/core/doctype/file/file.py b/cgi-bin/core/doctype/file/file.py new file mode 100644 index 0000000000..b957ea4c15 --- /dev/null +++ b/cgi-bin/core/doctype/file/file.py @@ -0,0 +1,14 @@ +class DocType: + def __init__(self, d, dl): + self.doc, self.doclist = d,dl + + def validate(self): + # check for extension + if not '.' in self.doc.file_name: + msgprint("Extension required in file name") + raise Exception + + # set mime type + if not self.doc.mime_type: + import mimetypes + self.doc.mime_type = mimetypes.guess_type(self.doc.file_name)[0] or 'application/unknown' From 8dc65af2af775131370e30dc8442bc88a2fa8433 Mon Sep 17 00:00:00 2001 From: Nabin Hait Date: Mon, 29 Aug 2011 16:42:24 +0530 Subject: [PATCH 12/22] removed custom fld mapping code: redundant --- .../core/doctype/doctype_mapper/doctype_mapper.py | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/cgi-bin/core/doctype/doctype_mapper/doctype_mapper.py b/cgi-bin/core/doctype/doctype_mapper/doctype_mapper.py index 1e64c6c4e3..b9b3382ec0 100644 --- a/cgi-bin/core/doctype/doctype_mapper/doctype_mapper.py +++ b/cgi-bin/core/doctype/doctype_mapper/doctype_mapper.py @@ -28,14 +28,6 @@ class DocType: def autoname(self): self.doc.name = make_autoname(self.doc.from_doctype + '-' + self.doc.to_doctype) - # Map Custom Fields - # ------------------ - def map_custom_fields(self, from_doctype, to_doctype, from_doc, to_doc): - fld_list = [] - for d in sql("select fieldname from `tabCustom Field` where dt = %s and docstatus != 2",from_doctype): - if sql("select fieldname from `tabCustom Field` where dt = %s and fieldname = %s and docstatus != 2",(to_doctype, d[0])): - fld_list.append([d[0], d[0], 'Yes']) - self.set_value(fld_list, from_doc, to_doc) def map_fields_with_same_name(self, from_doctype, to_doctype, from_doc, to_doc, fld_list): """ @@ -115,8 +107,7 @@ class DocType: self.map_fields_with_same_name(from_doctype, to_doctype, from_doc, to_doc, fld_list) # Maps field in parent self.set_value(fld_list, from_doc, to_doc) - # Map custom fields - self.map_custom_fields(from_doctype, to_doctype, from_doc, 
to_doc) + # Parent to child OR child to child mapping else: @@ -135,8 +126,7 @@ class DocType: self.map_fields_with_same_name(from_table_name, t[1], d, ch, fld_list) # Map values self.set_value(fld_list, d, ch) - # Map custom fields - self.map_custom_fields(from_table_name, t[1], d, ch) + # Required when called from server side for refreshing table From 7516379a837daa33843376744d26887e682fbfbe Mon Sep 17 00:00:00 2001 From: Nabin Hait Date: Mon, 29 Aug 2011 18:36:02 +0530 Subject: [PATCH 13/22] Error in patches will be mailed to developer@erpnext.com, no msg to login user --- cgi-bin/webnotes/modules/patch.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/cgi-bin/webnotes/modules/patch.py b/cgi-bin/webnotes/modules/patch.py index 401510b3c2..5ecd84446c 100644 --- a/cgi-bin/webnotes/modules/patch.py +++ b/cgi-bin/webnotes/modules/patch.py @@ -31,6 +31,7 @@ def write_log(): patch_log.write(('\n\nError in %s:\n' % webnotes.conn.cur_db_name) + webnotes.getTraceback()) patch_log.close() - webnotes.msgprint("There were errors in running patches, please call the Administrator") - - + from webnotes.utils import sendmail + subj = 'Error in running patches in %s' % webnotes.conn.cur_db_name + msg = subj + '

Login User: ' + webnotes.user.name + '

' + webnotes.getTraceback() + sendmail(['developer@erpnext.com'], sender='automail@erpnext.com', subject= subj, parts=[['text/plain', msg]]) From 1397a37855163fc727e6d907851613e08105179d Mon Sep 17 00:00:00 2001 From: Anand Doshi Date: Mon, 29 Aug 2011 22:11:02 +0530 Subject: [PATCH 14/22] Changed backup script to get password from defs --- cgi-bin/webnotes/utils/backups.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/cgi-bin/webnotes/utils/backups.py b/cgi-bin/webnotes/utils/backups.py index 8865481ade..26e1f98771 100644 --- a/cgi-bin/webnotes/utils/backups.py +++ b/cgi-bin/webnotes/utils/backups.py @@ -123,7 +123,7 @@ def get_backup(): """ #if verbose: print webnotes.conn.cur_db_name + " " + webnotes.defs.db_password odb = BackupGenerator(webnotes.conn.cur_db_name, webnotes.conn.cur_db_name,\ - webnotes.defs.db_password) + get_db_password(webnotes.conn.cur_db_name)) recipient_list = odb.get_backup() delete_temp_backups() webnotes.msgprint("""A download link to your backup will be emailed \ @@ -131,6 +131,19 @@ def get_backup(): %s""" % (', '.join(recipient_list))) +def get_db_password(db_name): + """ + Get db password from defs + """ + from webnotes import defs + if hasattr(defs, 'get_db_password'): + return defs.get_db_password(db_name) + + if hasattr(defs, 'db_password'): + return defs.db_password + + + def delete_temp_backups(): """ Cleans up the backup_link_path directory by deleting files older than 24 hours From 74b1d095875faafed976fed05c449f8e3dad2d72 Mon Sep 17 00:00:00 2001 From: Rushabh Mehta Date: Tue, 30 Aug 2011 11:07:33 +0530 Subject: [PATCH 15/22] fix to table column hide and g is null error of tinymce --- js/form.compressed.js | 6 +++--- js/widgets/form/form_fields.js | 5 ++++- js/widgets/form/form_grid.js | 7 ++++++- js/widgets/form/grid.js | 4 ++++ 4 files changed, 17 insertions(+), 5 deletions(-) diff --git a/js/form.compressed.js b/js/form.compressed.js index f4a7be5b96..a29f580874 100644 --- a/js/form.compressed.js +++ b/js/form.compressed.js @@ -253,7 +253,7 @@ this.input.onchange=function(){if(me.editor){}else{me.set(me.input.value);} me.run_trigger();} this.get_value=function(){if(me.editor){return me.editor.getContent();}else{return this.input.value;}} if(this.df.fieldtype=='Text Editor'){$(me.input).tinymce({script_url:'js/tiny_mce_33/tiny_mce.js',theme:"advanced",plugins:"style,inlinepopups,table",extended_valid_elements:"div[id|dir|class|align|style]",width:'100%',height:'360px',theme_advanced_buttons1:"bold,italic,underline,strikethrough,hr,|,justifyleft,justifycenter,justifyright,|,formatselect,fontselect,fontsizeselect",theme_advanced_buttons2:"bullist,numlist,|,outdent,indent,|,undo,redo,|,link,unlink,code,|,forecolor,backcolor,|,tablecontrols",theme_advanced_buttons3:"",theme_advanced_toolbar_location:"top",theme_advanced_toolbar_align:"left",content_css:"js/tiny_mce_33/custom_content.css",oninit:function(){me.init_editor();}});}else{$y(me.input,{fontFamily:'Courier, Fixed'});}} -_f.CodeField.prototype.init_editor=function(){var me=this;this.editor=tinymce.get(this.myid);this.editor.onKeyUp.add(function(ed,e){me.set(ed.getContent());});this.editor.onPaste.add(function(ed,e){me.set(ed.getContent());});this.editor.onSetContent.add(function(ed,e){me.set(ed.getContent());});if(cur_frm)this.editor.setContent(locals[cur_frm.doctype][cur_frm.docname][this.df.fieldname]);} +_f.CodeField.prototype.init_editor=function(){var 
me=this;this.editor=tinymce.get(this.myid);this.editor.onKeyUp.add(function(ed,e){me.set(ed.getContent());});this.editor.onPaste.add(function(ed,e){me.set(ed.getContent());});this.editor.onSetContent.add(function(ed,e){me.set(ed.getContent());});var c=locals[cur_frm.doctype][cur_frm.docname][this.df.fieldname];if(cur_frm&&c){this.editor.setContent(c);}} _f.CodeField.prototype.set_disp=function(val){$y(this.disp_area,{width:'90%'}) if(this.df.fieldtype=='Text Editor'){this.disp_area.innerHTML=val;}else{this.disp_area.innerHTML='';}} _f.cur_grid_cell=null;_f.Grid=function(parent){} @@ -271,7 +271,7 @@ this.head_tab.style.width=w+'px';this.tab.style.width=w+'px';} _f.Grid.prototype.set_column_disp=function(fieldname,show){var cidx=this.col_idx_by_name[fieldname];if(!cidx){msgprint('Trying to hide unknown column: '+fieldname);return;} var disp=show?'table-cell':'none';this.head_row.cells[cidx].style.display=disp;for(var i=0,len=this.tab.rows.length;i'+label+'';c.cur_label=label;break;}}} _f.FormGrid.prototype.refresh=function(){var docset=getchildren(this.doctype,this.field.frm.docname,this.field.df.fieldname,this.field.frm.doctype);var data=[];for(var i=0;i
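A note on the backup change in PATCH 14/22 above: get_db_password() resolves the database password through webnotes.defs, preferring a per-database callable get_db_password(db_name) and falling back to the flat db_password attribute. A minimal standalone sketch of that resolution order (the _Defs placeholder and passwords below are stand-ins, not part of the framework):

class _Defs(object):
    """Placeholder standing in for the site's webnotes.defs module."""
    pass

def resolve_db_password(defs, db_name):
    # Same lookup order as backups.get_db_password(): callable first, then attribute.
    if hasattr(defs, 'get_db_password'):
        return defs.get_db_password(db_name)
    if hasattr(defs, 'db_password'):
        return defs.db_password

# A multi-tenant defs module can hand out one password per database ...
multi_site = _Defs()
multi_site.get_db_password = lambda db: {'site_a': 'pw-a', 'site_b': 'pw-b'}[db]

# ... while a single-site deployment keeps the old flat attribute.
single_site = _Defs()
single_site.db_password = 'pw-shared'

print(resolve_db_password(multi_site, 'site_a'))    # pw-a
print(resolve_db_password(single_site, 'site_a'))   # pw-shared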