diff --git a/cgi-bin/core/doctype/doctype_mapper/doctype_mapper.py b/cgi-bin/core/doctype/doctype_mapper/doctype_mapper.py index 7fb0f61766..89cf74efae 100644 --- a/cgi-bin/core/doctype/doctype_mapper/doctype_mapper.py +++ b/cgi-bin/core/doctype/doctype_mapper/doctype_mapper.py @@ -2,7 +2,7 @@ import webnotes from webnotes.utils import add_days, add_months, add_years, cint, cstr, date_diff, default_fields, flt, fmt_money, formatdate, generate_hash, getTraceback, get_defaults, get_first_day, get_last_day, getdate, has_common, month_name, now, nowdate, replace_newlines, sendmail, set_default, str_esc_quote, user_format, validate_email_add -from webnotes.model import db_exists +from webnotes.model import db_exists, default_fields from webnotes.model.doc import Document, addchild, removechild, getchildren, make_autoname, SuperDocType from webnotes.model.doclist import getlist, copy_doclist from webnotes.model.code import get_obj, get_server_obj, run_server_obj, updatedb, check_syntax @@ -18,230 +18,252 @@ convert_to_lists = webnotes.conn.convert_to_lists class DocType: - def __init__(self, doc, doclist=[]): - self.doc = doc - self.doclist = doclist - self.prefix = is_testing and 'test' or 'tab' - self.ref_doc = '' - # Autoname - #--------- - def autoname(self): - self.doc.name = make_autoname(self.doc.from_doctype + '-' + self.doc.to_doctype) - - # Map Custom Fields - # ------------------ - def map_custom_fields(self, from_doctype, to_doctype, from_doc, to_doc): - fld_list = [] - for d in sql("select fieldname from `tabCustom Field` where dt = %s and docstatus != 2",from_doctype): - if sql("select fieldname from `tabCustom Field` where dt = %s and fieldname = %s and docstatus != 2",(to_doctype, d[0])): - fld_list.append([d[0], d[0]]) - self.set_value(fld_list, from_doc, to_doc) - - # Maps the fields in 'To DocType' - #-------------------------------- - def dt_map(self, from_doctype, to_doctype, from_docname, to_doc, doclist, from_to_list = '[]'): - - # definition of arguments - ''' - String : contains the name of DocType initiating the function - String : contains the name of DocType created by the function - String : contains ID(name) of 'from_doctype' - String : contains doc of 'to_doctype' - String : contains doclist of 'to_doctype' - String : contains list of tables which will be mapped - ''' - # Validate reference doc docstatus - self.ref_doc = from_docname - self.check_ref_docstatus() - - if not doclist: - doclist.append(to_doc) - - tbl_list = sql("select from_table, to_table, from_field, to_field, match_id, validation_logic from `tabTable Mapper Detail` where parent ='%s' order by match_id" % (from_doctype + "-" + to_doctype)) - - for t in tbl_list: - from_table_name = t[0] - to_table_name = t[1] - from_table_fname = t[2] - to_table_fname = t[3] - match_id = t[4] - validation_logic = t[5] - - - from_to = [from_table_name, to_table_name] - - if from_to in eval(from_to_list): - fld_list = sql("select from_field, to_field from `tabField Mapper Detail` where parent = '%s' and match_id = %s and map = 'Yes'" % (from_doctype + "-" + to_doctype, match_id)) - if fld_list: - if not from_docname: - msgprint(from_doctype + " not selected for mapping") - raise Exception - - # Parent to parent mapping - if from_table_name == self.doc.from_doctype and to_table_name == self.doc.to_doctype: - - # Check validation - nm = sql("select name from `tab%s` where name = '%s' and %s" % (from_doctype, from_docname, validation_logic)) - nm = nm and nm[0][0] or '' - - # If validation failed raise exception - if not 
nm: - msgprint("Validation failed in doctype mapper. Please contact Administrator.") - raise Exception - - from_doc = Document(from_doctype, nm) - # Maps field in parent - self.set_value(fld_list, from_doc, to_doc) - # Map custom fields - self.map_custom_fields(from_doctype, to_doctype, from_doc, to_doc) - - # Parent to child OR child to child mapping - else: - dnlist = () - if from_table_name == self.doc.from_doctype: - dnlist = ((from_docname,),) - else: - dnlist = sql("select name from `tab%s` where parent='%s' and parenttype = '%s' and %s order by idx" % (from_table_name, from_docname, self.doc.from_doctype, validation_logic)) - - for dn in dnlist: - # Add a row in target table in 'To DocType' and returns obj - ch = addchild(to_doc, t[3], t[1], 1, doclist) - # Creates object for 'From DocType', it can be parent or child - d = Document(t[0], dn[0]) - # Map values - self.set_value(fld_list, d, ch) - # Map custom fields - self.map_custom_fields(from_table_name, t[1], d, ch) - - # Required when called from server side for refreshing table - return doclist - - # Assigns value to "To Doctype" - #------------------------------ - def set_value(self, fld_list, obj, to_doc): - for f in fld_list: - if f[0].startswith('eval:'): - to_doc.fields[f[1]] = eval(f[0][5:]) - else: - to_doc.fields[f[1]] = obj.fields.get(f[0]) - - # Validate - #--------- - def validate(self): - for d in getlist(self.doclist, 'field_mapper_details'): - # Automatically assigns default value if not entered - if not d.match_id: - d.match_id = 0 - if not d.map: - d.map = 'Yes' - for d in getlist(self.doclist, 'table_mapper_details'): - if not d.reference_doctype_key: - d.reference_doctype_key = '' - if not d.reference_key: - d.reference_key = '' - - # Check wrong field name - self.check_fields_in_dt() - - # Check if any wrong fieldname entered - #-------------------------------------- - def check_fields_in_dt(self): - for d in getlist(self.doclist, 'field_mapper_details'): - table_name = sql("select from_table, to_table from `tabTable Mapper Detail` where parent ='%s' and match_id = '%s'" % (self.doc.name, d.match_id)) - - if table_name: - exists1 = sql("select name from tabDocField where parent = '%s' and fieldname = '%s'" % (table_name[0][0], d.from_field)) - exists2 = sql("select name from tabDocField where parent = '%s' and fieldname = '%s'" % (table_name[0][1], d.to_field)) - - # Default fields like name, parent, owner does not exists in DocField - if not exists1 and d.from_field not in default_fields: - msgprint('"' + cstr(d.from_field) + '" does not exists in DocType "' + cstr(table_name[0][0]) + '"') - if not exists2 and d.to_field not in default_fields: - msgprint('"' + cstr(d.to_field) + '" does not exists in DocType "' + cstr(table_name[0][1]) + '"') - - # Check consistency of value with reference document - #--------------------------------------------------- - def validate_reference_value(self, obj, to_docname): - for t in getlist(self.doclist, 'table_mapper_details'): - # Reference key is the fieldname which will relate to the from_table - if t.reference_doctype_key: - for d in getlist(obj.doclist, t.to_field): - if d.fields[t.reference_doctype_key] == self.doc.from_doctype: - self.check_consistency(obj.doc, d, to_docname) - self.check_ref_docstatus() - - # Make list of fields whose value will be consistent with prevdoc - #----------------------------------------------------------------- - def get_checklist(self): - checklist = [] - for f in getlist(self.doclist, 'field_mapper_details'): - - # Check which field's 
value will be compared - if f.checking_operator: - checklist.append([f.from_field, f.to_field, f.checking_operator, f.match_id]) - return checklist - - def check_fld_type(self, tbl, fld, cur_val): - ft = sql("select fieldtype from tabDocField where fieldname = '%s' and parent = '%s'" % (fld,tbl)) - ft = ft and ft[0][0] or '' - if ft == 'Currency' or ft == 'Float': - cur_val = '%.2f' % cur_val - return cur_val, ft - - # Check consistency - #------------------- - def check_consistency(self, par_obj, child_obj, to_docname): - checklist = self.get_checklist() - self.ref_doc = '' - for t in getlist(self.doclist, 'table_mapper_details'): - if t.reference_key and child_obj.fields[t.reference_key]: - for cl in checklist: - if cl[3] == t.match_id: - if t.to_field: - cur_val = child_obj.fields[cl[1]] - else: - cur_val = par_obj.fields[cl[1]] - - ft = self.check_fld_type(t.to_table, cl[1], cur_val) - cur_val = ft[0] - - if cl[2] == '=' and (ft[1] == 'Currency' or ft[1] == 'Float'): - consistent = sql("select name, %s from `tab%s` where name = '%s' and '%s' - %s <= 0.5" % (cl[0], t.from_table, child_obj.fields[t.reference_key], flt(cur_val), cl[0])) - else: - #consistent = sql("select name, %s from `tab%s` where name = '%s' and '%s' %s %s" % (cl[0], t.from_table, child_obj.fields[t.reference_key], cur_val, cl[2], cl[0])) - consistent = sql("select name, %s from `tab%s` where name = '%s' and '%s' %s ifnull(%s, '')" % (cl[0], t.from_table, child_obj.fields[t.reference_key], ft[1] in ('Currency', 'Float', 'Int') and flt(cur_val) or cstr(cur_val), cl[2], cl[0])) - - if not self.ref_doc: - det = sql("select name, parent from `tab%s` where name = '%s'" % (t.from_table, child_obj.fields[t.reference_key])) - self.ref_doc = det[0][1] and det[0][1] or det[0][0] - - if not consistent: - self.give_message(t.from_table, t.to_table, cl[0], cl[1], child_obj.fields[t.reference_key], cl[2]) - - # Gives message and raise exception - #----------------------------------- - def give_message(self, from_table, to_table, from_field, to_field, ref_value, operator): - # Select label of the field - to_fld_label = sql("select label from tabDocField where parent = '%s' and fieldname = '%s'" % (to_table, to_field)) - from_fld_label = sql("select label from tabDocField where parent = '%s' and fieldname = '%s'" % (from_table, from_field)) - - op_in_words = {'=':'equal to ', '>=':'greater than equal to ', '>':'greater than ', '<=':'less than equal to ', '<':'less than '} - msgprint(to_fld_label[0][0] + " should be " + op_in_words[operator] + from_fld_label[0][0] + " of " + self.doc.from_doctype + ": " + self.ref_doc) - raise Exception, "Validation Error." - - def check_ref_docstatus(self): - if self.ref_doc: - det = sql("select name, docstatus from `tab%s` where name = '%s'" % (self.doc.from_doctype, self.ref_doc)) - if not det: - msgprint(self.doc.from_doctype + ": " + self.ref_doc + " does not exists in the system") - raise Exception, "Validation Error." - elif self.doc.ref_doc_submitted and det[0][1] != 1: - msgprint(self.doc.from_doctype + ": " + self.ref_doc + " is not Submitted Document.") - raise Exception, "Validation Error." 
- - def on_update(self): - import webnotes.defs - if hasattr(webnotes.defs, 'developer_mode') and webnotes.defs.developer_mode: - from webnotes.modules.export_module import export_to_files - export_to_files(record_list=[[self.doc.doctype, self.doc.name]]) - + def __init__(self, doc, doclist=[]): + self.doc = doc + self.doclist = doclist + self.prefix = is_testing and 'test' or 'tab' + self.ref_doc = '' + # Autoname + #--------- + def autoname(self): + self.doc.name = make_autoname(self.doc.from_doctype + '-' + self.doc.to_doctype) + + + def map_fields_with_same_name(self, from_doctype, to_doctype, from_doc, to_doc, fld_list): + """ + Returns field list with same name in from and to doctype + """ + exception_flds = [f[0] for f in fld_list if f[2] == 'No'] + exception_flds += default_fields + exception_flds += ['amended_from', 'amendment_date', 'file_list', 'naming_series', 'status'] + + map_fld_list = [ + [d[0], d[0], 'Yes'] for d in sql(""" + select t1.fieldname + from `tabDocField` t1, `tabDocField` t2 + where t1.parent = %s and t2.parent = %s + and t1.fieldname = t2.fieldname + and t1.docstatus != 2 and t2.docstatus != 2 + and ifnull(t1.fieldname, '') != '' + """,(from_doctype, to_doctype)) if d[0] not in exception_flds + ] + + self.set_value(map_fld_list, from_doc, to_doc) + + + # Maps the fields in 'To DocType' + #-------------------------------- + def dt_map(self, from_doctype, to_doctype, from_docname, to_doc, doclist, from_to_list = '[]'): + + # definition of arguments + ''' + String : contains the name of DocType initiating the function + String : contains the name of DocType created by the function + String : contains ID(name) of 'from_doctype' + String : contains doc of 'to_doctype' + String : contains doclist of 'to_doctype' + String : contains list of tables which will be mapped + ''' + # Validate reference doc docstatus + self.ref_doc = from_docname + self.check_ref_docstatus() + + if not doclist: + doclist.append(to_doc) + + tbl_list = sql("select from_table, to_table, from_field, to_field, match_id, validation_logic from `tabTable Mapper Detail` where parent ='%s' order by match_id" % (from_doctype + "-" + to_doctype)) + + for t in tbl_list: + from_table_name = t[0] + to_table_name = t[1] + from_table_fname = t[2] + to_table_fname = t[3] + match_id = t[4] + validation_logic = t[5] + + + from_to = [from_table_name, to_table_name] + + if from_to in eval(from_to_list): + fld_list = sql("select from_field, to_field, map from `tabField Mapper Detail` where parent = '%s' and match_id = %s" % (from_doctype + "-" + to_doctype, match_id)) + if not from_docname: + msgprint(from_doctype + " not selected for mapping") + raise Exception + + # Parent to parent mapping + if from_table_name == self.doc.from_doctype and to_table_name == self.doc.to_doctype: + + # Check validation + nm = sql("select name from `tab%s` where name = '%s' and %s" % (from_doctype, from_docname, validation_logic)) + nm = nm and nm[0][0] or '' + + # If validation failed raise exception + if not nm: + msgprint("Validation failed in doctype mapper. 
Please contact Administrator.") + raise Exception + + from_doc = Document(from_doctype, nm) + + # Map fields with same name + + self.map_fields_with_same_name(from_doctype, to_doctype, from_doc, to_doc, fld_list) + # Maps field in parent + + if fld_list: + self.set_value(fld_list, from_doc, to_doc) + + + # Parent to child OR child to child mapping + else: + dnlist = () + if from_table_name == self.doc.from_doctype: + dnlist = ((from_docname,),) + else: + dnlist = sql("select name from `tab%s` where parent='%s' and parenttype = '%s' and %s order by idx" % (from_table_name, from_docname, self.doc.from_doctype, validation_logic)) + + for dn in dnlist: + # Add a row in target table in 'To DocType' and returns obj + ch = addchild(to_doc, t[3], t[1], 1, doclist) + # Creates object for 'From DocType', it can be parent or child + d = Document(t[0], dn[0]) + # Map fields with same name + self.map_fields_with_same_name(from_table_name, t[1], d, ch, fld_list) + # Map values + if fld_list: + self.set_value(fld_list, d, ch) + + + + # Required when called from server side for refreshing table + return doclist + + # Assigns value to "To Doctype" + #------------------------------ + def set_value(self, fld_list, obj, to_doc): + for f in fld_list: + if f[2] == 'Yes': + if f[0].startswith('eval:'): + to_doc.fields[f[1]] = eval(f[0][5:]) + else: + to_doc.fields[f[1]] = obj.fields.get(f[0]) + + # Validate + #--------- + def validate(self): + for d in getlist(self.doclist, 'field_mapper_details'): + # Automatically assigns default value if not entered + if not d.match_id: + d.match_id = 0 + if not d.map: + d.map = 'Yes' + for d in getlist(self.doclist, 'table_mapper_details'): + if not d.reference_doctype_key: + d.reference_doctype_key = '' + if not d.reference_key: + d.reference_key = '' + + # Check wrong field name + self.check_fields_in_dt() + + # Check if any wrong fieldname entered + #-------------------------------------- + def check_fields_in_dt(self): + for d in getlist(self.doclist, 'field_mapper_details'): + table_name = sql("select from_table, to_table from `tabTable Mapper Detail` where parent ='%s' and match_id = '%s'" % (self.doc.name, d.match_id)) + + if table_name: + exists1 = sql("select name from tabDocField where parent = '%s' and fieldname = '%s'" % (table_name[0][0], d.from_field)) + exists2 = sql("select name from tabDocField where parent = '%s' and fieldname = '%s'" % (table_name[0][1], d.to_field)) + + # Default fields like name, parent, owner does not exists in DocField + if not exists1 and d.from_field not in default_fields: + msgprint('"' + cstr(d.from_field) + '" does not exists in DocType "' + cstr(table_name[0][0]) + '"') + if not exists2 and d.to_field not in default_fields: + msgprint('"' + cstr(d.to_field) + '" does not exists in DocType "' + cstr(table_name[0][1]) + '"') + + # Check consistency of value with reference document + #--------------------------------------------------- + def validate_reference_value(self, obj, to_docname): + for t in getlist(self.doclist, 'table_mapper_details'): + # Reference key is the fieldname which will relate to the from_table + if t.reference_doctype_key: + for d in getlist(obj.doclist, t.to_field): + if d.fields[t.reference_doctype_key] == self.doc.from_doctype: + self.check_consistency(obj.doc, d, to_docname) + self.check_ref_docstatus() + + # Make list of fields whose value will be consistent with prevdoc + #----------------------------------------------------------------- + def get_checklist(self): + checklist = [] + for f in 
getlist(self.doclist, 'field_mapper_details'): + + # Check which field's value will be compared + if f.checking_operator: + checklist.append([f.from_field, f.to_field, f.checking_operator, f.match_id]) + return checklist + + def check_fld_type(self, tbl, fld, cur_val): + ft = sql("select fieldtype from tabDocField where fieldname = '%s' and parent = '%s'" % (fld,tbl)) + ft = ft and ft[0][0] or '' + if ft == 'Currency' or ft == 'Float': + cur_val = '%.2f' % cur_val + return cur_val, ft + + # Check consistency + #------------------- + def check_consistency(self, par_obj, child_obj, to_docname): + checklist = self.get_checklist() + self.ref_doc = '' + for t in getlist(self.doclist, 'table_mapper_details'): + if t.reference_key and child_obj.fields[t.reference_key]: + for cl in checklist: + if cl[3] == t.match_id: + if t.to_field: + cur_val = child_obj.fields[cl[1]] + else: + cur_val = par_obj.fields[cl[1]] + + ft = self.check_fld_type(t.to_table, cl[1], cur_val) + cur_val = ft[0] + + if cl[2] == '=' and (ft[1] == 'Currency' or ft[1] == 'Float'): + consistent = sql("select name, %s from `tab%s` where name = '%s' and '%s' - %s <= 0.5" % (cl[0], t.from_table, child_obj.fields[t.reference_key], flt(cur_val), cl[0])) + else: + #consistent = sql("select name, %s from `tab%s` where name = '%s' and '%s' %s %s" % (cl[0], t.from_table, child_obj.fields[t.reference_key], cur_val, cl[2], cl[0])) + consistent = sql("select name, %s from `tab%s` where name = '%s' and '%s' %s ifnull(%s, '')" % (cl[0], t.from_table, child_obj.fields[t.reference_key], ft[1] in ('Currency', 'Float', 'Int') and flt(cur_val) or cstr(cur_val), cl[2], cl[0])) + + if not self.ref_doc: + det = sql("select name, parent from `tab%s` where name = '%s'" % (t.from_table, child_obj.fields[t.reference_key])) + self.ref_doc = det[0][1] and det[0][1] or det[0][0] + + if not consistent: + self.give_message(t.from_table, t.to_table, cl[0], cl[1], child_obj.fields[t.reference_key], cl[2]) + + # Gives message and raise exception + #----------------------------------- + def give_message(self, from_table, to_table, from_field, to_field, ref_value, operator): + # Select label of the field + to_fld_label = sql("select label from tabDocField where parent = '%s' and fieldname = '%s'" % (to_table, to_field)) + from_fld_label = sql("select label from tabDocField where parent = '%s' and fieldname = '%s'" % (from_table, from_field)) + + op_in_words = {'=':'equal to ', '>=':'greater than equal to ', '>':'greater than ', '<=':'less than equal to ', '<':'less than '} + msgprint(to_fld_label[0][0] + " should be " + op_in_words[operator] + from_fld_label[0][0] + " of " + self.doc.from_doctype + ": " + self.ref_doc) + raise Exception, "Validation Error." + + def check_ref_docstatus(self): + if self.ref_doc: + det = sql("select name, docstatus from `tab%s` where name = '%s'" % (self.doc.from_doctype, self.ref_doc)) + if not det: + msgprint(self.doc.from_doctype + ": " + self.ref_doc + " does not exists in the system") + raise Exception, "Validation Error." + elif self.doc.ref_doc_submitted and det[0][1] != 1: + msgprint(self.doc.from_doctype + ": " + self.ref_doc + " is not Submitted Document.") + raise Exception, "Validation Error." 
+ + def on_update(self): + import webnotes.defs + if hasattr(webnotes.defs, 'developer_mode') and webnotes.defs.developer_mode: + from webnotes.modules.export_module import export_to_files + export_to_files(record_list=[[self.doc.doctype, self.doc.name]]) + diff --git a/cgi-bin/core/doctype/event/event.js b/cgi-bin/core/doctype/event/event.js new file mode 100644 index 0000000000..dc229bd7b3 --- /dev/null +++ b/cgi-bin/core/doctype/event/event.js @@ -0,0 +1,9 @@ +cur_frm.cscript.onload = function(doc, cdt, cdn) { + var df = get_field('Event', 'Intro HTML', doc.name); + if(doc.ref_type) { + ref = repl(cur_frm.cstring.ref_html, {'dt': doc.ref_type, 'dn':doc.ref_name}); + } else var ref = ''; + + df.options = repl(cur_frm.cstring.intro_html, {'ref': ref}); + refresh_fields('Intro HTML'); +} diff --git a/cgi-bin/core/doctype/file/file.py b/cgi-bin/core/doctype/file/file.py new file mode 100644 index 0000000000..b957ea4c15 --- /dev/null +++ b/cgi-bin/core/doctype/file/file.py @@ -0,0 +1,14 @@ +class DocType: + def __init__(self, d, dl): + self.doc, self.doclist = d,dl + + def validate(self): + # check for extension + if not '.' in self.doc.file_name: + msgprint("Extension required in file name") + raise Exception + + # set mime type + if not self.doc.mime_type: + import mimetypes + self.doc.mime_type = mimetypes.guess_type(self.doc.file_name)[0] or 'application/unknown' diff --git a/cgi-bin/core/doctype/profile/profile.txt b/cgi-bin/core/doctype/profile/profile.txt index fd5cd3722f..3f117123e0 100644 --- a/cgi-bin/core/doctype/profile/profile.txt +++ b/cgi-bin/core/doctype/profile/profile.txt @@ -5,25 +5,22 @@ { 'creation': '2009-05-12 11:19:11', 'docstatus': 0, - 'modified': '2010-12-21 11:07:20', - 'modified_by': 'sneha@webnotestech.com', + 'modified': '2011-08-25 14:02:26', + 'modified_by': 'Administrator', 'owner': 'Administrator' }, # These values are common for all DocType { - '_last_update': '1303708853', + '_last_update': '1311340897', 'allow_attach': 1, 'allow_copy': 0, 'allow_email': 0, 'allow_print': 0, - 'client_script': 'cur_frm.cscript[\'Change Password\']= function(doc, cdt, cdn) {\n var error = false;\n if ((!doc.new_password)||(!doc.retype_new_password)){\n alert("Both fields are required!");\n error = true;\n }\n if (doc.new_password.length<4) {\n alert("Password must be atleast 4 characters long");\n error = true;\n }\n if(doc.new_password!=doc.retype_new_password) {\n alert("Passwords must match");\n error = true;\n }\n if(!/[A-Z]/.test(doc.new_password) || !/[0-9]/.test(doc.new_password) || !/[\\W_]/.test(doc.new_password)) {\n msgprint(\'New password must contain atleast 1 capital letter, 1 numeric and 1 special character.\');\n error = true;\n doc.new_password = \'\';\n refresh_field(\'new_password\');\n }\n if(!error) {\n cur_frm.runscript(\'update_password\', \'\', function(r,t) {\n\tdoc.new_password = \'\';\n\tdoc.retype_new_password = \'\';\n refresh_many([\'new_password\',\'retype_new_password\']);\n });\n }\n}\n\ncur_frm.cscript.validate = function(doc, cdt, cdn) {\n doc.new_password = \'\';\n doc.retype_new_password = \'\';\n}', 'colour': 'White:FFF', 'doctype': 'DocType', 'hide_heading': 0, 'hide_toolbar': 0, - 'idx': 0, - 'in_create': 1, 'issingle': 0, 'istable': 0, 'max_attachments': 1, @@ -31,10 +28,9 @@ 'name': '__common__', 'print_outline': 'Yes', 'read_only': 0, - 'section_style': 'Tray', - 'server_code_error': ' ', + 'search_fields': 'first_name, last_name', 'show_in_menu': 0, - 'version': 25 + 'version': 32 }, # These values are common for all DocField @@ 
-69,7 +65,6 @@ 'create': 1, 'doctype': 'DocPerm', 'execute': 0, - 'idx': 1, 'permlevel': 0, 'role': 'Administrator', 'submit': 0 @@ -79,7 +74,6 @@ { 'create': 1, 'doctype': 'DocPerm', - 'idx': 2, 'permlevel': 0, 'role': 'System Manager' }, @@ -87,7 +81,6 @@ # DocPerm { 'doctype': 'DocPerm', - 'idx': 3, 'permlevel': 1, 'role': 'Administrator' }, @@ -95,7 +88,6 @@ # DocPerm { 'doctype': 'DocPerm', - 'idx': 4, 'match': 'owner', 'permlevel': 0, 'role': 'All' @@ -103,85 +95,52 @@ # DocField { + 'default': '1', 'doctype': 'DocField', - 'fieldtype': 'Section Break', - 'hidden': 0, - 'idx': 1, - 'label': 'Details', - 'oldfieldtype': 'Section Break', - 'permlevel': 0, - 'reqd': 0, - 'search_index': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldtype': 'Column Break', - 'hidden': 0, - 'idx': 2, - 'label': 'Picture', - 'oldfieldtype': 'Column Break', - 'permlevel': 0, - 'reqd': 0, - 'search_index': 0, - 'width': '50%' - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldtype': 'Image', - 'idx': 3, - 'label': 'Profile Picture', - 'oldfieldtype': 'Image', - 'permlevel': 0 + 'fieldname': 'enabled', + 'fieldtype': 'Check', + 'label': 'Enabled', + 'oldfieldname': 'enabled', + 'oldfieldtype': 'Check', + 'permlevel': 1 }, # DocField { 'doctype': 'DocField', - 'fieldtype': 'Column Break', - 'idx': 4, - 'label': 'Contact', - 'oldfieldtype': 'Column Break', - 'permlevel': 0, - 'width': '50%' + 'fieldname': 'password', + 'fieldtype': 'Password', + 'label': 'Password', + 'permlevel': 1, + 'hidden': 1 }, - + # DocField { - 'default': '1', 'doctype': 'DocField', - 'fieldname': 'enabled', + 'fieldname': 'registered', 'fieldtype': 'Check', - 'idx': 5, - 'label': 'Enabled', - 'oldfieldname': 'enabled', - 'oldfieldtype': 'Check', - 'permlevel': 1 + 'label': 'Registered', + 'permlevel': 0, + 'hidden': 1 }, # DocField { - 'default': '1', 'doctype': 'DocField', - 'fieldname': 'send_email_invite', + 'fieldname': 'unsubscribed', 'fieldtype': 'Check', - 'idx': 6, - 'label': 'Send Email Invite', - 'oldfieldname': 'send_email_invite', - 'oldfieldtype': 'Check', - 'permlevel': 1 + 'label': 'Unsubscribed', + 'permlevel': 0, + 'hidden': 1 }, - + # DocField { 'doctype': 'DocField', 'fieldname': 'recent_documents', 'fieldtype': 'Text', 'hidden': 1, - 'idx': 8, 'label': 'Recent Documents', 'oldfieldname': 'recent_documents', 'oldfieldtype': 'Text', @@ -195,7 +154,6 @@ 'doctype': 'DocField', 'fieldname': 'first_name', 'fieldtype': 'Data', - 'idx': 9, 'label': 'First Name', 'oldfieldname': 'first_name', 'oldfieldtype': 'Data', @@ -208,7 +166,6 @@ 'doctype': 'DocField', 'fieldname': 'middle_name', 'fieldtype': 'Data', - 'idx': 10, 'label': 'Middle Name (Optional)', 'oldfieldname': 'middle_name', 'oldfieldtype': 'Data', @@ -220,46 +177,17 @@ 'doctype': 'DocField', 'fieldname': 'last_name', 'fieldtype': 'Data', - 'idx': 11, 'label': 'Last Name', 'oldfieldname': 'last_name', 'oldfieldtype': 'Data', 'permlevel': 0 }, - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'email', - 'fieldtype': 'Data', - 'hidden': 0, - 'idx': 12, - 'label': 'Email', - 'oldfieldname': 'email', - 'oldfieldtype': 'Data', - 'permlevel': 0, - 'reqd': 1, - 'search_index': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'birth_date', - 'fieldtype': 'Date', - 'idx': 13, - 'label': 'Birth Date', - 'oldfieldname': 'birth_date', - 'oldfieldtype': 'Date', - 'permlevel': 0 - }, - # DocField { 'doctype': 'DocField', 'fieldname': 'gender', 'fieldtype': 'Select', - 'idx': 14, 'label': 'Gender', 'oldfieldname': 'gender', 'oldfieldtype': 
'Select', @@ -271,51 +199,74 @@ # DocField { 'doctype': 'DocField', - 'fieldname': 'occupation', + 'fieldtype': 'Column Break', + 'oldfieldtype': 'Column Break', + 'permlevel': 1, + 'width': '50%' + }, + + # DocField + { + 'doctype': 'DocField', + 'fieldname': 'email', 'fieldtype': 'Data', - 'idx': 15, - 'label': 'Designation', - 'oldfieldname': 'occupation', + 'hidden': 0, + 'label': 'Email', + 'oldfieldname': 'email', 'oldfieldtype': 'Data', 'permlevel': 0, + 'reqd': 1, 'search_index': 0 }, - + # DocField { 'doctype': 'DocField', 'fieldname': 'bio', 'fieldtype': 'Text', - 'idx': 16, 'label': 'Bio', 'oldfieldname': 'bio', 'oldfieldtype': 'Text', 'permlevel': 0, - 'search_index': 0 + 'search_index': 0, + 'hidden': 1 }, + # DocField { 'doctype': 'DocField', 'fieldname': 'interests', 'fieldtype': 'Text', - 'idx': 17, 'label': 'Interests', 'oldfieldname': 'interests', 'oldfieldtype': 'Text', + 'permlevel': 0, + 'hidden': 1 + }, + + # DocField + { + 'doctype': 'DocField', + 'fieldname': 'birth_date', + 'fieldtype': 'Date', + 'label': 'Birth Date', + 'oldfieldname': 'birth_date', + 'oldfieldtype': 'Date', 'permlevel': 0 }, + # DocField { 'doctype': 'DocField', 'fieldname': 'activities', 'fieldtype': 'Text', - 'idx': 18, 'label': 'Activities', 'oldfieldname': 'activities', 'oldfieldtype': 'Text', - 'permlevel': 0 + 'permlevel': 0, + 'hidden': 1 }, # DocField @@ -323,7 +274,6 @@ 'doctype': 'DocField', 'fieldname': 'messanger_status', 'fieldtype': 'Data', - 'idx': 19, 'label': 'Messanger Status', 'oldfieldname': 'messanger_status', 'oldfieldtype': 'Data', @@ -331,60 +281,11 @@ 'search_index': 0 }, - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'home_phone', - 'fieldtype': 'Data', - 'idx': 20, - 'label': 'Home Phone', - 'oldfieldname': 'home_phone', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'office_phone', - 'fieldtype': 'Data', - 'idx': 21, - 'label': 'Office Phone', - 'oldfieldname': 'office_phone', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'extension', - 'fieldtype': 'Data', - 'idx': 22, - 'label': 'Extension', - 'oldfieldname': 'extension', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'cell_no', - 'fieldtype': 'Data', - 'idx': 23, - 'label': 'Cell No', - 'oldfieldname': 'cell_no', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - # DocField { 'doctype': 'DocField', 'fieldname': 'user_type', 'fieldtype': 'Select', - 'idx': 24, 'label': 'User Type', 'oldfieldname': 'user_type', 'oldfieldtype': 'Select', @@ -392,150 +293,27 @@ 'permlevel': 0 }, - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'last_login', - 'fieldtype': 'Read Only', - 'hidden': 0, - 'idx': 25, - 'label': 'Last Login', - 'oldfieldname': 'last_login', - 'oldfieldtype': 'Read Only', - 'permlevel': 0, - 'reqd': 0, - 'search_index': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'last_ip', - 'fieldtype': 'Read Only', - 'idx': 26, - 'label': 'Last IP', - 'oldfieldname': 'last_ip', - 'oldfieldtype': 'Read Only', - 'permlevel': 0 - }, # DocField { 'doctype': 'DocField', 'fieldtype': 'Section Break', - 'idx': 27, - 'label': 'Address', - 'oldfieldtype': 'Section Break', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'line_1', - 'fieldtype': 'Data', - 'idx': 28, - 'label': 'Line 1', - 'oldfieldname': 'line_1', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 
'doctype': 'DocField', - 'fieldname': 'line_2', - 'fieldtype': 'Data', - 'idx': 29, - 'label': 'Line 2', - 'oldfieldname': 'line_2', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'city', - 'fieldtype': 'Data', - 'idx': 30, - 'label': 'City / Town', - 'oldfieldname': 'city', - 'oldfieldtype': 'Data', - 'permlevel': 0, - 'reqd': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'district', - 'fieldtype': 'Data', - 'idx': 31, - 'label': 'District', - 'oldfieldname': 'district', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'state', - 'fieldtype': 'Data', - 'idx': 32, - 'label': 'State', - 'oldfieldname': 'state', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'country', - 'fieldtype': 'Data', - 'idx': 33, - 'label': 'Country', - 'oldfieldname': 'country', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'pin', - 'fieldtype': 'Data', - 'idx': 34, - 'label': 'Pin', - 'oldfieldname': 'pin', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldtype': 'Section Break', - 'idx': 35, - 'label': 'User Role', + 'hidden': 0, 'oldfieldtype': 'Section Break', - 'permlevel': 1 + 'permlevel': 1, + 'reqd': 0, + 'search_index': 0 }, - + # DocField { 'doctype': 'DocField', - 'fieldtype': 'Section Break', - 'hidden': 0, - 'idx': 36, + 'fieldtype': 'Column Break', 'label': 'Roles', - 'oldfieldtype': 'Section Break', + 'oldfieldtype': 'Column Break', 'permlevel': 1, - 'reqd': 0, - 'search_index': 0 - }, + 'width': '50%' + }, # DocField { @@ -545,7 +323,6 @@ 'fieldname': 'userroles', 'fieldtype': 'Table', 'hidden': 0, - 'idx': 37, 'label': 'User Roles', 'oldfieldname': 'userroles', 'oldfieldtype': 'Table', @@ -558,10 +335,9 @@ # DocField { 'doctype': 'DocField', - 'fieldtype': 'Section Break', - 'idx': 38, + 'fieldtype': 'Column Break', 'label': 'System Defaults', - 'oldfieldtype': 'Section Break', + 'oldfieldtype': 'Column Break', 'permlevel': 1, 'width': '50%' }, @@ -574,7 +350,6 @@ 'fieldname': 'defaults', 'fieldtype': 'Table', 'hidden': 0, - 'idx': 39, 'label': 'Defaults', 'oldfieldname': 'defaults', 'oldfieldtype': 'Table', @@ -588,145 +363,69 @@ { 'doctype': 'DocField', 'fieldtype': 'Section Break', - 'idx': 40, - 'label': 'Password', + 'label': 'Login Details', 'oldfieldtype': 'Section Break', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldtype': 'Column Break', - 'idx': 41, - 'label': 'Change Your Password', - 'oldfieldtype': 'Column Break', - 'permlevel': 1, - 'width': '50%' - }, - - # DocField - { - 'colour': 'Pink:FEF2EA', - 'doctype': 'DocField', - 'fieldname': 'password', - 'fieldtype': 'Data', - 'hidden': 1, - 'idx': 42, - 'label': 'Current Password', - 'oldfieldname': 'password', - 'oldfieldtype': 'Data', - 'permlevel': 1, - 'reqd': 0, - 'search_index': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'new_password', - 'fieldtype': 'Password', - 'idx': 43, - 'label': 'New Password', - 'oldfieldname': 'new_password', - 'oldfieldtype': 'Password', - 'permlevel': 1 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'retype_new_password', - 'fieldtype': 'Password', - 'idx': 44, - 'label': 'Retype New Password', - 'oldfieldname': 'retype_new_password', - 'oldfieldtype': 'Password', - 'permlevel': 1 - }, - - # DocField - { - 'doctype': 'DocField', - 
'fieldname': 'password_last_updated', - 'fieldtype': 'Date', - 'hidden': 1, - 'idx': 45, - 'label': 'Password Last Updated', - 'oldfieldname': 'password_last_updated', - 'oldfieldtype': 'Date', - 'permlevel': 1, - 'print_hide': 1 + 'permlevel': 0, }, # DocField { 'doctype': 'DocField', - 'fieldtype': 'Button', - 'idx': 46, - 'label': 'Change Password', - 'oldfieldtype': 'Button', - 'permlevel': 1, - 'trigger': 'Client', - 'width': '120px' + 'fieldname': 'login_before', + 'fieldtype': 'Int', + 'label': 'Login Before', + 'permlevel': 0 }, # DocField { 'doctype': 'DocField', - 'fieldtype': 'Section Break', - 'idx': 47, - 'label': 'Attachment', - 'oldfieldtype': 'Section Break', - 'permlevel': 1 + 'fieldname': 'login_after', + 'fieldtype': 'Int', + 'label': 'Login After', + 'permlevel': 0 }, # DocField { - 'default': '0', 'doctype': 'DocField', - 'fieldname': 'social_points', - 'fieldtype': 'Int', - 'idx': 48, - 'label': 'Social Points', - 'oldfieldname': 'social_points', - 'oldfieldtype': 'Int', + 'fieldname': 'restrict_ip', + 'fieldtype': 'Data', + 'label': 'Restrict IP', 'permlevel': 0 }, # DocField { 'doctype': 'DocField', - 'fieldname': 'social_badge', - 'fieldtype': 'Data', - 'idx': 49, - 'label': 'Social Badge', - 'oldfieldname': 'social_badge', - 'oldfieldtype': 'Data', - 'permlevel': 0 + 'fieldtype': 'Column Break', + 'oldfieldtype': 'Column Break', + 'width': '50%' }, # DocField { 'doctype': 'DocField', - 'fieldname': 'avatar', - 'fieldtype': 'Data', - 'idx': 50, - 'label': 'Avatar', - 'oldfieldname': 'avatar', - 'oldfieldtype': 'Data', - 'permlevel': 0 + 'fieldname': 'last_login', + 'fieldtype': 'Read Only', + 'hidden': 0, + 'label': 'Last Login', + 'oldfieldname': 'last_login', + 'oldfieldtype': 'Read Only', + 'permlevel': 0, + 'reqd': 0, + 'search_index': 0 }, # DocField { 'doctype': 'DocField', - 'fieldtype': 'HTML', - 'idx': 51, - 'label': 'Attachment HTML', - 'oldfieldtype': 'HTML', - 'options': 'First attachment must be the picture', - 'permlevel': 1 + 'fieldname': 'last_ip', + 'fieldtype': 'Read Only', + 'label': 'Last IP', + 'oldfieldname': 'last_ip', + 'oldfieldtype': 'Read Only', + 'permlevel': 0 }, # DocField @@ -735,28 +434,9 @@ 'fieldname': 'file_list', 'fieldtype': 'Text', 'hidden': 1, - 'idx': 52, 'label': 'File List', 'oldfieldname': 'file_list', 'oldfieldtype': 'Text', 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'fiscal_year', - 'fieldtype': 'Select', - 'hidden': 1, - 'idx': 53, - 'in_filter': 1, - 'label': 'Fiscal Year', - 'no_copy': 1, - 'oldfieldname': 'fiscal_year', - 'oldfieldtype': 'Select', - 'options': 'link:Fiscal Year', - 'permlevel': 0, - 'print_hide': 1, - 'report_hide': 1 - } + } ] \ No newline at end of file diff --git a/cgi-bin/core/doctype/search_criteria/search_criteria.txt b/cgi-bin/core/doctype/search_criteria/search_criteria.txt index 0a4f09e55d..a3ca3ae369 100644 --- a/cgi-bin/core/doctype/search_criteria/search_criteria.txt +++ b/cgi-bin/core/doctype/search_criteria/search_criteria.txt @@ -87,10 +87,13 @@ # DocField { 'doctype': 'DocField', - 'fieldtype': 'Section Break', - 'idx': 1, - 'label': 'Details', - 'oldfieldtype': 'Section Break', + 'fieldname': 'criteria_name', + 'fieldtype': 'Data', + 'hidden': 0, + 'idx': 5, + 'label': 'Criteria Name', + 'oldfieldname': 'criteria_name', + 'oldfieldtype': 'Data', 'permlevel': 0 }, @@ -135,41 +138,6 @@ 'search_index': 1 }, - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'criteria_name', - 'fieldtype': 'Data', - 'hidden': 0, - 'idx': 5, - 'label': 'Criteria 
Name', - 'oldfieldname': 'criteria_name', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'description', - 'fieldtype': 'Text', - 'idx': 6, - 'label': 'Description', - 'oldfieldname': 'description', - 'oldfieldtype': 'Text', - 'permlevel': 0, - 'width': '300px' - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldtype': 'Section Break', - 'idx': 7, - 'label': 'Query Details', - 'oldfieldtype': 'Section Break', - 'permlevel': 0 - }, # DocField { @@ -189,7 +157,7 @@ 'doctype': 'DocField', 'fieldname': 'filters', 'fieldtype': 'Text', - 'hidden': 0, + 'hidden': 1, 'idx': 9, 'label': 'Filters', 'oldfieldname': 'filters', @@ -288,32 +256,6 @@ 'permlevel': 0 }, - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'graph_series', - 'fieldtype': 'Data', - 'hidden': 0, - 'idx': 17, - 'label': 'Graph Series', - 'oldfieldname': 'graph_series', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - - # DocField - { - 'doctype': 'DocField', - 'fieldname': 'graph_values', - 'fieldtype': 'Data', - 'hidden': 0, - 'idx': 18, - 'label': 'Graph Values', - 'oldfieldname': 'graph_values', - 'oldfieldtype': 'Data', - 'permlevel': 0 - }, - # DocField { 'doctype': 'DocField', diff --git a/cgi-bin/core/doctype/system_console/system_console.js b/cgi-bin/core/doctype/system_console/system_console.js index a16c39f6ac..9a5cc340af 100644 --- a/cgi-bin/core/doctype/system_console/system_console.js +++ b/cgi-bin/core/doctype/system_console/system_console.js @@ -1,10 +1,12 @@ cur_frm.cscript['Server (Python)'] = function(doc, dt, dn) { + doc.response = 'Executing...' + refresh_field('response'); $c_obj([doc], 'execute_server', '', function(r, rt) { doc = locals[doc.doctype][doc.name]; if(r.exc) { doc.response = r.exc; } else { - doc.response = 'Worked!' + doc.response = 'Worked!'.bold() } refresh_field('response'); }) diff --git a/cgi-bin/webnotes/__init__.py b/cgi-bin/webnotes/__init__.py index 06833e2e77..2dcc8e4748 100644 --- a/cgi-bin/webnotes/__init__.py +++ b/cgi-bin/webnotes/__init__.py @@ -94,10 +94,13 @@ def errprint(msg): """ debug_log.append(cstr(msg or '')) -def msgprint(msg, small=0, raise_exception=0): +def msgprint(msg, small=0, raise_exception=0, as_table=False): """ Append to the :data:`message_log` """ + if as_table and type(msg) in (list, tuple): + msg = '' + ''.join([''+''.join(['' % c for c in r])+'' for r in msg]) + '
</table>
' + message_log.append((small and '__small:' or '')+cstr(msg or '')) if raise_exception: raise ValidationError @@ -197,3 +200,13 @@ def setup_logging(): if getattr(defs, 'log_file_name', None): setup_logging() +def get_db_password(db_name): + from webnotes import defs + if hasattr(defs, 'get_db_password'): + return defs.get_db_password(db_name) + + elif hasattr(defs, 'db_password'): + return defs.db_password + + else: + return db_name \ No newline at end of file diff --git a/cgi-bin/webnotes/auth.py b/cgi-bin/webnotes/auth.py index d15309020b..6858151560 100644 --- a/cgi-bin/webnotes/auth.py +++ b/cgi-bin/webnotes/auth.py @@ -104,7 +104,7 @@ class HTTPRequest: else: db_name = getattr(webnotes.defs,'default_db_name','') - webnotes.conn = webnotes.db.Database(user = db_name,password = getattr(webnotes.defs,'db_password','')) + webnotes.conn = webnotes.db.Database(user = db_name,password = getattr(webnotes.defs,'db_password', '')) webnotes.ac_name = ac_name # ================================================================================= @@ -127,8 +127,9 @@ class LoginManager: # --------------------------- def post_login(self): - self.validate_ip_address() self.run_trigger() + self.validate_ip_address() + self.validate_hour() # check password # -------------- @@ -186,16 +187,38 @@ class LoginManager: # ------------- def validate_ip_address(self): - try: - ip = webnotes.conn.sql("select ip_address from tabProfile where name = '%s'" % self.user)[0][0] or '' - except: return + ip_list = webnotes.conn.get_value('Profile', self.user, 'restrict_ip', ignore=True) + + if not ip_list: + return + + ip_list = ip_list.replace(",", "\n").split('\n') + ip_list = [i.strip() for i in ip_list] + + for ip in ip_list: + if webnotes.remote_ip.startswith(ip): + return - ip = ip.replace(",", "\n").split('\n') - ip = [i.strip() for i in ip] + webnotes.msgprint('Not allowed from this IP Address', raise_exception=1) + + def validate_hour(self): + """ + check if user is logging in during restricted hours + """ + login_before = int(webnotes.conn.get_value('Profile', self.user, 'login_before', ignore=True) or 0) + login_after = int(webnotes.conn.get_value('Profile', self.user, 'login_after', ignore=True) or 0) + + if not (login_before or login_after): + return - if ret and ip: - if not (webnotes.remote_ip.startswith(ip[0]) or (webnotes.remote_ip in ip)): - raise Exception, 'Not allowed from this IP Address' + from webnotes.utils import now_datetime + current_hour = int(now_datetime().strftime('%H')) + + if login_before and current_hour > login_before: + webnotes.msgprint('Not allowed to login after restricted hour', raise_exception=1) + + if login_after and current_hour < login_after: + webnotes.msgprint('Not allowed to login before restricted hour', raise_exception=1) # login as guest # -------------- diff --git a/cgi-bin/webnotes/db.py b/cgi-bin/webnotes/db.py index 8d175e317e..f288714d47 100644 --- a/cgi-bin/webnotes/db.py +++ b/cgi-bin/webnotes/db.py @@ -26,7 +26,7 @@ class Database: self.transaction_writes = 0 self.testing_tables = [] - self.password = self.get_db_password(ac_name, password) + self.password = self.get_db_password(user, password) self.connect() if self.user != 'root': @@ -57,7 +57,9 @@ class Database: return '' def get_db_login(self, ac_name): - return getattr(defs,'db_name_map').get(ac_name, getattr(defs,'default_db_name')) + if hasattr(defs, 'db_name_map'): + return getattr(defs,'db_name_map').get(ac_name, getattr(defs,'default_db_name')) + else: return ac_name def connect(self): """ @@ -253,7 
+255,7 @@ class Database: # ====================================================================================== # get a single value from a record - def get_value(self, doctype, docname, fieldname): + def get_value(self, doctype, docname, fieldname, ignore=None): """ Get a single / multiple value from a record. @@ -264,8 +266,13 @@ class Database: if docname and (docname!=doctype or docname=='DocType'): if type(fieldname) in (list, tuple): fl = '`, `'.join(fieldname) - - r = self.sql("select `%s` from `tab%s` where name='%s'" % (fl, doctype, docname)) + try: + r = self.sql("select `%s` from `tab%s` where name='%s'" % (fl, doctype, docname)) + except Exception, e: + if e.args[0]==1054 and ignore: + return None + else: + raise e return r and (len(r[0]) > 1 and r[0] or r[0][0]) or None else: if type(fieldname) in (list, tuple): diff --git a/cgi-bin/webnotes/model/__init__.py b/cgi-bin/webnotes/model/__init__.py index 98bf7cc1cc..2448f5d5be 100644 --- a/cgi-bin/webnotes/model/__init__.py +++ b/cgi-bin/webnotes/model/__init__.py @@ -76,6 +76,10 @@ def delete_doc(doctype=None, name=None, doclist = None, force=0): # check if links exist if not force: check_if_doc_is_linked(doctype, name) + + # remove tags + from webnotes.widgets.tags import clear_tags + clear_tags(doctype, name) try: webnotes.conn.sql("delete from `tab%s` where name='%s' limit 1" % (doctype, name)) @@ -86,7 +90,7 @@ def delete_doc(doctype=None, name=None, doclist = None, force=0): webnotes.msgprint("Cannot delete %s '%s' as it is referenced in another record. You must delete the referred record first" % (doctype, name)) raise e - + return 'okay' #================================================================================= diff --git a/cgi-bin/webnotes/model/doclist.py b/cgi-bin/webnotes/model/doclist.py index c8df02660a..5e270a69cf 100644 --- a/cgi-bin/webnotes/model/doclist.py +++ b/cgi-bin/webnotes/model/doclist.py @@ -18,7 +18,7 @@ class DocList: self.to_docstatus = 0 if dt and dn: self.load_from_db(dt, dn) - + def load_from_db(self, dt, dn): """ Load doclist from dt @@ -34,15 +34,15 @@ class DocList: doclist = [doc,] for t in tablefields: doclist += getchildren(doc.name, t[0], t[1], dt, prefix=prefix) - + self.docs = docs - + def __iter__(self): """ Make this iterable """ return self.docs.__iter__() - + def from_compressed(self, data, docname): """ Expand called from client @@ -50,13 +50,13 @@ class DocList: from webnotes.model.utils import expand self.docs = expand(data) self.objectify(docname) - + def objectify(self, docname=None): """ Converts self.docs from a list of dicts to list of Documents """ from webnotes.model.doc import Document - + self.docs = [Document(fielddata=d) for d in self.docs] if not docname: self.doc, self.children = self.docs[0], self.docs[1:] @@ -69,21 +69,20 @@ class DocList: self.doc = d else: self.children.append(d) - # catch all if no self.doc if not self.doc: self.doc, self.children = self.docs[0], self.docs[1:] - + def make_obj(self): """ Create a DocType object """ if self.obj: return self.obj - + from webnotes.model.code import get_obj self.obj = get_obj(doc=self.doc, doclist=self.children) return self.obj - + def next(self): """ Next doc @@ -104,13 +103,13 @@ class DocList: if (not is_single(self.doc.doctype)) and (not cint(self.doc.fields.get('__islocal'))): tmp = webnotes.conn.sql(""" - SELECT modified FROM `tab%s` WHERE name="%s" for update""" + SELECT modified FROM `tab%s` WHERE name="%s" for update""" % (self.doc.doctype, self.doc.name)) if tmp and str(tmp[0][0]) != 
str(self.doc.modified): webnotes.msgprint(""" - Document has been modified after you have opened it. - To maintain the integrity of the data, you will not be able to save your changes. + Document has been modified after you have opened it. + To maintain the integrity of the data, you will not be able to save your changes. Please refresh this document. [%s/%s]""" % (tmp[0][0], self.doc.modified), raise_exception=1) def check_permission(self): @@ -119,7 +118,7 @@ class DocList: """ if not self.doc.check_perm(verbose=1): webnotes.msgprint("Not enough permission to save %s" % self.doc.doctype, raise_exception=1) - + def check_links(self): """ Checks integrity of links (throws exception if links are invalid) @@ -130,11 +129,11 @@ class DocList: ref[d.doctype] = d.make_link_list() err_list += d.validate_links(ref[d.doctype]) - + if err_list: - webnotes.msgprint("""[Link Validation] Could not find the following values: %s. + webnotes.msgprint("""[Link Validation] Could not find the following values: %s. Please correct and resave. Document Not Saved.""" % ', '.join(err_list), raise_exception=1) - + def update_timestamps_and_docstatus(self): """ Update owner, creation, modified_by, modified, docstatus @@ -142,17 +141,17 @@ class DocList: from webnotes.utils import now ts = now() user = webnotes.__dict__.get('session', {}).get('user') or 'Administrator' - + for d in self.docs: if self.doc.__islocal: d.owner = user d.creation = ts - + d.modified_by = user d.modified = ts if d.docstatus != 2: # don't update deleted d.docstatus = self.to_docstatus - + def prepare_for_save(self, check_links): """ Set owner, modified etc before saving @@ -175,7 +174,7 @@ class DocList: from webnotes.model.triggers import fire_event fire_event(self.doc, method) - + def save_main(self): """ Save the main doc @@ -184,7 +183,7 @@ class DocList: self.doc.save(cint(self.doc.__islocal)) except NameError, e: webnotes.msgprint('%s "%s" already exists' % (self.doc.doctype, self.doc.name)) - + # prompt if cancelled if webnotes.conn.get_value(self.doc.doctype, self.doc.name, 'docstatus')==2: webnotes.msgprint('[%s "%s" has been cancelled]' % (self.doc.doctype, self.doc.name)) @@ -197,7 +196,7 @@ class DocList: """ for d in self.children: deleted, local = d.fields.get('__deleted',0), d.fields.get('__islocal',0) - + if cint(local) and cint(deleted): pass @@ -206,7 +205,7 @@ class DocList: d.parent = self.doc.name # rename if reqd d.parenttype = self.doc.doctype - d.save(new = cint(local)) + d.save(new = cint(local)) def save(self, check_links=1): """ @@ -217,7 +216,7 @@ class DocList: self.save_main() self.save_children() self.run_method('on_update') - + def submit(self): """ Save & Submit - set docstatus = 1, run "on_submit" @@ -227,7 +226,7 @@ class DocList: self.to_docstatus = 1 self.save() self.run_method('on_submit') - + def cancel(self): """ Cancel - set docstatus 2, run "on_cancel" @@ -239,7 +238,7 @@ class DocList: self.save_main() self.save_children() self.run_method('on_cancel') - + def update_after_submit(self): """ Update after submit - some values changed after submit @@ -260,11 +259,11 @@ def getlist(doclist, parentfield): """ import webnotes.model.utils return webnotes.model.utils.getlist(doclist, parentfield) - + def copy_doclist(doclist, no_copy = []): """ Make a copy of the doclist """ import webnotes.model.utils return webnotes.model.utils.copy_doclist(doclist, no_copy) - + diff --git a/cgi-bin/webnotes/model/utils.py b/cgi-bin/webnotes/model/utils.py index 2d019273f1..7e0fe29130 100644 --- 
a/cgi-bin/webnotes/model/utils.py +++ b/cgi-bin/webnotes/model/utils.py @@ -1,7 +1,7 @@ """ Model utilities, unclassified functions """ - + def expand(docs): """ Expand a doclist sent from the client side. (Internally used by the request handler) @@ -25,12 +25,12 @@ def compress(doclist): """ Compress a doclist before sending it to the client side. (Internally used by the request handler) - """ + """ if doclist and hasattr(doclist[0],'fields'): docs = [d.fields for d in doclist] else: docs = doclist - + kl, vl = {}, [] for d in docs: dt = d['doctype'] @@ -38,10 +38,10 @@ def compress(doclist): fl = d.keys() forbidden = ['server_code_compiled'] nl = ['doctype','localname','__oldparent','__unsaved'] - + # add client script for doctype, doctype due to ambiguity if dt=='DocType': nl.append('__client_script') - + for f in fl: if not (f in nl) and not (f in forbidden): nl.append(f) @@ -64,21 +64,24 @@ def compress(doclist): def getlist(doclist, field): """ Filter a list of records for a specific field from the full doclist - + Example:: - - # find all phone call details + + # find all phone call details dl = getlist(self.doclist, 'contact_updates') pl = [] for d in dl: if d.type=='Phone': pl.append(d) """ - + from webnotes.utils import cint l = [] for d in doclist: if d.parent and (not d.parent.lower().startswith('old_parent:')) and d.parentfield == field: l.append(d) + + l.sort(lambda a, b: cint(a.idx) - cint(b.idx)) + return l # Copy doclist @@ -90,31 +93,31 @@ def copy_doclist(doclist, no_copy = []): Pass fields that are not to be copied in `no_copy` """ from webnotes.model.doc import Document - + cl = [] - + # main doc c = Document(fielddata = doclist[0].fields.copy()) - + # clear no_copy fields - for f in no_copy: + for f in no_copy: if c.fields.has_key(f): c.fields[f] = None - + c.name = None c.save(1) cl.append(c) - + # new parent name parent = c.name - + # children for d in doclist[1:]: c = Document(fielddata = d.fields.copy()) c.name = None - + # clear no_copy fields - for f in no_copy: + for f in no_copy: if c.fields.has_key(f): c.fields[f] = None @@ -138,18 +141,18 @@ def _make_html(doc, link_list): from webnotes.utils import cstr out = '' for k in doc.fields.keys(): - if k!='server_code_compiled': + if k!='server_code_compiled': v = cstr(doc.fields[k]) - + # link field if v and (k in link_list.keys()): dt = link_list[k] if type(dt)==str and dt.startswith('link:'): dt = dt[5:] - v = '%s' % (dt, v, v) - + v = '%s' % (dt, v, v) + out += '\t\n' % (cstr(k), v) - + out += '
</table>
' return out @@ -159,13 +162,13 @@ def to_html(doclist): """ out = '' link_lists = {} - + for d in doclist: if not link_lists.get(d.doctype): link_lists[d.doctype] = d.make_link_list() out += _make_html(d, link_lists[d.doctype]) - + return out def commonify_doclist(doclist, with_comments=1): @@ -183,15 +186,17 @@ def commonify_doclist(doclist, with_comments=1): c[k] = doclist[0][k] return c - def strip_common(d): - for k in common_keys: + def strip_common_and_idx(d): + for k in common_keys: if k in d: del d[k] + + if 'idx' in d: del d['idx'] return d def make_common_dicts(doclist): - + common_dict = {} # one per doctype - + # make common dicts for all records for d in doclist: if not d['doctype'] in common_dict: @@ -206,15 +211,15 @@ def commonify_doclist(doclist, with_comments=1): common_dict = make_common_dicts(doclist) # make docs - final = [] + final = [] for d in doclist: - f = strip_common(get_diff_dict(common_dict[d['doctype']], d)) + f = strip_common_and_idx(get_diff_dict(common_dict[d['doctype']], d)) f['doctype'] = d['doctype'] # keep doctype! - + # strip name for child records (only an auto generated number!) if f['doctype'] != doclist[0]['doctype']: del f['name'] - + if with_comments: f['##comment'] = d['doctype'] + ('name' in f and (', ' + f['name']) or '') final.append(f) @@ -225,37 +230,51 @@ def commonify_doclist(doclist, with_comments=1): d['name']='__common__' if with_comments: d['##comment'] = 'These values are common for all ' + d['doctype'] - commons.append(strip_common(d)) - + commons.append(strip_common_and_idx(d)) + common_values = make_common(doclist) return [common_values]+commons+final - + def uncommonify_doclist(dl): """ Expands an commonified doclist """ + # first one has common values common_values = dl[0] common_dict = {} final = [] + idx_dict = {} for d in dl[1:]: if 'name' in d and d['name']=='__common__': + # common for a doctype - del d['name'] common_dict[d['doctype']] = d else: + dt = d['doctype'] + if not dt in idx_dict: idx_dict[dt] = 0; d1 = common_values.copy() - d1.update(common_dict[d['doctype']]) + + # update from common and global + d1.update(common_dict[dt]) d1.update(d) + + # idx by sequence + d1['idx'] = idx_dict[dt] + + # increment idx + idx_dict[dt] += 1 + final.append(d1) return final - + def pprint_doclist(doclist, with_comments = 1): """ Pretty Prints a doclist with common keys separated and comments """ from webnotes.utils import pprint_dict - + dictlist =[pprint_dict(d) for d in commonify_doclist(doclist, with_comments)] title = '# '+doclist[0]['doctype']+', '+doclist[0]['name'] return title + '\n[\n' + ',\n'.join(dictlist) + '\n]' @@ -268,5 +287,5 @@ def peval_doclist(txt): return uncommonify_doclist(eval(txt)) else: return eval(txt) - + return uncommonify_doclist(eval(txt)) diff --git a/cgi-bin/webnotes/modules/__init__.py b/cgi-bin/webnotes/modules/__init__.py index dfbebae378..0f625ed10b 100644 --- a/cgi-bin/webnotes/modules/__init__.py +++ b/cgi-bin/webnotes/modules/__init__.py @@ -139,8 +139,13 @@ class Module: """ Sync the file to the db """ + import os dt, dn = scrub_dt_dn(dt, dn) - self.get_file(dt, dn, dn + '.txt').sync() + path = os.path.exists(os.path.join(self.get_path(), os.path.join(dt, dn, dn + '.txt'))) + if not path: + webnotes.msgprint("%s not found" % path) + else: + self.get_file(dt, dn, dn + '.txt').sync(force=1) def sync_all_of_type(self, extn, verbose=0): """ @@ -217,15 +222,13 @@ class ModuleFile: """ returns file contents """ - try: + import os + if os.path.exists(self.path): f = open(self.path,'r') 
self.content = f.read() f.close() - except IOError, e: - if e.args[0]==2: - self.content = '' - else: - raise e + else: + self.content = '' return self.content @@ -248,21 +251,21 @@ class TxtModuleFile(ModuleFile): def __init__(self, path): ModuleFile.__init__(self, path) - def sync(self): + def sync(self, force=1): """ import the doclist if new """ if self.is_new(): from webnotes.model.utils import peval_doclist doclist = peval_doclist(self.read()) - if doclist: + if doclist: + from webnotes.utils.transfer import set_doc + set_doc(doclist, 1, 1, 1) + # since there is a new timestamp on the file, update timestamp in # the record webnotes.conn.sql("update `tab%s` set modified=now() where name=%s" \ % (doclist[0]['doctype'], '%s'), doclist[0]['name']) - - from webnotes.utils.transfer import set_doc - set_doc(doclist, 1, 1, 1) self.update() diff --git a/cgi-bin/webnotes/modules/patch.py b/cgi-bin/webnotes/modules/patch.py index 401510b3c2..5ecd84446c 100644 --- a/cgi-bin/webnotes/modules/patch.py +++ b/cgi-bin/webnotes/modules/patch.py @@ -31,6 +31,7 @@ def write_log(): patch_log.write(('\n\nError in %s:\n' % webnotes.conn.cur_db_name) + webnotes.getTraceback()) patch_log.close() - webnotes.msgprint("There were errors in running patches, please call the Administrator") - - + from webnotes.utils import sendmail + subj = 'Error in running patches in %s' % webnotes.conn.cur_db_name + msg = subj + '
Login User: ' + webnotes.user.name + '
' + webnotes.getTraceback() + sendmail(['developer@erpnext.com'], sender='automail@erpnext.com', subject= subj, parts=[['text/plain', msg]]) diff --git a/cgi-bin/webnotes/utils/__init__.py b/cgi-bin/webnotes/utils/__init__.py index 1376ed7d41..6e342f1802 100644 --- a/cgi-bin/webnotes/utils/__init__.py +++ b/cgi-bin/webnotes/utils/__init__.py @@ -119,7 +119,7 @@ def getdate(string_date): else: return '' -def add_days(date, days): +def add_days(date, days, format='string'): """ Adds `days` to the given `string_date` """ @@ -130,7 +130,11 @@ def add_days(date, days): if type(date) not in (datetime.datetime, datetime.date): date = getdate(date) - return (date + datetime.timedelta(days)).strftime('%Y-%m-%d') + dt = date + datetime.timedelta(days) + if format=='string': + return dt.strftime('%Y-%m-%d') + else: + return dt def add_months(string_date, months): import datetime diff --git a/cgi-bin/webnotes/utils/backups.py b/cgi-bin/webnotes/utils/backups.py index 8865481ade..26e1f98771 100644 --- a/cgi-bin/webnotes/utils/backups.py +++ b/cgi-bin/webnotes/utils/backups.py @@ -123,7 +123,7 @@ def get_backup(): """ #if verbose: print webnotes.conn.cur_db_name + " " + webnotes.defs.db_password odb = BackupGenerator(webnotes.conn.cur_db_name, webnotes.conn.cur_db_name,\ - webnotes.defs.db_password) + get_db_password(webnotes.conn.cur_db_name)) recipient_list = odb.get_backup() delete_temp_backups() webnotes.msgprint("""A download link to your backup will be emailed \ @@ -131,6 +131,19 @@ def get_backup(): %s""" % (', '.join(recipient_list))) +def get_db_password(db_name): + """ + Get db password from defs + """ + from webnotes import defs + if hasattr(defs, 'get_db_password'): + return defs.get_db_password(db_name) + + if hasattr(defs, 'db_password'): + return defs.db_password + + + def delete_temp_backups(): """ Cleans up the backup_link_path directory by deleting files older than 24 hours diff --git a/cgi-bin/webnotes/utils/email_lib/receive.py b/cgi-bin/webnotes/utils/email_lib/receive.py index 7207d836f0..d6cc347998 100644 --- a/cgi-bin/webnotes/utils/email_lib/receive.py +++ b/cgi-bin/webnotes/utils/email_lib/receive.py @@ -39,8 +39,11 @@ class IncomingMail: """ get utf-8 encoded part content """ - return unicode(part.get_payload(decode=True),str(charset),"ignore").encode('utf8','replace') - + try: + return unicode(part.get_payload(decode=True),str(charset),"ignore").encode('utf8','replace') + except LookupError, e: + return part.get_payload() + def get_attachment(self, part, charset): """ Extracts an attachment @@ -128,7 +131,10 @@ class POP3Mailbox: num = len(self.pop.list()[1]) for m in range(num): msg = self.pop.retr(m+1) - self.process_message(IncomingMail('\n'.join(msg[1]))) + try: + self.process_message(IncomingMail('\n'.join(msg[1]))) + except: + pass self.pop.dele(m+1) self.pop.quit() diff --git a/cgi-bin/webnotes/utils/scheduler.py b/cgi-bin/webnotes/utils/scheduler.py index 96db7bb5d8..7caa1fd6ee 100644 --- a/cgi-bin/webnotes/utils/scheduler.py +++ b/cgi-bin/webnotes/utils/scheduler.py @@ -89,7 +89,7 @@ class Scheduler: import webnotes, webnotes.defs, webnotes.db try: - webnotes.conn = webnotes.db.Database(user=db_name, password=webnotes.defs.db_password) + webnotes.conn = webnotes.db.Database(user=db_name, password=webnotes.get_db_password(db_name)) webnotes.session = {'user':'Administrator'} module = '.'.join(event.split('.')[:-1]) diff --git a/cgi-bin/webnotes/widgets/page.py b/cgi-bin/webnotes/widgets/page.py index 0cad0b72d6..de31fb9fd7 100644 --- a/cgi-bin/webnotes/widgets/page.py 
+++ b/cgi-bin/webnotes/widgets/page.py @@ -19,7 +19,7 @@ class Page: Loads page info from files in module """ # load js - doc.fields['__script'] = module.get_doc_file('page',doc.name,'.js').read() + doc.fields['__script'] = module.get_doc_file('page',doc.name,'.js').read() or doc.script doc.script = None # load css diff --git a/cgi-bin/webnotes/widgets/query_builder.py b/cgi-bin/webnotes/widgets/query_builder.py index 72e320cd70..6b4196b6d7 100644 --- a/cgi-bin/webnotes/widgets/query_builder.py +++ b/cgi-bin/webnotes/widgets/query_builder.py @@ -14,7 +14,7 @@ def get_search_criteria_list(dt): def load_report_list(): webnotes.response['rep_list'] = get_search_criteria_list(form.getvalue('dt')) - + # Get, scrub metadata # ==================================================================== @@ -37,20 +37,20 @@ def get_parent_dt(dt): def get_sql_meta(tl): std_columns = { - 'owner':('Owner', '', '', '100'), - 'creation':('Created on', 'Date', '', '100'), - 'modified':('Last modified on', 'Date', '', '100'), + 'owner':('Owner', '', '', '100'), + 'creation':('Created on', 'Date', '', '100'), + 'modified':('Last modified on', 'Date', '', '100'), 'modified_by':('Modified By', '', '', '100') } - + meta = {} - + for dt in tl: meta[dt] = std_columns.copy() # for table doctype, the ID is the parent id pdt = get_parent_dt(dt) - if pdt: + if pdt: meta[dt]['parent'] = ('ID', 'Link', pdt, '200') # get the field properties from DocField @@ -58,10 +58,10 @@ def get_sql_meta(tl): for r in res: if r[0]: meta[dt][r[0]] = (r[1], r[2], r[3], r[4]); - + # name meta[dt]['name'] = ('ID', 'Link', dt, '200') - + return meta # Additional conditions to fulfill match permission rules @@ -80,12 +80,12 @@ def getmatchcondition(dt, ud, ur): return '' return ' OR '.join(cond) - + def add_match_conditions(q, tl, ur, ud): sl = [] for dt in tl: s = getmatchcondition(dt, ud, ur) - if s: + if s: sl.append(s) # insert the conditions @@ -94,13 +94,13 @@ def add_match_conditions(q, tl, ur, ud): condition_end = q.find('ORDER BY')!=-1 and 'ORDER BY' or 'LIMIT' condition_end = q.find('GROUP BY')!=-1 and 'GROUP BY' or condition_end - + if q.find('ORDER BY')!=-1 or q.find('LIMIT')!=-1 or q.find('GROUP BY')!=-1: # if query continues beyond conditions q = q.split(condition_end) q = q[0] + condition_st + '(' + ' OR '.join(sl) + ') ' + condition_end + q[1] else: q = q + condition_st + '(' + ' OR '.join(sl) + ')' - + return q # execute server-side script from Search Criteria @@ -111,7 +111,7 @@ def exec_report(code, res, colnames=[], colwidths=[], coltypes=[], coloptions=[] for c in colnames: col_idx[c] = i i+=1 - + # load globals (api) from webnotes import * from webnotes.utils import * @@ -127,12 +127,12 @@ def exec_report(code, res, colnames=[], colwidths=[], coltypes=[], coloptions=[] NEWLINE = '\n' exec str(code) - + if out!=None: res = out return res, style, header_html, footer_html, page_template - + # ==================================================================== def guess_type(m): @@ -146,7 +146,7 @@ def guess_type(m): return 'Date' else: return 'Data' - + def build_description_simple(): colnames, coltypes, coloptions, colwidths = [], [], [], [] @@ -155,7 +155,7 @@ def build_description_simple(): coltypes.append(guess_type[m[0]]) coloptions.append('') colwidths.append('100') - + return colnames, coltypes, coloptions, colwidths # ==================================================================== @@ -180,27 +180,27 @@ def build_description_standard(meta, tl): if (not dt) and merged_meta.get(fn): # no "AS" given, find type from 
merged description - + desc = merged_meta[fn] colnames.append(desc[0] or fn) coltypes.append(desc[1] or '') coloptions.append(desc[2] or '') colwidths.append(desc[3] or '100') - + elif meta.get(dt,{}).has_key(fn): # type specified for a multi-table join # usually from Report Builder - + desc = meta[dt][fn] colnames.append(desc[0] or fn) coltypes.append(desc[1] or '') coloptions.append(desc[2] or '') colwidths.append(desc[3] or '100') - + else: # nothing found # guess - + colnames.append(fn) coltypes.append(guess_type(f[1])) coloptions.append('') @@ -214,21 +214,21 @@ def build_description_standard(meta, tl): def runquery(q='', ret=0, from_export=0): import webnotes.utils - formatted = cint(form.getvalue('formatted')) - + formatted = cint(form.getvalue('formatted')) + # CASE A: Simple Query # -------------------- if form.getvalue('simple_query') or form.getvalue('is_simple'): - q = form.getvalue('simple_query') or form.getvalue('query') + if not q: q = form.getvalue('simple_query') or form.getvalue('query') if q.split()[0].lower() != 'select': raise Exception, 'Query must be a SELECT' - + as_dict = cint(form.getvalue('as_dict')) res = sql(q, as_dict = as_dict, as_list = not as_dict, formatted=formatted) - + # build colnames etc from metadata colnames, coltypes, coloptions, colwidths = [], [], [], [] - + # CASE B: Standard Query # ----------------------- else: @@ -236,17 +236,17 @@ def runquery(q='', ret=0, from_export=0): tl = get_sql_tables(q) meta = get_sql_meta(tl) - + q = add_match_conditions(q, tl, webnotes.user.roles, webnotes.user.get_defaults()) - + # replace special variables q = q.replace('__user', session['user']) q = q.replace('__today', webnotes.utils.nowdate()) - + res = sql(q, as_list=1, formatted=formatted) colnames, coltypes, coloptions, colwidths = build_description_standard(meta, tl) - + # run server script # ----------------- style, header_html, footer_html, page_template = '', '', '', '' @@ -254,15 +254,15 @@ def runquery(q='', ret=0, from_export=0): sc_id = form.getvalue('sc_id') from webnotes.model.code import get_code sc_details = webnotes.conn.sql("select module, standard, server_script from `tabSearch Criteria` where name=%s", sc_id)[0] - if sc_details[1]!='No': + if sc_details[1]!='No': code = get_code(sc_details[0], 'Search Criteria', sc_id, 'py') else: code = sc_details[2] - + if code: filter_values = form.has_key('filter_values') and eval(form.getvalue('filter_values','')) or {} res, style, header_html, footer_html, page_template = exec_report(code, res, colnames, colwidths, coltypes, coloptions, filter_values, q, from_export) - + out['colnames'] = colnames out['coltypes'] = coltypes out['coloptions'] = coloptions @@ -270,17 +270,17 @@ def runquery(q='', ret=0, from_export=0): out['header_html'] = header_html out['footer_html'] = footer_html out['page_template'] = page_template - + if style: out['style'] = style - + # just the data - return if ret==1: - return res + return res out['values'] = res - # return num of entries + # return num of entries qm = form.has_key('query_max') and form.getvalue('query_max') or '' if qm and qm.strip(): if qm.split()[0].lower() != 'select': @@ -298,31 +298,31 @@ def runquery_csv(): # run query res = runquery(from_export = 1) - + q = form.getvalue('query') - + rep_name = form.getvalue('report_name') if not form.has_key('simple_query'): # Report Name if not rep_name: rep_name = get_sql_tables(q)[0] - + if not rep_name: rep_name = 'DataExport' - + # Headings heads = [] - + rows = [[rep_name], out['colnames']] + out['values'] - + 
from cStringIO import StringIO import csv - + f = StringIO() writer = csv.writer(f) for r in rows: writer.writerow(r) - + f.seek(0) out['result'] = f.read() out['type'] = 'csv' diff --git a/cgi-bin/webnotes/widgets/search.py b/cgi-bin/webnotes/widgets/search.py index 9d8da2b790..bfd1c08486 100644 --- a/cgi-bin/webnotes/widgets/search.py +++ b/cgi-bin/webnotes/widgets/search.py @@ -22,16 +22,16 @@ def getsearchfields(): webnotes.response['searchfields'] = [['name', 'ID', 'Data', '']] + res def make_query(fields, dt, key, txt, start, length): - return """SELECT %(fields)s - FROM `tab%(dt)s` + return """SELECT %(fields)s + FROM `tab%(dt)s` WHERE `tab%(dt)s`.`%(key)s` LIKE '%(txt)s' AND `tab%(dt)s`.docstatus != 2 - ORDER BY `tab%(dt)s`.`%(key)s` + ORDER BY `tab%(dt)s`.`%(key)s` DESC LIMIT %(start)s, %(len)s """ % { 'fields': fields, 'dt': dt, 'key': key, 'txt': txt + '%', - 'start': start, + 'start': start, 'len': length } @@ -48,7 +48,7 @@ def get_std_fields_list(dt, key): def build_for_autosuggest(res): from webnotes.utils import cstr - + results = [] for r in res: info = '' @@ -56,10 +56,10 @@ def build_for_autosuggest(res): info = ','.join([cstr(t) for t in r[1:]]) if len(info) > 30: info = info[:30] + '...' - + results.append({'id':r[0], 'value':r[0], 'info':info}) return results - + def scrub_custom_query(query, key, txt): if '%(key)s' in query: query = query.replace('%(key)s', key) @@ -74,7 +74,7 @@ def search_link(): txt = webnotes.form.getvalue('txt') dt = webnotes.form.getvalue('dt') query = webnotes.form.getvalue('query') - + if query: res = webnotes.conn.sql(scrub_custom_query(query, 'name', txt)) else: @@ -97,5 +97,5 @@ def search_widget(): query = scrub_custom_query(user_query, key, txt) else: query = make_query(', '.join(get_std_fields_list(dt, key)), dt, key, txt, webnotes.form.getvalue('start') or 0, webnotes.form.getvalue('page_len') or 50) - + webnotes.widgets.query_builder.runquery(query) diff --git a/cgi-bin/webnotes/widgets/tags.py b/cgi-bin/webnotes/widgets/tags.py index 5fe5429c6d..6fe4b1950c 100644 --- a/cgi-bin/webnotes/widgets/tags.py +++ b/cgi-bin/webnotes/widgets/tags.py @@ -70,7 +70,7 @@ class DocTags: def get_tags(self, dn): """returns tag for a particular item""" - return webnotes.conn.get_value(self.dt, dn, '_user_tags') or '' + return webnotes.conn.get_value(self.dt, dn, '_user_tags', ignore=1) or '' def create(self, tag): try: @@ -95,16 +95,32 @@ class DocTags: self.update(dn, filter(lambda x:x!=tag, tl)) TagCounter(self.dt).update(tag, -1) + def remove_all(self, dn): + """remove all user tags (call before delete)""" + tl = self.get_tags(dn).split(',') + tl = filter(lambda x:x, tl) + tc = TagCounter(self.dt) + for t in tl: + tc.update(t, -1) + self.update(dn, []) + def update(self, dn, tl): """updates the _user_tag column in the table""" - tl = list(set(filter(lambda x: x, tl))) - + if not tl: + tags = '' + else: + tl = list(set(filter(lambda x: x, tl))) + tags = ',' + ','.join(tl) try: webnotes.conn.sql("update `tab%s` set _user_tags=%s where name=%s" % \ - (self.dt,'%s','%s'), (',' + ','.join(tl), dn)) + (self.dt,'%s','%s'), (tags , dn)) except Exception, e: if e.args[0]==1054: + if not tags: + # no tags, nothing to do + return + self.setup() self.update(dn, tl) else: raise e @@ -141,6 +157,8 @@ class TagCounter: # if doctype cnt does not exist # creates it for the first time def update(self, tag, diff): + if not tag: + return "updates tag cnt for a doctype and tag" cnt = webnotes.conn.sql("select cnt from `_tag_cnt` where doctype=%s and tag=%s", 
(self.doctype, tag)) @@ -255,4 +273,6 @@ def get_top_tags(args=''): get_item('tags-' + dt).set(tl, 60*60) return tl - + +def clear_tags(dt, dn): + DocTags(dt).remove_all(dn) diff --git a/js/form.compressed.js b/js/form.compressed.js index 64ea3993ac..a29f580874 100644 --- a/js/form.compressed.js +++ b/js/form.compressed.js @@ -121,7 +121,8 @@ if(this.docname){if(!this.check_doc_perm())return;if(!this.setup_done)this.setup if(this.doc.__islocal) this.is_editable[this.docname]=1;this.editable=this.is_editable[this.docname];if(!this.doc.__archived&&(this.editable||(!this.editable&&this.meta.istable))){if(this.print_wrapper){$dh(this.print_wrapper);$ds(this.page_layout.wrapper);} if(!this.meta.istable){this.refresh_header();this.sidebar&&this.sidebar.refresh();} -this.runclientscript('refresh');this.refresh_tabs();this.refresh_fields();this.refresh_dependency();this.refresh_footer();if(this.layout)this.layout.show();if(is_onload) +this.runclientscript('refresh');$(document).trigger('form_refresh') +this.refresh_tabs();this.refresh_fields();this.refresh_dependency();this.refresh_footer();if(this.layout)this.layout.show();if(is_onload) this.runclientscript('onload_post_render',this.doctype,this.docname);}else{this.refresh_header();if(this.print_wrapper){this.refresh_print_layout();} this.runclientscript('edit_status_changed');} if(!this.display)this.show_the_frm();if(!this.meta.in_dialog)page_body.change_to('Forms');}} @@ -252,22 +253,25 @@ this.input.onchange=function(){if(me.editor){}else{me.set(me.input.value);} me.run_trigger();} this.get_value=function(){if(me.editor){return me.editor.getContent();}else{return this.input.value;}} if(this.df.fieldtype=='Text Editor'){$(me.input).tinymce({script_url:'js/tiny_mce_33/tiny_mce.js',theme:"advanced",plugins:"style,inlinepopups,table",extended_valid_elements:"div[id|dir|class|align|style]",width:'100%',height:'360px',theme_advanced_buttons1:"bold,italic,underline,strikethrough,hr,|,justifyleft,justifycenter,justifyright,|,formatselect,fontselect,fontsizeselect",theme_advanced_buttons2:"bullist,numlist,|,outdent,indent,|,undo,redo,|,link,unlink,code,|,forecolor,backcolor,|,tablecontrols",theme_advanced_buttons3:"",theme_advanced_toolbar_location:"top",theme_advanced_toolbar_align:"left",content_css:"js/tiny_mce_33/custom_content.css",oninit:function(){me.init_editor();}});}else{$y(me.input,{fontFamily:'Courier, Fixed'});}} -_f.CodeField.prototype.init_editor=function(){var me=this;this.editor=tinymce.get(this.myid);this.editor.onKeyUp.add(function(ed,e){me.set(ed.getContent());});this.editor.onPaste.add(function(ed,e){me.set(ed.getContent());});this.editor.onSetContent.add(function(ed,e){me.set(ed.getContent());});if(cur_frm)this.editor.setContent(locals[cur_frm.doctype][cur_frm.docname][this.df.fieldname]);} +_f.CodeField.prototype.init_editor=function(){var me=this;this.editor=tinymce.get(this.myid);this.editor.onKeyUp.add(function(ed,e){me.set(ed.getContent());});this.editor.onPaste.add(function(ed,e){me.set(ed.getContent());});this.editor.onSetContent.add(function(ed,e){me.set(ed.getContent());});var c=locals[cur_frm.doctype][cur_frm.docname][this.df.fieldname];if(cur_frm&&c){this.editor.setContent(c);}} _f.CodeField.prototype.set_disp=function(val){$y(this.disp_area,{width:'90%'}) if(this.df.fieldtype=='Text Editor'){this.disp_area.innerHTML=val;}else{this.disp_area.innerHTML='';}} _f.cur_grid_cell=null;_f.Grid=function(parent){} 
-_f.Grid.prototype.init=function(parent,row_height){this.alt_row_bg='#F2F2FF';this.row_height=row_height;if(!row_height)this.row_height='26px';this.make_ui(parent);this.insert_column('','','Int','Sr','50px','',[1,0,0]);this.total_width=50;if(this.oninit)this.oninit();keypress_observers.push(this)} +_f.Grid.prototype.init=function(parent,row_height){this.col_idx_by_name={} +this.alt_row_bg='#F2F2FF';this.row_height=row_height;if(!row_height)this.row_height='26px';this.make_ui(parent);this.insert_column('','','Int','Sr','50px','',[1,0,0]);if(this.oninit)this.oninit();keypress_observers.push(this);} _f.Grid.prototype.make_ui=function(parent){var ht=make_table($a(parent,'div'),1,2,'100%',['60%','40%']);this.main_title=$td(ht,0,0);this.main_title.className='columnHeading';$td(ht,0,1).style.textAlign='right';this.tbar_div=$a($td(ht,0,1),'div','grid_tbarlinks');if(isIE)$y(this.tbar_div,{width:'200px'});this.tbar_tab=make_table(this.tbar_div,1,4,'100%',['25%','25%','25%','25%']);this.wrapper=$a(parent,'div','grid_wrapper');$h(this.wrapper,cint(screen.width*0.5)+'px');this.head_wrapper=$a(this.wrapper,'div','grid_head_wrapper');this.head_tab=$a(this.head_wrapper,'table','grid_head_table');this.head_row=this.head_tab.insertRow(0);this.tab_wrapper=$a(this.wrapper,'div','grid_tab_wrapper');this.tab=$a(this.tab_wrapper,'table','grid_table');var me=this;this.wrapper.onscroll=function(){me.head_wrapper.style.top=me.wrapper.scrollTop+'px';}} _f.Grid.prototype.show=function(){if(this.can_add_rows){$ds(this.tbar_div);}else{$dh(this.tbar_div);} $ds(this.wrapper);} _f.Grid.prototype.hide=function(){$dh(this.wrapper);$dh(this.tbar_div);} -_f.Grid.prototype.insert_column=function(doctype,fieldname,fieldtype,label,width,options,perm,reqd){var idx=this.head_row.cells.length;if(!width)width='100px';var col=this.head_row.insertCell(idx);col.doctype=doctype;col.fieldname=fieldname;col.fieldtype=fieldtype;col.innerHTML='
'+label+'
';col.label=label;if(reqd) -col.childNodes[0].style.color="#D22";this.total_width+=cint(width);$w(col,width);col.orig_width=col.style.width;col.options=options;col.perm=perm;} -_f.Grid.prototype.set_column_disp=function(label,show){for(var i=0;i'+label+'';col.label=label;if(reqd) +col.childNodes[0].style.color="#D22";col.style.width=width;col.options=options;col.perm=perm;this.col_idx_by_name[fieldname]=idx;} +_f.Grid.prototype.reset_table_width=function(){var w=0;for(var i=0,len=this.head_row.cells.length;ithis.tab.rows.length) +_f.Grid.prototype.set_data=function(data){this.cell_deselect();this.reset_table_width();if(data.length>this.tab.rows.length) this.append_rows(data.length-this.tab.rows.length);if(data.length'+label+'';c.cur_label=label;break;}}} _f.FormGrid.prototype.refresh=function(){var docset=getchildren(this.doctype,this.field.frm.docname,this.field.df.fieldname,this.field.frm.doctype);var data=[];for(var i=0;i
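
The backups.py hunk above introduces a small fallback for looking up a database password: prefer a get_db_password(db_name) hook on the settings module, otherwise fall back to a shared db_password attribute. Below is a minimal standalone sketch of that pattern; the _Defs class is a hypothetical stand-in for webnotes.defs, not the framework's real module.

# Sketch of the password-lookup fallback added in webnotes/utils/backups.py.
# _Defs stands in for webnotes.defs; attribute names mirror the diff.

class _Defs(object):
    """Stand-in settings object with an optional per-database hook."""
    db_password = 'shared-secret'

    def get_db_password(self, db_name):
        # hypothetical per-database lookup
        return 'secret-for-' + db_name


def get_db_password(defs, db_name):
    """Return a db password, preferring a get_db_password() hook on defs."""
    if hasattr(defs, 'get_db_password'):
        return defs.get_db_password(db_name)
    if hasattr(defs, 'db_password'):
        return defs.db_password
    return None


if __name__ == '__main__':
    print(get_db_password(_Defs(), 'accounts'))   # 'secret-for-accounts'
    print(get_db_password(object(), 'accounts'))  # None: no settings defined

Checking for the hook before the plain attribute, as the diff does, lets multi-tenant deployments supply per-database passwords while single-database setups that only define db_password keep working unchanged.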