# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import hashlib
import json
import os

import frappe
from frappe.model.base_document import get_controller
from frappe.modules import get_module_path, scrub_dt_dn
from frappe.query_builder import DocType
from frappe.utils import get_datetime_str, now


def caclulate_hash(path: str) -> str:
    """Calculate md5 hash of the file in binary mode

    Args:
        path (str): Path to the file to be hashed

    Returns:
        str: The calculated hash
    """
    hash_md5 = hashlib.md5()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()


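# Field values that are preserved from the existing database record when a
# document is reimported (see delete_old_doc below), so that site-level changes
# such as disabling a report or completing an onboarding step survive a sync.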
ignore_values = {
    "Report": ["disabled", "prepared_report", "add_total_row"],
    "Print Format": ["disabled"],
    "Notification": ["enabled"],
    "Print Style": ["disabled"],
    "Module Onboarding": ["is_complete"],
    "Onboarding Step": ["is_complete", "is_skipped"],
}
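
# Child DocTypes whose existing rows are kept instead of being replaced on
# reimport, unless permissions are being reset (see delete_old_doc below).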
ignore_doctypes = [""]
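

# Illustrative usage (module/doctype/docname values are hypothetical):
#   import_files("my_module", "doctype", "my_doctype")
#   import_files([["my_module", "doctype", "my_doctype"], ["my_module", "report", "my_report"]])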
def import_files(module, dt=None, dn=None, force=False, pre_process=None, reset_permissions=False):
    if type(module) is list:
        out = []
        for m in module:
            out.append(
                import_file(
                    m[0], m[1], m[2], force=force, pre_process=pre_process, reset_permissions=reset_permissions
                )
            )
        return out
    else:
        return import_file(
            module, dt, dn, force=force, pre_process=pre_process, reset_permissions=reset_permissions
        )


def import_file(module, dt, dn, force=False, pre_process=None, reset_permissions=False):
    """Sync a file from txt if modified, return false if not updated"""
    path = get_file_path(module, dt, dn)
    ret = import_file_by_path(path, force, pre_process=pre_process, reset_permissions=reset_permissions)
    return ret


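# get_file_path() resolves to <module path>/<scrubbed dt>/<scrubbed dn>/<scrubbed dn>.json,
# e.g. (illustrative) ".../my_app/my_module/doctype/my_doctype/my_doctype.json".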
def get_file_path(module, dt, dn):
    dt, dn = scrub_dt_dn(dt, dn)
    path = os.path.join(get_module_path(module), os.path.join(dt, dn, f"{dn}.json"))
    return path


def import_file_by_path(
    path: str,
    force: bool = False,
    data_import: bool = False,
    pre_process=None,
    ignore_version: bool = None,
    reset_permissions: bool = False,
):
    """Import a document from the JSON file at the given path.

    Several conditions decide whether the file is imported; they are
    evaluated in the order listed below:

    - If `force` is true, import the file. Otherwise, move ahead.
    - Get `db_modified_timestamp` (the value of the `modified` field in the
      database for this document). If it is `None`, the document does not yet
      exist in the database, so import the file. Otherwise, move ahead.
    - If a hash is stored in the database for this document, calculate the
      hash of the file and compare the two. If they differ, import the file.
      If no hash is stored, move ahead.
    - If `db_modified_timestamp` is older than the timestamp in the file,
      import the file.

    When the timestamp comparison is reached for a DocType, its hash does not
    exist yet. So even if the database timestamp is newer, the file is imported
    and the calculated hash is stored in the database, so that subsequent
    imports can compare hashes instead. As a precaution, the `modified`
    timestamp is also updated to the current time.

    Args:
        path (str): Path to the file.
        force (bool, optional): Load the file without checking any conditions. Defaults to False.
        data_import (bool, optional): Whether this is a data import; if False, validation,
            permission and mandatory checks are skipped on insert. Defaults to False.
        pre_process (callable, optional): Any preprocessing that may need to take place on the doc.
            Defaults to None.
        ignore_version (bool, optional): Ignore the current version. Defaults to None.
        reset_permissions (bool, optional): Reset permissions for the document. Defaults to False.

    Returns:
        bool: True if the import took place, False if it was skipped.
    """
    frappe.flags.dt = frappe.flags.dt or []

    try:
        docs = read_doc_from_file(path)
    except IOError:
        print(f"{path} missing")
        return

    calculated_hash = caclulate_hash(path)

    if docs:
        if not isinstance(docs, list):
            docs = [docs]

        for doc in docs:
            # modified timestamp in db, none if doctype's first import
            db_modified_timestamp = frappe.db.get_value(doc["doctype"], doc["name"], "modified")
            is_db_timestamp_latest = db_modified_timestamp and doc.get("modified") <= get_datetime_str(db_modified_timestamp)

            if not force or db_modified_timestamp:
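                # migration_hash is only maintained for DocType records (see the
                # write-back further below); for other doctypes this lookup can
                # raise, in which case no stored hash is assumed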
                try:
                    stored_hash = frappe.db.get_value(doc["doctype"], doc["name"], "migration_hash")
                except Exception:
                    frappe.flags.dt += [doc["doctype"]]
                    stored_hash = None

                # if hash exists and is equal no need to update
                if stored_hash and stored_hash == calculated_hash:
                    return False

                # if hash doesn't exist, check if db timestamp is same as json timestamp, add hash if from doctype
                if is_db_timestamp_latest and doc["doctype"] != "DocType":
                    return False

            import_doc(
                docdict=doc,
                force=force,
                data_import=data_import,
                pre_process=pre_process,
                ignore_version=ignore_version,
                reset_permissions=reset_permissions,
                path=path,
            )

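            # persist the file hash for DocTypes so the next sync can skip an unchanged file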
            if doc["doctype"] == "DocType":
                doctype_table = DocType("DocType")
                frappe.qb.update(doctype_table).set(
                    doctype_table.migration_hash, calculated_hash
                ).where(doctype_table.name == doc["name"]).run()

            new_modified_timestamp = doc.get("modified")
            # if db timestamp is newer, hash must have changed, must update db timestamp
            if is_db_timestamp_latest and doc["doctype"] == "DocType":
                new_modified_timestamp = now()

            if new_modified_timestamp:
                update_modified(new_modified_timestamp, doc)

    return True


def is_timestamp_changed(doc):
    # check if timestamps match
    db_modified = frappe.db.get_value(doc["doctype"], doc["name"], "modified")
    return not (db_modified and doc.get("modified") == get_datetime_str(db_modified))


def read_doc_from_file(path):
    doc = None
    if os.path.exists(path):
        with open(path, "r") as f:
            try:
                doc = json.loads(f.read())
            except ValueError:
                print("bad json: {0}".format(path))
                raise
    else:
        raise IOError("%s missing" % path)
    return doc


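# Single DocTypes keep their field values (including `modified`) as rows in the
# `tabSingles` table, hence the two different update paths below.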
def update_modified(original_modified, doc):
    # since there is a new timestamp on the file, update the timestamp in the database
    if doc["doctype"] == doc["name"] and doc["name"] != "DocType":
        singles_table = DocType("Singles")
        frappe.qb.update(singles_table).set(singles_table.value, original_modified).where(
            singles_table["field"] == "modified",  # singles_table.field is a method of pypika Selectable
        ).where(singles_table.doctype == doc["name"]).run()
    else:
        doctype_table = DocType(doc["doctype"])
        frappe.qb.update(doctype_table).set(doctype_table.modified, original_modified).where(
            doctype_table.name == doc["name"]
        ).run()


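# Insert the document described by `docdict`, replacing any existing version.
# Validation, permission and mandatory checks are skipped unless this is a data import.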
def import_doc(
    docdict,
    force=False,
    data_import=False,
    pre_process=None,
    ignore_version=None,
    reset_permissions=False,
    path=None,
):
    frappe.flags.in_import = True
    docdict["__islocal"] = 1

    controller = get_controller(docdict["doctype"])
    if controller and hasattr(controller, "prepare_for_import") and callable(getattr(controller, "prepare_for_import")):
        controller.prepare_for_import(docdict)

    doc = frappe.get_doc(docdict)

    reset_tree_properties(doc)
    load_code_properties(doc, path)

    doc.run_method("before_import")

    doc.flags.ignore_version = ignore_version
    if pre_process:
        pre_process(doc)

    if frappe.db.exists(doc.doctype, doc.name):
        delete_old_doc(doc, reset_permissions)

    doc.flags.ignore_links = True
    if not data_import:
        doc.flags.ignore_validate = True
        doc.flags.ignore_permissions = True
        doc.flags.ignore_mandatory = True

    doc.insert()

    frappe.flags.in_import = False

    return doc


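# For controllers that declare get_code_fields() (a mapping of fieldname -> file
# extension), the code is read from sibling files named after the document,
# e.g. (illustrative) my_doctype.json with my_doctype.js next to it.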
def load_code_properties(doc, path):
    """Load code files stored in separate files with extensions"""
    if path:
        if hasattr(doc, "get_code_fields"):
            dirname, filename = os.path.split(path)
            for key, extn in doc.get_code_fields().items():
                codefile = os.path.join(dirname, filename.split(".")[0] + "." + extn)
                if os.path.exists(codefile):
                    with open(codefile, "r") as txtfile:
                        doc.set(key, txtfile.read())


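# Before deleting the existing record, carry over field values listed in
# ignore_values and keep child rows of doctypes listed in ignore_doctypes
# (unless permissions are being reset).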
def delete_old_doc(doc, reset_permissions):
    ignore = []
    old_doc = frappe.get_doc(doc.doctype, doc.name)

    if doc.doctype in ignore_values:
        # update ignore values
        for key in ignore_values.get(doc.doctype) or []:
            doc.set(key, old_doc.get(key))

    # update ignored docs into new doc
    for df in doc.meta.get_table_fields():
        if df.options in ignore_doctypes and not reset_permissions:
            doc.set(df.fieldname, [])
            ignore.append(df.options)

    # delete old
    frappe.delete_doc(doc.doctype, doc.name, force=1, ignore_doctypes=ignore, for_reload=True)

    doc.flags.ignore_children_type = ignore


def reset_tree_properties(doc):
    # Note on Tree DocTypes:
    # The tree structure is maintained in the database via the fields "lft" and
    # "rgt". They are automatically set and kept up-to-date. Importing them
    # would destroy any existing tree structure.
    if getattr(doc.meta, "is_tree", None) and any([doc.lft, doc.rgt]):
        print('Ignoring values of `lft` and `rgt` for {} "{}"'.format(doc.doctype, doc.name))
        doc.lft = None
        doc.rgt = None