@@ -1,20 +1,19 @@
 {
 	"default.css": [
-		"css/body.css",
-		"css/menus.css",
-		"css/messages.css",
-		"css/forms.css",
-		"css/grid.css",
-		"css/listing.css",
-		"css/report.css",
-		"css/calendar.css",
-		"css/autosuggest.css",
-		"css/dialog.css",
-		"css/wntoolbar.css",
-		"css/tabs.css",
-		"css/jqplot.css",
-		"css/bw-icons.css",
-		"css/sidebar.css",
-		"css/doc_column_view.css"
+		"body.css",
+		"menus.css",
+		"messages.css",
+		"forms.css",
+		"grid.css",
+		"listing.css",
+		"report.css",
+		"calendar.css",
+		"autosuggest.css",
+		"dialog.css",
+		"wntoolbar.css",
+		"tabs.css",
+		"jqplot.css",
+		"bw-icons.css",
+		"sidebar.css"
 	]
 }
@@ -107,6 +107,41 @@ div.std-footer-item {
 	margin: 0px 13px 13px 0px;
 }
 
+.shadow {
+	-moz-box-shadow: 0px 2px 2px #888;
+	-webkit-box-shadow: 0px 2px 2px #888;
+	box-shadow: 0px 2px 2px #888;
+}
+
+.round {
+	-webkit-border-radius: 5px;
+	-moz-border-radius: 5px;
+	border-radius: 5px;
+}
+
+.gradient {
+	background: #ededed; /* Old browsers */
+	background: -moz-linear-gradient(top, #ededed 0%, #d1d1d1 47%, #b7b7b7 100%); /* FF3.6+ */
+	background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,#ededed), color-stop(47%,#d1d1d1), color-stop(100%,#b7b7b7)); /* Chrome,Safari4+ */
+	background: -webkit-linear-gradient(top, #ededed 0%,#d1d1d1 47%,#b7b7b7 100%); /* Chrome10+,Safari5.1+ */
+	background: -o-linear-gradient(top, #ededed 0%,#d1d1d1 47%,#b7b7b7 100%); /* Opera11.10+ */
+	background: -ms-linear-gradient(top, #ededed 0%,#d1d1d1 47%,#b7b7b7 100%); /* IE10+ */
+	filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#ededed', endColorstr='#b7b7b7',GradientType=0 ); /* IE6-9 */
+	background: linear-gradient(top, #ededed 0%,#d1d1d1 47%,#b7b7b7 100%); /* W3C */
+}
+
+.header-gradient {
+	background: #84827c; /* Old browsers */
+	background: -moz-linear-gradient(top, #84827c 0%, #27211c 100%); /* FF3.6+ */
+	background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,#84827c), color-stop(100%,#27211c)); /* Chrome,Safari4+ */
+	background: -webkit-linear-gradient(top, #84827c 0%,#27211c 100%); /* Chrome10+,Safari5.1+ */
+	background: -o-linear-gradient(top, #84827c 0%,#27211c 100%); /* Opera11.10+ */
+	background: -ms-linear-gradient(top, #84827c 0%,#27211c 100%); /* IE10+ */
+	filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#84827c', endColorstr='#27211c',GradientType=0 ); /* IE6-9 */
+	background: linear-gradient(top, #84827c 0%,#27211c 100%); /* W3C */
+	color: #FFF;
+}
+
 /******** Menus - menu.js ************/
 ul.menu_toolbar {
@@ -1745,65 +1780,3 @@ div.follower-list {
 	margin-top: 7px;
 	font-size: 11px;
 }
-
-/******** Listing2 ***********/
-table.dcv-tab {
-	table-layout: fixed;
-	border-collapse: collapse;
-}
-
-table.dcv-tab td {
-	border: 1px solid #DDD;
-}
-
-table.dcv-tab td td {
-	border: 0px solid #BBB;
-}
-
-div.list2-head {
-	padding: 3px 6px;
-	font-size: 14px;
-	color: #555;
-}
-
-div.list2-new {
-	padding: 4px;
-}
-
-div.list2-message {
-	padding: 8px;
-	color: #888;
-	background-color: #FFD;
-}
-
-div.list2-search {
-	padding: 4px;
-}
-
-div.list2-list-area {
-	padding: 4px;
-}
-
-div.list2-item-div {
-	border-bottom: 1px solid #AAA;
-	padding: 4px;
-	cursor: pointer;
-}
-
-span.list2-edit-link {
-	font-weight: normal;
-	font-size: 11px;
-	margin-left: 7px;
-}
-
-div.list2-item-title {
-	font-weight: bold;
-}
-
-div.list2-item-selected {
-	background-color: #CCF;
-}
-
-div.list2-item-more-info {
-	color: #888;
-	margin-top: 4px;
-}
@@ -6,11 +6,8 @@ wn.provide('wn.settings');wn.provide('wn.ui');wn.xmlhttp={request:function(){if(
 return new XMLHttpRequest();else if(window.ActiveXObject)
 return new ActiveXObject("MsXml2.XmlHttp");},complete:function(req,callback,url){if(req.status==200||req.status==304){callback(req.responseText);}else{alert(url+' request error: '+req.statusText+' ('+req.status+')');}},get:function(url,callback,args,async){if(async===null)async=true;var req=wn.xmlhttp.request();req.onreadystatechange=function(){if(req.readyState==4){wn.xmlhttp.complete(req,callback,url)}}
 var sep=((args&&args.indexOf('?'))==-1)?'?':'&';var u=args?(url+sep+args):url;req.open('GET',u,async);req.send(null);if(!async){wn.xmlhttp.complete(req,callback,url)}}}
-wn.versions={is_latest:function(){if(window._version_number==(localStorage?localStorage['_version_number']:null)){return true;}
-return false;},get_diff:function(){if(!localStorage)return;wn.xmlhttp.get('index.cgi',function(txt){r=JSON.parse(txt);if(r.exc){alert(r.exc);}
-wn.versions.set(r.message);},'cmd=get_diff&version_number='+localStorage['_version_number'],false);},set:function(diff){for(var i=0;i<diff.length;i++){localStorage.removeItem(diff[i]);}
-localStorage['_version_number']=_version_number;},check:function(){if(localStorage&&!localStorage['_version_number']){localStorage['_version_number']=_version_number;return;}
-if(!wn.versions.is_latest())wn.versions.get_diff();}}
+wn.versions={check:function(){if(localStorage){if(window._version_number==-1||parseInt(localStorage._version_number)!=parseInt(window._version_number)){localStorage.clear();}
+localStorage.setItem('_version_number',window._version_number);}}}
 wn.assets={executed_:{},exists:function(src){if('localStorage'in window&&localStorage.getItem(src))
 return true},add:function(src,txt){if('localStorage'in window){localStorage.setItem(src,txt);}},get:function(src){return localStorage.getItem(src);},extn:function(src){if(src.indexOf('?')!=-1){src=src.split('?').slice(-1)[0];}
 return src.split('.').slice(-1)[0];},html_src:function(src){if(src.indexOf('/')!=-1){var t=src.split('/').slice(0,-1);t.push('src');t=t.join('/')+'/'+a.split('/').slice(-1)[0];}else{var t='src/'+src;}
@@ -1,42 +1,14 @@
 // manage app versioning
-// get the last_version_number from the server (loaded)
-// and update based on it
-// if version is changed or version is -1, clear localStorage
 
 wn.versions = {
-	is_latest: function() {
-		if(window._version_number == (localStorage ? localStorage['_version_number'] : null)) {
-			return true;
-		}
-		return false;
-	},
-
-	// get the change list of all files
-	// from current version and local version
-	get_diff: function() {
-		if(!localStorage) return;
-		wn.xmlhttp.get('index.cgi', function(txt) {
-			// add it to localstorage
-			r = JSON.parse(txt);
-			if(r.exc) { alert(r.exc); }
-			wn.versions.set(r.message);
-		}, 'cmd=get_diff&version_number=' + localStorage['_version_number'], false);
-	},
-
-	// set will clear all changes since the last update
-	set: function(diff) {
-		for(var i=0; i<diff.length; i++) {
-			localStorage.removeItem(diff[i]);
-		}
-		localStorage['_version_number'] = _version_number;
-	},
-
 	check: function() {
-		if(localStorage && !localStorage['_version_number']) {
-			// first load
-			localStorage['_version_number'] = _version_number;
-			return;
+		if(localStorage) {
+			if(window._version_number==-1 || parseInt(localStorage._version_number)
+				!= parseInt(window._version_number)) {
+				localStorage.clear();
+			}
+			localStorage.setItem('_version_number', window._version_number);
 		}
-		if(!wn.versions.is_latest()) wn.versions.get_diff();
 	}
 }
@@ -1,18 +1,3 @@
 verbose = True
 force_rebuild = False
 no_minify = False
-
-def run():
-	"""
-	Run the builder
-	"""
-	global verbose
-	import sys, os
-	from build.project import Project
-
-	verbose = True
-	Project().build()
-
-if __name__=='__main__':
-	run()
@@ -41,28 +41,9 @@ class Bundle:
 			f.write(temp.getvalue())
 			f.close()
-			self.vc.repo.add(outfile)
 			if verbose: print 'Wrote %s' % outfile
 			return temp
-
-	def changed(self, files):
-		"""
-		Returns true if the files are changed since last build
-		"""
-		import os
-		from build import force_rebuild, verbose
-
-		if force_rebuild:
-			return True
-
-		for f in files:
-			if f in self.dirty:
-				if verbose:
-					print '*** %s changed' % f
-				return True
-
-		return False
-
 	def minify(self, in_files, outfile, concat=False):
 		"""
@@ -86,7 +67,6 @@ class Bundle:
 		jsm.minify(temp, out)
 		out.close()
-		self.vc.repo.add(outfile)
 
 		new_size = os.path.getsize(outfile)
@@ -105,9 +85,9 @@
 		# open the build.json file and read
 		# the dict
-		bfile = open(bpath, 'r')
-		bdata = json.loads(bfile.read())
-		bfile.close()
+		print "making %s ..." % bpath
+
+		with open(bpath, 'r') as bfile:
+			bdata = json.loads(bfile.read())
 
 		path = os.path.dirname(bpath)
@@ -121,25 +101,13 @@
 			# build the file list relative to the main folder
 			fl = [os.path.relpath(os.path.join(path, f), os.curdir) for f in bdata[outfile]]
 
-			if self.changed(fl):
-				# js files are minified by default unless explicitly
-				# mentioned in the prefix.
-				# some files may not work if minified (known jsmin bug)
-				if fname.split('.')[-1]=='js' and prefix!='concat' and not no_minify:
-					self.minify(fl, os.path.relpath(os.path.join(path, fname), os.curdir))
-				else:
-					self.concat(fl, os.path.relpath(os.path.join(path, fname), os.curdir))
+			# js files are minified by default unless explicitly
+			# mentioned in the prefix.
+			# some files may not work if minified (known jsmin bug)
+			if fname.split('.')[-1]=='js' and prefix!='concat' and not no_minify:
+				self.minify(fl, os.path.relpath(os.path.join(path, fname), os.curdir))
+			else:
+				self.concat(fl, os.path.relpath(os.path.join(path, fname), os.curdir))
-
-	def bundle(self, vc):
-		"""
-		Build js files from "build.json" found in version control
-		"""
-		import os
-
-		self.dirty = vc.repo.uncommitted()
-		self.vc = vc
-
-		# walk the parent folder and build all files as defined in the build.json files
-		for b in vc.repo.sql("select fname from bundles"):
-			self.make(os.path.abspath(os.path.join(vc.root_path, b[0])))
@@ -1,34 +0,0 @@
-"""
-Jinja2 markdown2 extension
-by Silas Swell
-http://www.silassewell.com/blog/2010/05/10/jinja2-markdown-extension/
-"""
-
-import jinja2
-import jinja2.ext
-import markdown2
-
-class Markdown2Extension(jinja2.ext.Extension):
-	tags = set(['markdown2'])
-
-	def __init__(self, environment):
-		super(Markdown2Extension, self).__init__(environment)
-		environment.extend(
-			markdowner=markdown2.Markdown()
-		)
-
-	def parse(self, parser):
-		lineno = parser.stream.next().lineno
-		body = parser.parse_statements(
-			['name:endmarkdown2'],
-			drop_needle=True
-		)
-		return jinja2.nodes.CallBlock(
-			self.call_method('_markdown_support'),
-			[],
-			[],
-			body
-		).set_lineno(lineno)
-
-	def _markdown_support(self, caller):
-		return self.environment.markdowner.convert(caller()).strip()
@@ -1,67 +0,0 @@
-class Nav:
-	"""
-	Build sitemap / navigation tree
-	"""
-	page_info_template = {
-		'description': None,
-		'keywords': None,
-		'title': 'No Title Set'
-	}
-
-	def __init__(self):
-		"""
-		write out the nav
-		"""
-		import json, os
-
-		self.data = {}
-		if os.path.exists('config/sitenav.json'):
-			nfile = open('config/sitenav.json')
-			self.data = json.loads(nfile.read())
-			nfile.close()
-
-	def page_info(self):
-		"""
-		return dict with href as the key
-		"""
-		ret = {}
-		import copy
-		ul = copy.deepcopy(self.data)
-		for li in ul:
-			ret[li.get('href')] = li
-			# has subitems, loop
-			if li.get('subitems'):
-				for lia in li.get('subitems'):
-					if not lia.get('href') in ret.keys():
-						ul.append(lia)
-		return ret
-
-	def html(self, list_class=''):
-		"""
-		return nested lists <ul> in html
-		"""
-		self.list_class = list_class
-		return self.make_list(self.data)
-
-	def make_list(self, ul):
-		"""
-		return a list with <li> and <a> elements
-		"""
-		lis = []
-		link_html = '<a href="%(href)s" title="%(title)s">%(label)s</a>'
-		for li in ul:
-			if not 'title' in li:
-				li['title'] = 'No Title'
-			if 'subitems' in li:
-				h = ('\t<li>' + link_html + self.make_list(li['subitems']) +'</li>') % li
-			else:
-				h = ('\t<li>' + link_html + '</li>') % li
-			lis.append(h)
-		return '\n<ul class="%s">\n%s\n</ul>' % (self.list_class, '\n'.join(lis))
@@ -1,4 +1,5 @@
 verbose = False
+import os
 
 class Project:
 	"""
@@ -14,22 +15,31 @@ class Project:
 		"""
 		load libraries
 		"""
-		from build.bundle import Bundle
-		from nav import Nav
+		from py.build.bundle import Bundle
 
 		self.bundle = Bundle()
-		self.nav = Nav()
+
+	def getversion(self):
+		"""get from version.num file and increment it"""
+		if os.path.exists('version.num'):
+			with open('version.num', 'r') as vfile:
+				self.version = int(vfile.read()) + 1
+		else:
+			self.version = 1
+
+		with open('version.num', 'w') as vfile:
+			vfile.write(str(self.version))
+
+		return self.version
 
 	def boot(self):
 		"""
 		returns bootstrap js
 		"""
		import json
-		corejs = open('lib/js/core.min.js', 'r')
-
-		v = int(self.vc.repo.get_value('last_version_number') or 0) + 1
-		boot = ('window._version_number="%s"' % str(v)) + \
+
+		corejs = open('lib/js/core.min.js', 'r')
+		boot = ('window._version_number="%s"' % str(self.getversion())) + \
 			'\n' + corejs.read()
 		corejs.close()
@@ -40,51 +50,31 @@
 		"""
 		Generate static files from templates
 		"""
 		# render templates
-		import os
-		from jinja2 import Environment, FileSystemLoader
-		from build.markdown2_extn import Markdown2Extension
-
-		env = Environment(loader=FileSystemLoader('templates'), extensions=[Markdown2Extension])
-
-		# dynamic boot info
-		env.globals['boot'] = self.boot()
-		env.globals['nav'] = self.nav.html()
-		page_info = self.nav.page_info()
+		boot = self.boot()
 
 		for wt in os.walk('templates'):
 			for fname in wt[2]:
 				if fname.split('.')[-1]=='html' and not fname.startswith('template'):
 					fpath = os.path.relpath(os.path.join(wt[0], fname), 'templates')
-					temp = env.get_template(fpath)
-					env.globals.update(self.nav.page_info_template)
-					env.globals.update(page_info.get(fpath, {}))
-
-					# out file in parent folder of template
-					f = open(fpath, 'w')
-					f.write(temp.render())
-					f.close()
+					with open(os.path.join(wt[0], fname), 'r') as tempfile:
+						temp = tempfile.read()
+					temp = temp % boot
+
+					with open(fpath, 'w') as outfile:
+						outfile.write(temp)
 
 					print "Rendered %s | %.2fkb" % (fpath, os.path.getsize(fpath) / 1024.0)
 
 	def build(self):
 		"""
-		Build all js files, index.html and template.html
+		build js files, index.html
 		"""
-		from build.version import VersionControl
-		self.vc = VersionControl()
-
-		self.vc.add_all()
-
-		# index, template if framework is dirty
-		if self.vc.repo.uncommitted():
-			self.bundle.bundle(self.vc)
-			self.render_templates()
-
-		# again add all bundles
-		self.vc.add_all()
-		self.vc.repo.commit()
-
-		self.vc.close()
+		for wt in os.walk('lib'):
+			for fname in wt[2]:
+				if fname=='build.json':
+					self.bundle.make(os.path.join(wt[0], fname))
+
+		self.render_templates()
@@ -1,150 +0,0 @@
-## DEPRECATED
-
-class Timestamps:
-	"""
-	Build / manage json timestamp files
-	"""
-	previous = {}
-	dirty = []
-	bundled = []
-	current = {}
-	ignore_hidden = True
-	ignore_extn = ('pyc', 'DS_Store', 'gitignore')
-
-	"""
-	load timestamps and dirty files
-	"""
-	def __init__(self):
-		self.load()
-		self.get_current()
-		self.check_dirty()
-
-	def check_dirty(self):
-		"""
-		Returns true if the current folder is dirty
-		"""
-		from build import verbose
-		import os
-
-		self.dirty = []
-		if not self.previous:
-			if verbose:
-				print 'Dirty: no timestamps!'
-			self.dirty = self.current.keys()
-		else:
-			# check both ways for missing files
-			for f in self.current:
-				if self.current[f] != self.previous.get(f):
-					print '**** %s changed | %s -> %s' % (f, self.previous.get(f), self.current.get(f))
-					self.dirty.append(f)
-			for f in self.previous:
-				if self.previous[f] != self.current.get(f):
-					if f not in self.dirty:
-						print '**** %s changed | %s -> %s' % (f, self.previous.get(f), self.current.get(f))
-						self.dirty.append(f)
-
-		# unique
-		self.dirty = list(set(self.dirty))
-
-	def get_current(self):
-		"""
-		build timestamps dict for specified files
-		"""
-		try:
-			import config.assets
-		except ImportError:
-			return self.get_current_from_folders()
-
-		ts = {}
-		for fname in config.assets.file_list:
-			ts[fname] = str(int(os.stat(fname).st_mtime))
-
-		self.current = ts
-
-	def get_current_from_folders(self):
-		"""
-		walk in all folders and build tree of all js, css, html, md files
-		"""
-		import os
-
-		ts = {}
-		# walk the parent folder and build all files as defined in the build.json files
-		for wt in os.walk('.', followlinks=True):
-			# build timestamps
-			if self.ignore_hidden:
-				for d in wt[1]:
-					if d.startswith('.'):
-						wt[1].remove(d)
-					if os.path.exists(os.path.join(wt[0], d, '.no_timestamps')):
-						wt[1].remove(d)
-
-			for f in wt[2]:
-				if f.split('.')[-1] not in self.ignore_extn and f!='_timestamps.js':
-					fname = os.path.relpath(os.path.join(wt[0], f), os.curdir)
-					ts[fname] = str(int(os.stat(fname).st_mtime))
-
-		self.current = ts
-
-	def write(self):
-		"""
-		Write timestamp if dirty
-		"""
-		import json, os
-
-		ts_path = 'config/_timestamps.js'
-
-		# write timestamps
-		f = open(ts_path, 'w')
-		self.get_current()
-		f.write(json.dumps(self.current))
-		f.close()
-
-	def load(self):
-		"""
-		Get all timestamps from file
-		"""
-		from build import verbose
-		import json, os
-
-		ts_path = os.path.join('config', '_timestamps.js')
-		if os.path.exists(ts_path):
-			ts = open(ts_path, 'r')
-			# merge the timestamps
-			tmp = json.loads(ts.read())
-			ts.close()
-		else:
-			if verbose:
-				print "** No timestamps **"
-			tmp = {}
-
-		self.previous = tmp
-
-	def update(self, fname):
-		"""
-		Update timestamp of the given file and add to dirty
-		"""
-		import os
-		self.current[fname] = str(int(os.stat(fname).st_mtime))
-		self.dirty.append(fname)
-
-	def get(self, rettype='dict', types=[]):
-		"""
-		return timestamps (ignore the ones not wanted)
-		"""
-		# remove all .md timestamps
-		ret = {}
-		for t in self.current:
-			if t.split('.')[-1] in types:
-				if t not in self.bundled:
-					ret[t] = self.current[t]
-
-		if rettype=='dict':
-			return ret
-		else:
-			import json
-			return json.dumps(ret)
@@ -1,457 +0,0 @@
-"""
-Version Control:
-
-Schema:
-	properties (key, value)
-	uncommitted (fname, ftype, content, timestamp)
-	files (fname, ftype, content, timestamp, version)
-	log (fname, ftype, version)
-	bundle_files (fname primary key)
-
-Discussion:
-
-There are 2 databases, versions.db and versions-local.db
-All changes are commited to versions-local.db, when the patches are complete, the developer
-must pull the latest .wnf db and merge
-
-versions-local.db is never commited in the global repository
-"""
-
-import unittest
-import os
-
-test_file = {'fname':'test.js', 'ftype':'js', 'content':'test_code', 'timestamp':'1100'}
-
-root_path = os.curdir
-
-def edit_file():
-	# edit a file
-	p = os.path.join(root_path, 'lib/js/core.js')
-
-	# read
-	f1 = open(p, 'r')
-	content = f1.read()
-	f1.close()
-
-	# write
-	f = open(p, 'w')
-	f.write(content)
-	f.close()
-
-	return os.path.relpath(p, root_path)
-
-verbose = False
-
-class TestVC(unittest.TestCase):
-	def setUp(self):
-		self.vc = VersionControl(root_path, True)
-		self.vc.repo.setup()
-
-	def test_add(self):
-		self.vc.add(**test_file)
-		ret = self.vc.repo.sql('select * from uncommitted', as_dict=1)[0]
-		self.assertTrue(ret['content']==test_file['content'])
-
-	def test_commit(self):
-		last_number = self.vc.repo.get_value('last_version_number')
-		self.vc.add(**test_file)
-		self.vc.commit()
-
-		# test version
-		number = self.vc.repo.get_value('last_version_number')
-		version = self.vc.repo.sql("select version from versions where number=?", (number,))[0][0]
-		self.assertTrue(number != last_number)
-
-		# test file
-		self.assertTrue(self.vc.repo.get_file('test.js')['content'] == test_file['content'])
-
-		# test uncommitted
-		self.assertFalse(self.vc.repo.sql("select * from uncommitted"))
-
-		# test log
-		self.assertTrue(self.vc.repo.sql("select * from log where version=?", (version,)))
-
-	def test_diff(self):
-		self.vc.add(**test_file)
-		self.vc.commit()
-		self.assertTrue(self.vc.repo.diff(None), ['test.js'])
-
-	def test_walk(self):
-		# add
-		self.vc.add_all()
-
-		# check if added
-		ret = self.vc.repo.sql("select * from uncommitted", as_dict=1)
-		self.assertTrue(len(ret)>0)
-
-		self.vc.commit()
-
-		p = edit_file()
-
-		# add
-		self.vc.add_all()
-
-		# check if added
-		ret = self.vc.repo.sql("select * from uncommitted", as_dict=1)
-		self.assertTrue(p in [r['fname'] for r in ret])
-
-	def test_merge(self):
-		self.vc.add_all()
-		self.vc.commit()
-
-		# write the file
-		self.vc.repo.conn.commit()
-
-		# make master (copy)
-		self.vc.setup_master()
-
-		p = edit_file()
-		self.vc.add_all()
-		self.vc.commit()
-
-		self.vc.merge(self.vc.repo, self.vc.master)
-
-		log = self.vc.master.diff(int(self.vc.master.get_value('last_version_number'))-1)
-		self.assertTrue(p in log)
-
-	def tearDown(self):
-		self.vc.close()
-		if os.path.exists(self.vc.local_db_name()):
-			os.remove(self.vc.local_db_name())
-		if os.path.exists(self.vc.master_db_name()):
-			os.remove(self.vc.master_db_name())
-class VersionControl:
-	def __init__(self, root=None, testing=False):
-		self.testing = testing
-		self.set_root(root)
-		self.repo = Repository(self, self.local_db_name())
-		self.ignore_folders = ['.git', '.', '..']
-		self.ignore_files = ['py', 'pyc', 'DS_Store', 'txt', 'db-journal', 'db']
-
-	def local_db_name(self):
-		"""return local db name"""
-		return os.path.join(self.root_path, 'versions-local.db' + (self.testing and '.test' or ''))
-
-	def master_db_name(self):
-		"""return master db name"""
-		return os.path.join(self.root_path, 'versions-master.db' + (self.testing and '.test' or ''))
-
-	def setup_master(self):
-		"""
-		setup master db from local (if not present)
-		"""
-		import os
-		if not os.path.exists(self.master_db_name()):
-			os.system('cp %s %s' % (self.local_db_name(), self.master_db_name()))
-
-		self.master = Repository(self, self.master_db_name())
-
-	def set_root(self, path=None):
-		"""
-		set / reset root and connect
-		(the root path is the path of the folder)
-		"""
-		import os
-		if not path:
-			path = os.path.abspath(os.path.curdir)
-		self.root_path = path
-
-	def relpath(self, fname):
-		"""
-		get relative path from root path
-		"""
-		import os
-		return os.path.relpath(fname, self.root_path)
-
-	def timestamp(self, path):
-		"""
-		returns timestamp
-		"""
-		import os
-		if os.path.exists(path):
-			return int(os.stat(path).st_mtime)
-		else:
-			return 0
-
-	def add_all(self):
-		"""
-		walk the root folder Add all dirty files to the vcs
-		"""
-		import os
-		for wt in os.walk(self.root_path, followlinks = True):
-			# ignore folders
-			for folder in self.ignore_folders:
-				if folder in wt[1]:
-					wt[1].remove(folder)
-
-			for fname in wt[2]:
-				fpath = os.path.join(wt[0], fname)
-
-				if fname.endswith('build.json'):
-					self.repo.add_bundle(fpath)
-					continue
-
-				if fname.split('.')[-1] in self.ignore_files:
-					# nothing to do
-					continue
-
-				# file does not exist
-				if not self.exists(fpath):
-					if verbose:
-						print "%s added" % fpath
-					self.repo.add(fpath)
-
-				# file changed
-				else:
-					if self.timestamp(fpath) != self.repo.timestamp(fpath):
-						if verbose:
-							print "%s changed" % fpath
-						self.repo.add(fpath)
-
-	def version_diff(self, source, target):
-		"""
-		get missing versions in target
-		"""
-		# find versions in source not in target
-		d = []
-		versions = source.sql("select version from versions")
-		for v in versions:
-			if not target.sql("select version from versions where version=?", v):
-				d.append(v)
-		return d
-
-	def merge(self, source, target):
-		"""
-		merges with two repositories
-		"""
-		diff = self.version_diff(source, target)
-		if not len(diff):
-			print 'nothing to merge'
-			return
-
-		for d in diff:
-			for f in source.sql("select * from files where version=?", d, as_dict=1):
-				print 'merging %s' % f['fname']
-				target.add(**f)
-			target.commit(d[0])
-
-	"""
-	short hand
-	"""
-	def commit(self, version=None):
-		"""commit to local"""
-		self.repo.commit(version)
-
-	def add(self, **args):
-		"""add to local"""
-		self.repo.add(**args)
-
-	def remove(self, fname):
-		"""remove from local"""
-		self.repo.add(fname=fname, action='remove')
-
-	def exists(self, fname):
-		"""exists in local"""
-		return len(self.repo.sql("select fname from files where fname=?", (self.relpath(fname),)))
-
-	def get_file(self, fname):
-		"""return file"""
-		return self.repo.sql("select * from files where fname=?", (self.relpath(fname),), as_dict=1)[0]
-
-	def close(self):
-		self.repo.conn.commit()
-		self.repo.conn.close()
-		if hasattr(self, 'master'):
-			self.master.conn.commit()
-			self.master.conn.close()
-class Repository:
-	def __init__(self, vc, fname):
-		self.vc = vc
-		import sqlite3
-		self.db_path = os.path.join(self.vc.root_path, fname)
-		self.conn = sqlite3.connect(self.db_path)
-		self.cur = self.conn.cursor()
-
-	def setup(self):
-		"""
-		setup the schema
-		"""
-		print "setting up %s..." % self.db_path
-		self.cur.executescript("""
-			create table properties(pkey primary key, value);
-			create table uncommitted(fname primary key, ftype, content, timestamp, action);
-			create table files (fname primary key, ftype, content, timestamp, version);
-			create table log (fname, ftype, version);
-			create table versions (number integer primary key, version);
-			create table bundles(fname primary key);
-		""")
-
-	def sql(self, query, values=(), as_dict=None):
-		"""
-		like webnotes.db.sql
-		"""
-		self.cur.execute(query, values)
-		res = self.cur.fetchall()
-
-		if as_dict:
-			out = []
-			for row in res:
-				d = {}
-				for idx, col in enumerate(self.cur.description):
-					d[col[0]] = row[idx]
-				out.append(d)
-			return out
-
-		return res
-
-	def get_value(self, key):
-		"""
-		returns value of a property
-		"""
-		ret = self.sql("select `value` from properties where `pkey`=?", (key,))
-		return ret and ret[0][0] or None
-
-	def set_value(self, key, value):
-		"""
-		returns value of a property
-		"""
-		self.sql("insert or replace into properties(pkey, value) values (?, ?)", (key,value))
-
-	def add(self, fname, ftype=None, timestamp=None, content=None, version=None, action=None):
-		"""
-		add to uncommitted
-		"""
-		import os
-		if not timestamp:
-			timestamp = self.vc.timestamp(fname)
-
-		# commit relative path
-		fname = self.vc.relpath(fname)
-
-		if not action:
-			action = 'add'
-
-		if not ftype:
-			ftype = fname.split('.')[-1]
-
-		self.sql("insert or replace into uncommitted(fname, ftype, timestamp, content, action) values (?, ?, ?, ?, ?)" \
-			, (fname, ftype, timestamp, content, action))
-
-	def new_version(self):
-		"""
-		return a random version id
-		"""
-		import random
-		# genarate id (global)
-		return '%016x' % random.getrandbits(64)
-
-	def update_number(self, version):
-		"""
-		update version.number
-		"""
-		# set number (local)
-		self.sql("insert into versions (number, version) values (null, ?)", (version,))
-		number = self.sql("select last_insert_rowid()")[0][0]
-		self.set_value('last_version_number', number)
-
-	def commit(self, version=None):
-		"""
-		copy uncommitted files to repository, update the log and add the change
-		"""
-		# get a new version number
-		if not version: version = self.new_version()
-		self.update_number(version)
-
-		# find added files to commit
-		self.add_from_uncommitted(version)
-
-		# clear uncommitted
-		self.sql("delete from uncommitted")
-
-	def add_from_uncommitted(self, version):
-		"""
-		move files from uncommitted table to files table
-		"""
-		added = self.sql("select * from uncommitted", as_dict=1)
-		for f in added:
-			if f['action']=='add':
-				# move them to "files"
-				self.sql("""
-					insert or replace into files
-					(fname, ftype, timestamp, content, version)
-					values (?,?,?,?,?)
-					""", (f['fname'], f['ftype'], f['timestamp'], f['content'], version))
-			elif f['action']=='remove':
-				self.sql("""delete from files where fname=?""", (f['fname'],))
-			else:
-				raise Exception, 'bad action %s' % action
-
-			# update log
-			self.add_log(f['fname'], f['ftype'], version)
-
-	def timestamp(self, fname):
-		"""
-		get timestamp
-		"""
-		fname = self.vc.relpath(fname)
-		return int(self.sql("select timestamp from files where fname=?", (fname,))[0][0] or 0)
-
-	def diff(self, number):
-		"""
-		get changed files since number
-		"""
-		if number is None: number = 0
-		ret = self.sql("""
-			select log.fname from log, versions
-			where versions.number > ?
-			and versions.version = log.version""", (number,))
-		return list(set([f[0] for f in ret]))
-
-	def uncommitted(self):
-		"""
-		return list of uncommitted files
-		"""
-		return [f[0] for f in self.sql("select fname from uncommitted")]
-
-	def add_log(self, fname, ftype, version):
-		"""
-		add file to log
-		"""
-		self.sql("insert into log(fname, ftype, version) values (?,?,?)", (fname, ftype, version))
-
-	def add_bundle(self, fname):
-		"""
-		add to bundles
-		"""
-		self.sql("insert or replace into bundles(fname) values (?)", (fname,))
-
-if __name__=='__main__':
-	import os, sys
-	sys.path.append('py')
-	sys.path.append('lib/py')
-	unittest.main()
@@ -2,42 +2,15 @@
 import os, sys
-from py.build import version
-version.verbose = True
 
 def print_help():
 	print "wnframework version control utility"
 	print "Usage:"
 	print "python lib/wnf.py build : scan all folders and commit versions with latest changes"
-	print "python lib/wnf.py setup : setup the local system (from master or fresh)"
-	print "python lib/wnf.py merge : merge from local into master"
-	print "python lib/wnf.py log : list last 10 commits"
 	print "python lib/wnf.py pull : pull from git"
 	print "python lib/wnf.py replace txt1 txt2 extn"
-	print "python lib/wnf.py patch -vp patch1 .. : run patches from patches module if not executed"
-	print "python lib/wnf.py patch -fvp patch1 .. : run patches from patches module, force rerun"
-
-def setup():
-	import os, sys
-	if not os.path.exists('versions-local.db'):
-		if os.path.exists('versions-master.db'):
-			import shutil
-			shutil.copyfile('versions-master.db', 'versions-local.db')
-			print "created versions-local.db from versions-master.db"
-		else:
-			vc = version.VersionControl()
-			vc.repo.setup()
-			vc.close()
-			print "created fresh versions-local.db"
-	else:
-		if len(sys.argv)==3 and sys.argv[2]=='master':
-			import shutil
-			shutil.copyfile('versions-local.db', 'versions-master.db')
-			print "created versions-master.db from versions-local.db"
-		else:
-			print "versions-local.db already exists. Nothing to do."
+	print "python lib/wnf.py patch patch1 .. : run patches from patches module if not executed"
+	print "python lib/wnf.py patch -f patch1 .. : run patches from patches module, force rerun"
 
 """simple replacement script"""
@@ -48,15 +21,12 @@ def replace_code(start, txt1, txt2, extn):
 		for fn in wt[2]:
 			if fn.split('.')[-1]==extn:
 				fpath = os.path.join(wt[0], fn)
-				f = open(fpath, 'r')
-				content = f.read()
-				f.close()
-				if re.search(txt1, content):
-					f = open(fpath, 'w')
-					f.write(re.sub(txt1, txt2, content))
-					f.close()
+				with open(fpath, 'r') as f:
+					content = f.read()
+
+				if re.search(txt1, content):
+					with open(fpath, 'w') as f:
+						f.write(re.sub(txt1, txt2, content))
 
 					print 'updated in %s' % fpath
@@ -86,101 +56,21 @@ def run():
 	elif cmd=='build':
-		from py import build
-		build.run()
-
-		vc = version.VersionControl()
-		print 'version %s' % vc.repo.get_value('last_version_number')
-
-	elif cmd=='merge':
-		vc = version.VersionControl()
-		vc.setup_master()
-		vc.merge(vc.repo, vc.master)
-		vc.close()
-
-	elif cmd=='merge-local':
-		vc = version.VersionControl()
-		vc.setup_master()
-		vc.merge(vc.master, vc.repo)
-		vc.close()
-
-	elif cmd=='setup':
-		setup()
-
-	elif cmd=='clear_startup':
-		# experimental
-		from webnotes import startup
-		startup.clear_info('all')
-		vc = version.VersionControl()
-		print 'version %s' % vc.repo.get_value('last_version_number')
-
-	elif cmd=='log':
-		vc = version.VersionControl()
-		for l in vc.repo.sql("select * from log order by rowid desc limit 10 ", as_dict =1):
-			print 'file:'+ l['fname'] + ' | version: ' + l['version']
-		print 'version %s' % vc.repo.get_value('last_version_number')
-		vc.close()
-
-	elif cmd=='files':
-		vc = version.VersionControl()
-		for f in vc.repo.sql("select fname from files where fname like ?", ((sys.argv[2] + '%'),)):
-			print f[0]
-		vc.close()
-
-	# pull from remote and merge with local
-	elif cmd=='gitpull':
-		branch = 'master'
-		if len(sys.argv)>2:
-			branch = sys.argv[2]
-
-		print "pulling erpnext"
-		os.system('git pull origin %s' % branch)
-
-		vc = version.VersionControl()
-		vc.setup_master()
-		vc.merge(vc.master, vc.repo)
-		vc.close()
-
-		print "pulling framework"
-		os.chdir('lib')
-		os.system('git pull origin %s' % branch)
+		from py.build.project import Project
+		Project().build()
 
 	# replace code
 	elif cmd=='replace':
 		replace_code('.', sys.argv[2], sys.argv[3], sys.argv[4])
 
 	elif cmd=='patch':
-		from optparse import OptionParser
-		parser = OptionParser()
-		parser.add_option("-q", "--quiet",
-			action="store_false", dest="verbose", default=True,
-			help="Do not print status messages to stdout")
-		parser.add_option("-l", "--latest",
-			action="store_true", dest="run_latest", default=False,
-			help="Apply the latest patches")
-		parser.add_option("-p", "--patch", dest="patch_list", metavar='PATCH_MODULE.PATCH_FILE',
-			action="append",
-			help="Apply patch PATCH_MODULE.PATCH_FILE\n\
-				Can be used more than once for applying multiple patches")
-		parser.add_option("-f", "--force",
-			action="store_true", dest="force", default=False,
-			help="Force Apply all patches specified using option -p or --patch")
-
-		(options, args) = parser.parse_args()
-
-		if options.patch_list:
-			for patch in options.patch_list:
-				patch_split = patch.split(".")
-				idx = options.patch_list.index(patch)
-				patch_module = ".".join(patch_split[:-1])
-				options.patch_list[idx] = {
-					'patch_module': patch_module or "patches",
-					'patch_file': patch_split[-1]
-				}
-
-		options.db_name = hasattr(webnotes.defs, 'default_db_name') and getattr(webnotes.defs, 'default_db_name') or None
-
-		from webnotes.modules.patch_handler import PatchHandler
-		PatchHandler(**options.__dict__).run(**options.__dict__)
+		from webnotes.modules.patch_handler import run
+		if len(sys.argv)>2 and sys.argv[2]=='-f':
+			# force patch
+			run(patch_list = sys.argv[3:], overwrite=1, log_exception=0)
+		else:
+			# run patch once
+			run(patch_list = sys.argv[2:], log_exception=0)
 if __name__=='__main__':
-	run()
+	run()