12 import pygraphviz as pgv
16 from amara import bindery
17 from amara.xslt import transform
18 from Cheetah.Template import Template
# Command-line configuration for the site build: where the XSL style files
# live and where the rendered site is published.  Defaults are sibling
# directories of the current working directory.
parser = argparse.ArgumentParser(description='Process docbook article tree.')
parser.add_argument('--style', nargs='?',
                    default=os.path.dirname(os.getcwd())+'/style/default/')
parser.add_argument('--output', nargs='?',
                    default=os.path.dirname(os.getcwd())+'/htdocs/')
args = parser.parse_args()

# Derived paths used throughout the build.
style_xslt = args.style+"docbook.xsl"   # docbook -> xhtml stylesheet
outputdir = args.output                 # publish target (rsync/ssh destination)

# All rendering happens in a scratch directory, which is later rsynced
# to the publish target.
tmptarget = tempfile.mkdtemp()+'/'

# xi:include'd scripts with these extensions are executed and their stdout
# spliced into the document (see the code-include handling below).
valid_scripts = ['.py','.pl']
38 except OSError as exc: # Python >2.5
39 if exc.errno == errno.EEXIST:
def publish(src, target):
    """Mirror *src* onto *target* with rsync.

    ``-a --delete`` makes the target an exact copy, removing stale files.
    On failure a diagnostic is printed; no exception is raised, keeping
    the original best-effort behaviour.
    """
    cmd = ["rsync", "-a", "--delete", src, target]
    retcode = subprocess.call(cmd)
    # Guard reconstructed: only report when rsync exits non-zero.
    if retcode != 0:
        print('Error: '+' '.join(cmd)+' Returncode ['+str(retcode)+']')
def ssh_cmd(target, command):
    """Run a two-word shell *command* on the remote path of *target*.

    *target* is an rsync-style ``host:path`` string; *command* is e.g.
    ``"mkdir -p"`` or ``"chmod a+rx"`` and is applied to the remote path.
    Prints a diagnostic on non-zero exit instead of raising.
    """
    # NOTE(review): the two split lines were reconstructed from the cmd
    # indexing below (t[0]=host, t[1]=path, c[0]/c[1]=command words) -- confirm.
    t = target.split(':')
    c = command.split(' ')
    cmd = ["ssh", t[0], c[0], c[1], t[1]]
    retcode = subprocess.call(cmd)
    if retcode != 0:
        print('Error: '+' '.join(cmd)+' Returncode ['+str(retcode)+']')
# XML namespace prefixes used by every amara xml_select() query in this
# module (docbook content, XInclude, xlink hrefs, embedded xhtml forms).
PREFIXES = {
    u'db':   u'http://docbook.org/ns/docbook',
    u'xi':   u'http://www.w3.org/2001/XInclude',
    u'xl':   u'http://www.w3.org/1999/xlink',
    u'html': u'http://www.w3.org/1999/xhtml',
}
63 """Class containing the state of the directory with articles"""
69 for dirname, dirnames, filenames in os.walk(self._cwd):
70 for filename in filenames:
71 if fnmatch.fnmatch(filename, '*.xml'):
72 file_ = os.path.join(dirname,filename)
73 doc = bindery.parse(file_, prefixes=PREFIXES)
74 title = doc.xml_select(u'/db:article/db:info/db:title')
75 menu = doc.xml_select(u'/db:article/db:info/db:titleabbrev')
77 base = file_.split('.')[1]
78 link = base.replace('index','')
79 self._tree.append(link)
82 return set(self._tree)
85 """Class representing a version of a webpage"""
86 def __init__(self,link,page):
94 self._rendered_article = None
100 return set(self._resources)
def set_article(self, art):
    """Replace this page's rendered article body.

    Used for generated pages (e.g. the sitemap) whose content is built
    in memory rather than rendered from a docbook file.
    """
    self._rendered_article = art
109 self._doc = bindery.parse(self._file, prefixes=PREFIXES)
110 if self._doc.xml_select(u'/db:article/db:info/db:title'):
111 self._title = unicode(self._doc.article.info.title)
112 if self._doc.xml_select(u'/db:article/db:info/db:titleabbrev'):
113 self._menu = unicode(self._doc.article.info.titleabbrev)
115 dirname = os.path.dirname(self._file)
116 code = self._doc.xml_select(u"//xi:include[@parse='text']")
119 (p, ext) = os.path.splitext(c.href)
120 if ext in valid_scripts:
121 exe = os.path.join(os.path.abspath(dirname)+'/'+c.href)
122 xml = subprocess.Popen([exe],stdout=subprocess.PIPE)
123 xstr = bindery.parse(str(xml.stdout.read()))
124 idp = c.xml_index_on_parent
125 for x in xstr.xml_children:
126 c.xml_parent.xml_insert(idp,x)
127 c.xml_parent.xml_remove(c)
129 for r in self._doc.xml_select(u"//db:link[@xl:href]"):
130 rf = os.path.join(dirname,r.href)
131 if os.path.isfile(rf):
132 self._resources.append(rf)
133 for i in self._doc.xml_select(u"//db:imagedata[@fileref]"):
134 im = os.path.join(dirname,i.fileref)
135 if os.path.isfile(im):
136 self._resources.append(im)
137 for i in self._doc.xml_select(u"//html:form[@action]"):
138 pyscript = re.split('\.py',i.action,1)[0]+'.py'
139 im = os.path.join(dirname,pyscript)
140 if os.path.isfile(im):
141 self._resources.append(im)
144 # amara can not handle the docbook stylesheets
145 # xmlarticle = transform(doc,style_xslt)
147 dirname = os.path.dirname(self._file)
149 infile = os.path.basename(tempfile.mktemp())
150 outfile = tempfile.mktemp()
151 tfi = open(infile,'w')
152 tfi.write(self._doc.xml_encode(omit_xml_declaration=True))
154 # cmd = ["saxon-xslt-xinclude","-o",outfile,infile,style_xslt]
155 cmd = ["xsltproc","--xinclude","--output",outfile,style_xslt,infile]
156 retcode = subprocess.call(cmd)
158 print 'Error: '+' '.join(cmd)+' Returncode ['+str(retcode)+']'
159 tfo = open(outfile,'r')
160 self._rendered_article = tfo.read()
def template(self, sitemap):
    """Fill the per-language Cheetah template with this page's parts and
    write the resulting HTML under the scratch target directory.

    The output path mirrors the source path with its last ``xml``
    replaced by ``html``.
    """
    htmlmenu = sitemap.gen_menu(self._lang, None, "menu")
    levelmenu = sitemap.gen_menu(self._lang, self, "tree")
    langmenu = sitemap.lang_menu(self._lang, self._link)
    template = Template(file=args.style+'index.'+self._lang+'.html.tmpl',
                        searchList=[{'title': self._title},
                                    # NOTE(review): this entry was reconstructed
                                    # (htmlmenu is otherwise unused) -- confirm
                                    # against the template file.
                                    {'menu': htmlmenu},
                                    {'article': self._rendered_article},
                                    {'levelmenu': levelmenu},
                                    {'langmenu': langmenu}])
    outfile = tmptarget+'html'.join(self._file.rsplit('xml', 1))
    mkdir_p(os.path.dirname(outfile))
    # with-statement fixes the leaked file handle: guarantees flush+close.
    with open(outfile, 'w') as out:
        out.write(str(template))
184 """Class representing a webpage on the site"""
185 def __init__(self,link):
187 # find the representations of the link.
190 if self._link[-1] == '/':
192 lang = self._scan_languages(path)
194 self._pages.append(Page(self,l))
def add_page(self, l):
    """Attach another language version to this link.

    *l* is presumably a ``(language, file)`` tuple as passed for the
    sitemap pages -- it is handed straight to Page.
    """
    new_page = Page(self, l)
    self._pages.append(new_page)
def _scan_languages(self, path):
    """Glob for ``<path>.<lang>.xml`` variants and return [(lang, file), ...].

    NOTE(review): the accumulator initialisation and the split line were
    reconstructed from the visible filter logic -- confirm.
    """
    lang = []
    for l in glob.glob('.'+path+'*'):
        # e.g. './dir/index.en.xml'.split('.') -> ['', '/dir/index', 'en', 'xml']
        ls = l.split('.')
        if len(ls) > 3 and ls[3] == 'xml':
            lang.append((ls[2], l))
    return lang
211 for page in self._pages:
216 for page in self._pages:
217 p.append(page.language())
221 for page in self._pages:
def template(self, sitemap):
    """Run template generation for every language version of this link."""
    for pg in self._pages:
        pg.template(sitemap)
229 for page in self._pages:
230 if page.language()==lang:
236 for page in self._pages:
237 res = res.union(page.resources())
242 def __init__(self,token,value):
254 return self._children
261 return self.inorder(self._root)
266 for x in self.inorder(l.children()):
269 def _add(self,trie, key, content):
273 node = Node(k,content)
278 self._add(ch.children(), key, content)
def add(self, key, content):
    """Public entry point: insert *content* under the token sequence
    *key*, starting the recursive descent at the trie root."""
    self._add(self._root, key, content)
def _graph(self, trie, G):
    """Recursively add every node of *trie* and each parent->child edge
    to the pygraphviz graph *G*.

    NOTE(review): the ``for l in trie:`` loop header was reconstructed
    from the body's use of ``l`` -- confirm.
    """
    for l in trie:
        G.add_node(l.token())
        for ch in l.children():
            G.add_edge(l.token(), ch.token())
        self._graph(l.children(), G)
291 G = pgv.AGraph(directed=True)
292 G.add_node("sitemap")
293 for ch in self._root:
294 G.add_edge("sitemap",ch.token())
295 self._graph(self._root, G)
300 def _menu(self, trie, lang, page, css):
301 html = "<ul%s>\n" % css
304 p = l.value().page(lang)
306 sel = ' class="selected"'
308 html += '<li%s><a href="%s">%s</a>\n' \
309 % (sel,l.value().link(),p.menu())
311 html += '<li%s><a href="%s.en" hreflang="en">%s</a>*\n' \
312 % (sel,l.value().link(), l.value().page('en').menu())
314 html += self._menu(l.children(), lang, page, "")
def menu(self, lang, page, cssclass):
    """Render the site menu as nested HTML lists for *lang*.

    An empty *cssclass* yields no class attribute on the outer <ul>.
    NOTE(review): the css-default branch was reconstructed -- confirm.
    """
    css = ''
    if cssclass:
        css = ' class="'+cssclass+'"'
    return self._menu(self._root, lang, page, css)
325 """Class keeping the internal site structure"""
327 self._file = 'sitemap.txt'
329 self._sitelang = set()
330 self._isocode = bindery.parse('/usr/share/xml/iso-codes/iso_639_3.xml')
def add_link(self, link):
    """Tokenize *link* into path components and register it in the site tree.

    The regex keeps each path segment (with its trailing slash) as a
    separate token; empty split fragments are dropped.
    """
    path_token = r'(^/[\w-]*/|[\w-]*/)'
    tokens = filter(None, re.split(path_token, link))
    self._tree.add(tokens, Link(link))
338 f = open(self._file,'w')
339 f.write('\n'.join(link.link() for link in self._tree))
345 sml = f.read().split()
349 except IOError, what_error:
350 print 'INFO: Could not read sitemap.txt - one will be created'
353 return set(link.link() for link in self._tree)
357 for link in self._tree:
360 print "Prepare [%5.2f s]" % (round(t2-t1,2))
361 for link in self._tree:
362 self._sitelang = self._sitelang.union(set(link.languages()))
363 for tran in self._sitelang:
365 self._tranlang[tran] = gettext.translation('iso_639_3',
368 print "Language [%5.2f s]" % (round(t3-t2,2))
369 for link in self._tree:
372 print "Render [%5.2f s]" % (round(t4-t3,2))
373 for link in self._tree:
376 print "Template [%5.2f s]" % (round(t5-t4,2))
380 for link in self._tree:
381 res = res.union(link.resources())
383 outfile = tmptarget+f
384 mkdir_p(os.path.dirname(outfile))
385 shutil.copyfile(f,outfile)
386 print "Resources[%5.2f s]" % (round(t6-t5,2))
387 sitmaplink = Link('/sitemap')
388 for l in self._sitelang:
389 sitmaplink.add_page((l,'/sitemap.'+l+'.xml'))
390 for l in self._sitelang:
391 sitmaplink.page(l).set_article(self.gen_menu(l,None,"tree sitemap"))
392 sitmaplink.page(l).template(self)
394 print "Sitemap [%5.2f s]" % (round(t7-t6,2))
def gen_menu(self, lang, page, cssclass):
    """Delegate menu rendering for *lang* to the internal site tree."""
    return self._tree.menu(lang, page, cssclass)
402 def lang_menu(self,lang,link):
404 for l in link.languages():
405 isoxml = u"//iso_639_3_entry[@*='"+l+"']"
406 ln = self._isocode.xml_select(isoxml)[0].name
408 ln = self._tranlang[lang].gettext(ln)
413 html += '<li><a href="%s" hreflang="%s">%s</a></li>' % (p, l, ln)
418 ssh_cmd(args.output,"mkdir -p")
419 publish(tmptarget, args.output)
420 for res in ["css","images","js","favicon.ico"]:
421 if (os.path.exists(args.style+res)):
422 publish(args.style+res, args.output)
423 ssh_cmd(args.output,"chmod a+rx")
432 missing = dir_.set() - sitemap.set()
433 removed = sitemap.set() - dir_.set()
435 print page+' pages missing!!'
437 print 'adding missing page '+page
438 sitemap.add_link(page)
439 if len(missing)+len(removed) != 0:
440 print 'writing new sitemap - please adjust if needed'
449 print "Publish [%5.2f s]" % (round(t2-t1,2))
450 print "Total [%5.2f s]" % (round(t2-ts,2))