12 import pygraphviz as pgv
16 from amara import bindery
17 from amara.xslt import transform
18 from Cheetah.Template import Template
# Command-line interface: --style points at the XSL/Cheetah template
# directory, --output at the publish target; both default to sibling
# directories of the current working directory.
parser = argparse.ArgumentParser(description='Process docbook article tree.')
parser.add_argument('--style', nargs='?',
                    default=os.path.dirname(os.getcwd())+'/style/default/')
parser.add_argument('--output', nargs='?',
                    default=os.path.dirname(os.getcwd())+'/htdocs/')
args = parser.parse_args()

# Stylesheet handed to xsltproc to turn DocBook XML into HTML.
style_xslt = args.style+"docbook.xsl"
outputdir = args.output

# Scratch directory where rendered pages are staged before being
# rsync-published to the output directory.
tmptarget = tempfile.mkdtemp()+'/'

# Extensions of executable XInclude targets (parse='text'): such hrefs are
# run as programs and their stdout spliced into the document as XML.
valid_scripts = ['.py','.pl']
    # Tail of a mkdir_p-style helper (its `try: os.makedirs(...)` head is
    # outside this view): an already-existing directory is presumably
    # treated as success, any other OSError re-raised — TODO confirm.
    except OSError as exc: # Python >2.5
        if exc.errno == errno.EEXIST:
def publish(src,target):
    """Mirror *src* into *target* with rsync (-a --delete prunes stale files)."""
    cmd = ["rsync","-a","--delete",src,target]
    retcode = subprocess.call(cmd)
    # NOTE(review): the guard checking retcode is elided from this view; the
    # error message is presumably printed only on a non-zero exit status.
    print 'Error: '+' '.join(cmd)+' Returncode ['+str(retcode)+']'
# Namespace prefixes used by every xml_select() XPath query in this file:
# DocBook 5, XInclude, XLink, and XHTML.
PREFIXES={u'db': u'http://docbook.org/ns/docbook',
          u'xi': u'http://www.w3.org/2001/XInclude',
          u'xl': u'http://www.w3.org/1999/xlink',
          u'html' : u'http://www.w3.org/1999/xhtml'}
    """Class containing the state of the directory with articles"""
    # (class header and parts of the body are outside this view)

        # Scan-method fragment (its `def` line is elided): walk the article
        # tree and record one site link per *.xml file found.
        for dirname, dirnames, filenames in os.walk(self._cwd):
            for filename in filenames:
                if fnmatch.fnmatch(filename, '*.xml'):
                    file_ = os.path.join(dirname,filename)
                    doc = bindery.parse(file_, prefixes=PREFIXES)
                    title = doc.xml_select(u'/db:article/db:info/db:title')
                    menu = doc.xml_select(u'/db:article/db:info/db:titleabbrev')
                    # Path convention appears to be ./<link>.<lang>.xml: the
                    # second dot-field is the site-relative link, and 'index'
                    # maps to the bare directory URL — TODO confirm.
                    base = file_.split('.')[1]
                    link = base.replace('index','')
                    self._tree.append(link)

        # Accessor fragment: expose links as a set (one entry per page even
        # when several language files map to the same link).
        return set(self._tree)
    """Class representing a version of a webpage"""
    def __init__(self,link,page):
        # *link* is the owning Link object; *page* is presumably a
        # (lang, path) tuple as produced by Link._scan_languages — TODO
        # confirm. (several attribute initialisations are elided here)
        self._rendered_article = None

        # resources() fragment: files the article references, deduplicated.
        return set(self._resources)

    def set_article(self,art):
        # Install an externally rendered article body; used by the sitemap
        # generator, whose pages bypass the XSLT rendering step.
        self._rendered_article = art

        # Parse-method fragment (its `def` line is elided): load the DocBook
        # source and pull title/menu text from the <info> block when present.
        self._doc = bindery.parse(self._file, prefixes=PREFIXES)
        if self._doc.xml_select(u'/db:article/db:info/db:title'):
            self._title = unicode(self._doc.article.info.title)
        if self._doc.xml_select(u'/db:article/db:info/db:titleabbrev'):
            self._menu = unicode(self._doc.article.info.titleabbrev)
        dirname = os.path.dirname(self._file)
        # Executable text includes: any <xi:include parse='text'> whose href
        # has an extension in valid_scripts is executed, and its stdout —
        # parsed as XML — replaces the include element in place.
        code = self._doc.xml_select(u"//xi:include[@parse='text']")
        # (the `for c in code:`-style loop header is outside this view)
            (p, ext) = os.path.splitext(c.href)
            if ext in valid_scripts:
                exe = os.path.join(os.path.abspath(dirname)+'/'+c.href)
                # NOTE(review): the child's exit status is never checked and
                # the pipe is read in one gulp; a failing script goes unnoticed.
                xml = subprocess.Popen([exe],stdout=subprocess.PIPE)
                xstr = bindery.parse(str(xml.stdout.read()))
                idp = c.xml_index_on_parent
                for x in xstr.xml_children:
                    c.xml_parent.xml_insert(idp,x)
                c.xml_parent.xml_remove(c)
        # Collect local files the article references, so they can be copied
        # next to the rendered page: <db:link xl:href> targets, image
        # filerefs, and the .py handler behind any XHTML form action.
        for r in self._doc.xml_select(u"//db:link[@xl:href]"):
            rf = os.path.join(dirname,r.href)
            if os.path.isfile(rf):
                self._resources.append(rf)
        for i in self._doc.xml_select(u"//db:imagedata[@fileref]"):
            im = os.path.join(dirname,i.fileref)
            if os.path.isfile(im):
                self._resources.append(im)
        for i in self._doc.xml_select(u"//html:form[@action]"):
            # Keep only the script path up to and including '.py' (drops any
            # trailing query/extra path in the action attribute).
            pyscript = re.split('\.py',i.action,1)[0]+'.py'
            im = os.path.join(dirname,pyscript)
            if os.path.isfile(im):
                self._resources.append(im)
        # amara can not handle the docbook stylesheets
        # xmlarticle = transform(doc,style_xslt)
        dirname = os.path.dirname(self._file)
        # Serialise the include-expanded document to a temp file, run
        # xsltproc over it, and read the rendered HTML back into memory.
        # NOTE(review): tfi/tfo are never closed in the visible lines and the
        # mktemp files are not removed — confirm against the elided lines.
        infile = os.path.basename(tempfile.mktemp())
        outfile = tempfile.mktemp()
        tfi = open(infile,'w')
        tfi.write(self._doc.xml_encode())
        # cmd = ["saxon-xslt-xinclude","-o",outfile,infile,style_xslt]
        cmd = ["xsltproc","--xinclude","--output",outfile,style_xslt,infile]
        retcode = subprocess.call(cmd)
        # NOTE(review): the retcode guard line is elided; the message is
        # presumably printed only on failure.
        print 'Error: '+' '.join(cmd)+' Returncode ['+str(retcode)+']'
        tfo = open(outfile,'r')
        self._rendered_article = tfo.read()
    def template(self,sitemap):
        """Run the rendered article through the per-language Cheetah template.

        *sitemap* supplies the site-wide menu, the per-level tree menu and
        the language switcher; the result is written under tmptarget with
        the final 'xml' in the source path swapped for 'html'.
        """
        htmlmenu = sitemap.gen_menu(self._lang,None,"menu")
        levelmenu = sitemap.gen_menu(self._lang,self,"tree")
        langmenu = sitemap.lang_menu(self._lang,self._link)
        # NOTE(review): one searchList entry (presumably {'menu':htmlmenu})
        # is elided from this view.
        template = Template(file=args.style+'index.'+self._lang+'.html.tmpl',
                            searchList=[{'title':self._title},
                                        {'article':self._rendered_article},
                                        {'levelmenu':levelmenu},
                                        {'langmenu':langmenu}])
        # rsplit keeps any earlier 'xml' in the path intact; only the last
        # occurrence (the extension) is rewritten.
        outfile = tmptarget+'html'.join(self._file.rsplit('xml',1))
        mkdir_p(os.path.dirname(outfile))
        out = open(outfile, 'w')
        out.write(str(template))
    """Class representing a webpage on the site"""
    def __init__(self,link):
        # find the representations of the link.
        # Directory links (trailing '/') are probed on disk for per-language
        # index files; one Page is created per language found.
        if self._link[-1] == '/':
            lang = self._scan_languages(path)
            # (iteration over the discovered languages is elided here)
            self._pages.append(Page(self,l))

    def add_page(self,l):
        # *l* is a (lang, path) pair, same shape as _scan_languages yields.
        self._pages.append(Page(self,l))

    def _scan_languages(self,path):
        # Find files matching ./<path>*.<lang>.xml and collect (lang, file)
        # pairs; the filename's dot-fields are split in an elided line.
        for l in glob.glob('.'+path+'*'):
            if len(ls) > 3 and ls[3] == 'xml':
                lang.append((ls[2],l))

        # --- fragments of further Link methods follow; their `def` lines
        # --- and bodies are partly outside this view.
        for page in self._pages:

        # languages() fragment: collect each page's language code.
        for page in self._pages:
            p.append(page.language())

        for page in self._pages:

    def template(self,sitemap):
        # Render every language version of this link.
        for page in self._pages:
            page.template(sitemap)

        # page(lang) fragment: return the version matching *lang*.
        for page in self._pages:
            if page.language()==lang:

        # resources() fragment: union of per-page resource files.
        for page in self._pages:
            res = res.union(page.resources())
    def __init__(self,token,value):
        # Trie node: *token* is one path component, *value* the Link payload.
        # (attribute initialisation is outside this view)

        # children() fragment: accessor for the child-node list.
        return self._children
        # Iteration entry point: in-order traversal starting at the root.
        return self.inorder(self._root)

        # inorder() fragment: recurse into each child's subtree.
            for x in self.inorder(l.children()):

    def _add(self,trie, key, content):
        # Walk the sibling list *trie* matching leading key tokens: an
        # unmatched token gets a fresh Node, a matching child is recursed
        # into with the remaining key. (most of the body is elided)
            node = Node(k,content)

                self._add(ch.children(), key, content)

    def add(self,key, content):
        # Public insert: *key* is a list of path tokens, *content* a Link.
        self._add(self._root, key, content)

    def _graph(self, trie, G):
        # Recursively add every node plus parent→child edges to graph *G*.
        # (the loop header over *trie* is outside this view)
            G.add_node(l.token())
            for ch in l.children():
                G.add_edge(l.token(),ch.token())
            self._graph(l.children(), G)
        # Build a pygraphviz digraph of the whole sitemap, rooted at a
        # synthetic "sitemap" node joined to each top-level entry.
        G = pgv.AGraph(directed=True)
        G.add_node("sitemap")
        for ch in self._root:
            G.add_edge("sitemap",ch.token())
        self._graph(self._root, G)
    def _menu(self, trie, lang, page, css):
        # Emit one nested <ul> per trie level; *css* carries the class
        # attribute for the outermost list only (recursion passes "").
        html = "<ul%s>\n" % css
        # (the loop header over *trie* and some guards are elided here)
            p = l.value().page(lang)
            # Mark the entry for the page currently being rendered.
                sel = ' class="selected"'
            # Entry exists in the requested language:
                html += '<li%s><a href="%s">%s</a>\n' \
                    % (sel,l.value().link(),p.menu())
            # Otherwise fall back to the English page, flagged with '*':
                html += '<li%s><a href="%s.en" hreflang="en">%s</a>*\n' \
                    % (sel,l.value().link(), l.value().page('en').menu())
            html += self._menu(l.children(), lang, page, "")

    def menu(self,lang,page,cssclass):
        """Render the whole site tree as a nested HTML list menu."""
        css = ' class="'+cssclass+'"'
        return self._menu(self._root, lang, page, css)
    """Class keeping the internal site structure"""
    # __init__ fragment (its `def` line is outside this view):
        self._file = 'sitemap.txt'
        self._sitelang = set()
        # ISO 639-3 code table, used to translate language names in menus.
        self._isocode = bindery.parse('/usr/share/xml/iso-codes/iso_639_3.xml')

    def add_link(self, link):
        # Split '/a/b/c' into its path tokens (filter drops the empty
        # strings the capturing split produces) and insert into the trie.
        tokens = filter(None,re.split(r'(^/[\w-]*/|[\w-]*/)',link))
        self._tree.add(tokens,Link(link))

    # write-map fragment: persist one link per line to sitemap.txt.
        f = open(self._file,'w')
        f.write('\n'.join(link.link() for link in self._tree))

    # read-map fragment: load sitemap.txt, tolerating a missing file.
        sml = f.read().split()
        except IOError, what_error:
            print 'INFO: Could not read sitemap.txt - one will be created'

        # set() fragment: all links currently held in the tree.
        return set(link.link() for link in self._tree)
        # Build pipeline, timed per stage (the t1..t7 time.time() captures
        # and several loop bodies are elided from this view): prepare links,
        # gather languages, load gettext catalogues, render, template.
        for link in self._tree:

        print "Prepare [%5.2f s]" % (round(t2-t1,2))
        for link in self._tree:
            self._sitelang = self._sitelang.union(set(link.languages()))
        for tran in self._sitelang:
            # One gettext catalogue per site language, for menu labels.
            self._tranlang[tran] = gettext.translation('iso_639_3',
        print "Language [%5.2f s]" % (round(t3-t2,2))
        for link in self._tree:

        print "Render [%5.2f s]" % (round(t4-t3,2))
        for link in self._tree:

        print "Template [%5.2f s]" % (round(t5-t4,2))

        # Copy every referenced resource file into the staging tree.
        for link in self._tree:
            res = res.union(link.resources())
        # (the loop header over the collected files is elided here)
            outfile = tmptarget+f
            mkdir_p(os.path.dirname(outfile))
            shutil.copyfile(f,outfile)
        print "Resources[%5.2f s]" % (round(t6-t5,2))

        # Synthesize the /sitemap page: one version per site language whose
        # article body is the rendered tree menu itself (no XSLT pass).
        sitmaplink = Link('/sitemap')
        for l in self._sitelang:
            sitmaplink.add_page((l,'/sitemap.'+l+'.xml'))
        for l in self._sitelang:
            sitmaplink.page(l).set_article(self.gen_menu(l,None,"tree sitemap"))
            sitmaplink.page(l).template(self)
        print "Sitemap [%5.2f s]" % (round(t7-t6,2))
    def gen_menu(self,lang,page,cssclass):
        """Delegate menu generation to the trie of links."""
        return self._tree.menu(lang,page,cssclass)

    def lang_menu(self,lang,link):
        # Build the language-switcher list for *link*: look up each language
        # code in the ISO 639-3 table and, when a catalogue for *lang* is
        # loaded, translate the language's display name.
        for l in link.languages():
            isoxml = u"//iso_639_3_entry[@*='"+l+"']"
            ln = self._isocode.xml_select(isoxml)[0].name
            # (the guard around this translation lookup is elided)
                ln = self._tranlang[lang].gettext(ln)

            # *p* (the target href) is assigned in an elided line.
            html += '<li><a href="%s" hreflang="%s">%s</a></li>' % (p, l, ln)
# Publish the staged site plus the static style assets to the output dir.
publish(tmptarget, args.output)
publish(args.style+"css", args.output)
publish(args.style+"images",args.output)

# Reconcile the on-disk article tree with sitemap.txt: report pages that
# disappeared, add newly found ones, and rewrite the map if anything changed.
# (the surrounding setup of *dir_*, *sitemap* and the loop headers over
# *removed* and *missing* are outside this view)
missing = dir_.set() - sitemap.set()
removed = sitemap.set() - dir_.set()
    print page+' pages missing!!'

    print 'adding missing page '+page
    sitemap.add_link(page)
if len(missing)+len(removed) != 0:
    print 'writing new sitemap - please adjust if needed'

print "Publish [%5.2f s]" % (round(t2-t1,2))
print "Total [%5.2f s]" % (round(t2-ts,2))