author      Akash Deshpande <fastakash@gmail.com>      2016-09-03 16:13:28 -0400
committer   Andras Timar <andras.timar@collabora.com>  2016-09-09 07:16:25 +0000
commit      187a8d006c9b738a387eedd65c0cb71e8257e6ce (patch)
tree        98b73262401a0ca1a309c6d77364b6892ce561c3 /to-wiki
parent      tdf#101805 correct documentation for ERF.PRECISE function (diff)
help-to-wiki shell call replaced with a function
The shell call that ran getalltitles has been replaced with a function call. A new option, -t, was also added to save the title file alltitles.csv. If this file is still needed, add -t to the run so it keeps being generated; otherwise remove the existing copy so a stale file is not kept around.

Change-Id: I2902243df59d415fb313efa7d4132b0190658fa3
Reviewed-on: https://gerrit.libreoffice.org/28650
Reviewed-by: jan iversen <jani@documentfoundation.org>
Tested-by: jan iversen <jani@documentfoundation.org>
Reviewed-by: Andras Timar <andras.timar@collabora.com>
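A minimal sketch of how the driver script might now use the new function instead of shelling out. The -t flag and the alltitles.csv file name come from the commit message; the import path, the write_titles_csv helper, and the argument handling shown here are assumptions for illustration, not part of this diff.

    # Sketch only: assumed driver-side usage of gettitles(); the helper
    # write_titles_csv() and this flag handling are hypothetical.
    import sys
    from getalltitles import gettitles

    def write_titles_csv(titles, path='alltitles.csv'):
        # keep the old "filename;title;readable_title" line format
        out = open(path, 'w')
        for filename, title, readable_title in titles:
            out.write('%s;%s;%s\n' % (filename, title, readable_title))
        out.close()

    title_data = gettitles('source/text/scalc')
    if '-t' in sys.argv:
        # only persist alltitles.csv when explicitly requested
        write_titles_csv(title_data)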
Diffstat (limited to 'to-wiki')
-rwxr-xr-x  to-wiki/getalltitles.py  26
-rwxr-xr-x  to-wiki/wikiconv2.py     15
2 files changed, 16 insertions(+), 25 deletions(-)
diff --git a/to-wiki/getalltitles.py b/to-wiki/getalltitles.py
index 8db9bcb457..71f5aed325 100755
--- a/to-wiki/getalltitles.py
+++ b/to-wiki/getalltitles.py
@@ -137,18 +137,18 @@ def parsexhp(filename):
title = title.strip('_')
title = make_unique(title)
alltitles.append(title)
- print filename + ';' + title + ';' + readable_title
-
-if len(sys.argv) < 2:
- print "getalltitles.py <directory>"
- print "e.g. getalltitles.py source/text/scalc"
- sys.exit(1)
-
-pattern = "xhp"
-
-for root, dirs, files in os.walk(sys.argv[1]):
- for i in files:
- if i.find(pattern) >= 0:
- parsexhp(root+"/"+i)
+ return((filename, title, readable_title))
+
+# Main Function
+def gettitles(path):
+ pattern = "xhp"
+ alltitles = []
+ for root, dirs, files in os.walk(path):
+ for i in files:
+ if i.find(pattern) >= 0:
+ t = parsexhp(root+"/"+i)
+ if t is not None:
+ alltitles.append(t)
+ return alltitles
# vim:set shiftwidth=4 softtabstop=4 expandtab:
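After this change getalltitles.py no longer prints one CSV line per file to stdout; gettitles() collects (filename, title, readable_title) tuples and skips files for which parsexhp() returns nothing. An illustrative call, reusing the directory path from the old usage text:

    # Illustrative only: gettitles() returns a list of 3-tuples instead of
    # writing "filename;title;readable_title" lines to stdout.
    from getalltitles import gettitles

    titles = gettitles('source/text/scalc')
    for filename, title, readable_title in titles:
        print filename + ';' + title + ';' + readable_title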
diff --git a/to-wiki/wikiconv2.py b/to-wiki/wikiconv2.py
index f6569b85e2..b239419293 100755
--- a/to-wiki/wikiconv2.py
+++ b/to-wiki/wikiconv2.py
@@ -1371,15 +1371,6 @@ class XhpParser(ParserBase):
ParserBase.__init__(self, filename, follow_embed, embedding_app,
current_app, wiki_page_name, lang, XhpFile(), buf.encode('utf-8'))
-def loadallfiles(filename):
- global titles
- titles = []
- file = codecs.open(filename, "r", "utf-8")
- for line in file:
- title = line.split(";", 2)
- titles.append(title)
- file.close()
-
class WikiConverter(Thread):
def __init__(self, inputfile, wiki_page_name, lang, outputfile):
Thread.__init__(self)
@@ -1441,19 +1432,19 @@ def write_redirects():
write_link(r, target)
# Main Function
-def convert(generate_redirects, lang, po_root):
+def convert(title_data, generate_redirects, lang, po_root):
if lang == '':
print 'Generating the main wiki pages...'
else:
print 'Generating the wiki pages for language %s...'% lang
+ global titles
+ titles = [t for t in title_data]
global redirects
redirects = []
global images
images = set()
- loadallfiles("alltitles.csv")
-
if lang != '':
sys.stderr.write('Using localizations from "%s"\n'% po_root)
if not load_localization_data(po_root):
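With loadallfiles() and the alltitles.csv round trip gone, callers hand the title data straight to convert(). A hedged sketch of the new call; the option values below are placeholders, not taken from this diff.

    # Sketch: convert() now receives the title data as its first argument
    # instead of reading alltitles.csv itself.
    title_data = gettitles('source/text')   # list of (filename, title, readable_title)
    convert(title_data, generate_redirects=False, lang='', po_root='')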