author      Akash Deshpande <fastakash@gmail.com>       2016-09-03 16:13:28 -0400
committer   Andras Timar <andras.timar@collabora.com>   2016-09-09 07:16:25 +0000
commit      187a8d006c9b738a387eedd65c0cb71e8257e6ce (patch)
tree        98b73262401a0ca1a309c6d77364b6892ce561c3 /to-wiki/wikiconv2.py
parent      tdf#101805 correct documentation for ERF.PRECISE function (diff)
help-to-wiki shell call replaced with a function
The shell call used to run getalltitles has been replaced with a function call. A new option, -t, saves the title file alltitles.csv. If you still need this file, add -t to the run so that it continues to be generated; otherwise leave the option out and delete any existing copy, so that a stale file is not kept around.

Change-Id: I2902243df59d415fb313efa7d4132b0190658fa3
Reviewed-on: https://gerrit.libreoffice.org/28650
Reviewed-by: jan iversen <jani@documentfoundation.org>
Tested-by: jan iversen <jani@documentfoundation.org>
Reviewed-by: Andras Timar <andras.timar@collabora.com>
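For illustration, here is a minimal, hypothetical sketch of how the caller side in help-to-wiki could collect titles in-process and hand them to the new convert() signature. Only the -t behaviour and the convert(title_data, generate_redirects, lang, po_root) signature come from this change; get_all_titles(), run(), and the example data are assumed names, not the actual help-to-wiki code.

# Hypothetical caller-side sketch only; get_all_titles() and run() are assumed names.
import codecs

def get_all_titles():
    # Stand-in for calling the getalltitles code as a function instead of
    # shelling out; each entry mirrors one semicolon-separated line that
    # used to live in alltitles.csv.
    return [u'text/scalc/01/func_error.xhp;CALC;ERF.PRECISE function']

def run(save_titles=False):
    raw_lines = get_all_titles()
    # Split each line the same way the removed loadallfiles() did.
    title_data = [line.split(';', 2) for line in raw_lines]

    if save_titles:
        # Written only when -t was passed, so no stale alltitles.csv lingers.
        out = codecs.open('alltitles.csv', 'w', 'utf-8')
        out.write(u'\n'.join(raw_lines) + u'\n')
        out.close()

    # The parsed titles are now passed straight to the converter:
    # wikiconv2.convert(title_data, generate_redirects, lang, po_root)
    return title_data

if __name__ == '__main__':
    run(save_titles=False)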
Diffstat (limited to 'to-wiki/wikiconv2.py')
-rwxr-xr-x  to-wiki/wikiconv2.py  15
1 file changed, 3 insertions, 12 deletions
diff --git a/to-wiki/wikiconv2.py b/to-wiki/wikiconv2.py
index f6569b85e2..b239419293 100755
--- a/to-wiki/wikiconv2.py
+++ b/to-wiki/wikiconv2.py
@@ -1371,15 +1371,6 @@ class XhpParser(ParserBase):
         ParserBase.__init__(self, filename, follow_embed, embedding_app,
                 current_app, wiki_page_name, lang, XhpFile(), buf.encode('utf-8'))
 
-def loadallfiles(filename):
-    global titles
-    titles = []
-    file = codecs.open(filename, "r", "utf-8")
-    for line in file:
-        title = line.split(";", 2)
-        titles.append(title)
-    file.close()
-
 class WikiConverter(Thread):
     def __init__(self, inputfile, wiki_page_name, lang, outputfile):
         Thread.__init__(self)
@@ -1441,19 +1432,19 @@ def write_redirects():
         write_link(r, target)
 
 # Main Function
-def convert(generate_redirects, lang, po_root):
+def convert(title_data, generate_redirects, lang, po_root):
     if lang == '':
         print 'Generating the main wiki pages...'
     else:
         print 'Generating the wiki pages for language %s...'% lang
+    global titles
+    titles = [t for t in title_data]
     global redirects
     redirects = []
     global images
     images = set()
-    loadallfiles("alltitles.csv")
-
     if lang != '':
         sys.stderr.write('Using localizations from "%s"\n'% po_root)
         if not load_localization_data(po_root):