# documentation of dokuwiki xmlrpc commands: https://www.dokuwiki.org/devel:xmlrpc
import argparse
import datetime
import re
from xmlrpc.client import ServerProxy as Proxy, Error as wikiError

import config
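
# The config module is not part of this commit; it is assumed to provide the
# XML-RPC endpoint of the wiki, e.g. something along the lines of
#   WIKI_API_URL = "https://user:password@wiki.example.org/lib/exe/xmlrpc.php"
# (lib/exe/xmlrpc.php is DokuWiki's standard RPC endpoint).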


def get_time():
    """Return the DokuWiki server's current time as a Unix timestamp."""
    with Proxy(config.WIKI_API_URL) as proxy:
        return proxy.dokuwiki.getTime()


def strip_namespace(full_name):
    """Returns the pure pagename without namespace.

    If a namespace without pagename is given, like "n1:n2:", the last namespace is returned.
    """
    split = full_name.split(":")
    if len(split) > 1 and split[-1] == "":
        return split[-2]
    return split[-1]
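
# Illustrative examples (hypothetical pagenames):
#   strip_namespace("wiki:syntax") -> "syntax"
#   strip_namespace("n1:n2:")      -> "n2"
#   strip_namespace("start")       -> "start"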


def move_page(old_pagename, new_pagename, delete=False):
    """Moves a page, updates links to the new page and flags or deletes the old one."""
    with Proxy(config.WIKI_API_URL) as proxy:
        # create new page with identical content #
        page_content = proxy.wiki.getPage(old_pagename)
        proxy.wiki.putPage(new_pagename, page_content, {
            "sum": "Moved from " + old_pagename})
        # update back links to new page
        change_links(old_pagename, new_pagename)
        # either flag or delete original page #
        if not delete:
            proxy.dokuwiki.appendPage(old_pagename, "\n DELETEME This page was moved to " + new_pagename,
                                      {"sum": "Moved to " + new_pagename + " and marked page for deletion"})
        else:
            proxy.wiki.putPage(old_pagename, "", {
                "sum": "Moved to " + new_pagename})


def move_pages(original_name_regex, new_name_func, delete=False):
    """Moves a bunch of pages whose complete names match a given regex; new names are given via a function.

    original_name_regex is a regular expression which the old names have to match completely.
    new_name_func is a function which gets the matched old name as input and returns the new name.
    delete is a boolean flag whether old pages are to be deleted or just marked.
    """
    with Proxy(config.WIKI_API_URL) as proxy:
        # move sites #
        pages = proxy.dokuwiki.getPagelist("")
        for page in pages:
            old_page = page.get("id")
            if not re.fullmatch(original_name_regex, old_page):
                continue
            new_page = new_name_func(old_page)
            # create the new page with identical content and update back links #
            page_content = proxy.wiki.getPage(old_page)
            proxy.wiki.putPage(new_page, page_content, {
                "sum": "Moved from " + old_page})
            change_links(old_page, new_page)
            # either flag or delete the original page #
            if not delete:
                proxy.dokuwiki.appendPage(old_page, "\n DELETEME This page was moved to " + new_page,
                                          {"sum": "Moved to " + new_page + " and marked page for deletion"})
            else:
                proxy.wiki.putPage(
                    old_page, "", {"sum": "Moved to " + new_page})


def change_links(old_pagename, new_pagename):
    """Updates pages that link to an old page, to link to the new page instead."""
    with Proxy(config.WIKI_API_URL) as proxy:
        backLinks = proxy.wiki.getBackLinks(old_pagename)
        # regex for dokuwiki links we want to replace; the pagename is escaped
        # so regex metacharacters like "." in names match literally
        reg = rf"\[\[\s*{re.escape(old_pagename)}\s*\|(.*?)\]\]"

        def _replacer(matched):
            return "[[" + new_pagename + "|" + matched.group(1) + "]]"

        for page in backLinks:
            content = proxy.wiki.getPage(page)
            content = re.sub(reg, _replacer, content)
            proxy.wiki.putPage(
                page, content, {"sum": "Update links from " + old_pagename + " to " + new_pagename})
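
# Note on the regex above: it only rewrites internal links that carry an
# explicit title part, e.g. [[ old:page |Some title]] becomes
# [[new:page|Some title]]; a bare link like [[old:page]] contains no "|" and
# is left untouched.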


def find_old_pages(timedelta, namespace=""):
    """Returns all pages whose last revision is older than the given timedelta.

    The timedelta can be given as a datetime.datetime or datetime.timedelta object.
    Optionally a namespace can be given to only get old pages from that namespace.
    """
    with Proxy(config.WIKI_API_URL) as proxy:
        pages = proxy.dokuwiki.getPagelist(namespace)
        proxy_time = proxy.dokuwiki.getTime()
        # a relative timedelta is turned into an absolute datetime via the server time
        if isinstance(timedelta, datetime.timedelta):
            timedelta = datetime.datetime.fromtimestamp(proxy_time) - timedelta
        old_pages = [page for page in pages if page.get(
            'rev') < timedelta.timestamp()]
        return old_pages
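
# Examples (hypothetical values): pages in the "wiki" namespace untouched for
# roughly a year, or everything last changed before a fixed date:
#   find_old_pages(datetime.timedelta(days=365), namespace="wiki")
#   find_old_pages(datetime.datetime(2020, 1, 1))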


def mark_page(pagename, message, summary="Marked page"):
    """Puts a message at the beginning of a page."""
    with Proxy(config.WIKI_API_URL) as proxy:
        content = proxy.wiki.getPage(pagename)
        proxy.wiki.putPage(pagename, message + "\n\n" + content, {"sum": summary})
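
# Example (hypothetical values):
#   mark_page("wiki:start", "FIXME check the links below", "flagged start page")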


def mark_old_pages(timedelta, namespace=""):
    """Marks all old pages that do not start with a 'FIXME'.

    The timedelta can be given as a datetime.datetime or datetime.timedelta object.
    Optionally a namespace can be given to only mark old pages from that namespace.
    """
    old_pages = find_old_pages(timedelta, namespace)
    message = "FIXME This page is out of date and should be reviewed\n\n"
    summary = "Marked page due to its age"
    with Proxy(config.WIKI_API_URL) as proxy:
        for page in old_pages:
            if not proxy.wiki.getPage(page.get("id")).startswith("FIXME"):
                mark_page(page.get("id"), message, summary)
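
# Example (hypothetical value): flag everything older than 90 days:
#   mark_old_pages(datetime.timedelta(days=90))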


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Functions for dokuwiki interaction. To see details for a specific positional argument use 'argumentname -h'.")
    subparsers = parser.add_subparsers()

    # parser for time command #
    parser_time = subparsers.add_parser(
        'time', help="Returns the current server time.",
        description="Returns the server time as a Unix timestamp.")
    parser_time.set_defaults(func=get_time)

    # parser for link update #
    parser_relink = subparsers.add_parser(
        'relink', help="Updates links to a new page.",
        description="Changes all links to a specific wiki page, to link to another page instead.")
    parser_relink.add_argument(
        "old_pagename", metavar="from", help="Old pagename")
    parser_relink.add_argument(
        "new_pagename", metavar="to", help="New pagename")
    parser_relink.set_defaults(func=change_links)

    # parser for find_old_pages #
    parser_old = subparsers.add_parser(
        'old-pages', help="Returns pages last changed before a certain date.",
        description="Returns a list of pages last changed before the given date. For each page the name, timestamp of the last change, and number of characters are given.")
    parser_old.add_argument("timedelta",
                            type=lambda s: datetime.datetime.strptime(
                                s, '%Y-%m-%d-%H-%M'),
                            help="Date in the format year-month-day-hour-minute.")
    parser_old.add_argument("-n", dest="namespace",
                            help="Namespace in which to search", default=None, metavar="namespace")
    parser_old.set_defaults(func=find_old_pages)

    # parser for single page moving #
    parser_move = subparsers.add_parser(
        'move', help="Moves a page and updates backlinks.",
        description="Moves a page to a new name(space). Links to the old page are updated. The old page can be deleted.")
    parser_move.add_argument(
        "old_pagename", metavar="from", help="Old pagename")
    parser_move.add_argument("new_pagename", metavar="to", help="New pagename")
    parser_move.add_argument("-d", "--delete", dest="delete", action='store_true',
                             help="Flag indicating that the old page should be deleted.")
    parser_move.set_defaults(func=move_page)

    args = parser.parse_args()
    # call the selected function, filtering out 'func' itself and all arguments that are None
    try:
        value = args.func(**{k: v for k, v in vars(args).items()
                             if (k != "func") and (v is not None)})
        print(value)
    except AttributeError:
        print("No function specified, use -h to view available functions")