# -*- coding: utf-8 -*-
"""
Animanga Wiki List Updater
 
http://pl.animanga.wikia.com/
 
== List of accepted arguments ==
 
-page:                          Page with the list.
 
-summarylist:                   Summary for the list edit.
-summaryarticle:                Summary for the subpage edit.
 
-skipsubpages                   Don't update subpages.
 
-extended                       Print extra progress information while fetching data.
 
-noemptygroups                  Hide empty group sections.
-nogroups                       Don't add group sections at all.
 
"""
#
# Distributed under the terms of the CC-BY-NC 3.0 license.
# License summary: http://creativecommons.org/licenses/by-nc/3.0/
# Full legal code: http://creativecommons.org/licenses/by-nc/3.0/legalcode
#
# Author: Karol "Nanaki" Dylewski [http://spolecznosc.wikia.com/wiki/Użytkownik:Nanaki]
# 
 
import wikipedia as pywikibot
import sys, re, datetime
import urllib2, json
 
 
# DEFAULT SETTINGS

# An admin counts as "active" if their latest edit is younger than this.
admin_active_days = 60
 
# Markers delimiting the generated list / the "to add" section on the page.
list_tags = [u'<!-- Lista start -->\n', u'\n<!-- Lista koniec -->']
to_add_tags = [u'== Do dodania ==\n', u'\n== Opis ==']
 
# Wrapper for the compiled table: header template opens it, "|}" closes it.
list_wrap = u'{{Nagłówek listy}}\n%s\n|}'
 
# (subpage template field, replacement value) pairs; each value is
# %-formatted with the wiki's record before being substituted via re.sub.
subpage_params = [
    (u'wiki', u'%(address)s'),
    (u'nazwa wiki', u'%(name)s'),
    (u'licznik stron', u'%(articles)s'),
    # Fixed: the value used to be ur'\%(images)s' - the stray backslash
    # leaked into the re.sub() replacement string, where "\<digits>" is
    # read as a group reference and corrupts the substituted text.
    (u'licznik grafik', u'%(images)s'),
    (u'licznik użytkowników', u'%(users)s'),
    (u'licznik administratorów', u'%(admins)s'),
]
# Applied only when one of the fields above actually changed.
subpage_params_after = [
    (u'aktualizowano', u'{{subst:#time:Y/m/d}}'),
]
 
# Template used for one row of the list page.  The first element is the
# template's name; the rest are its named parameters, %-formatted from a
# wiki record by prepare_template()/compileList().
list_record = [
    u'Wpis na liście', 
    u'Tytuł           = %(title)s',
    u'Adres           = %(address)s',
    u'Artykuły        = %(articles)s',
    u'Grafiki         = %(images)s',
    u'Użytkownicy     = %(users)s',
    u'Administratorzy = %(admins)s',
    u'Data założenia  = %(created)s',
    u'Inne            = %(other)s',
]
# Alphabetical sections: (regex tested against the first character of the
# wiki title, section heading).  Both letter cases are accepted.
list_groups = [
    (ur'[0-9]', u'0–9'),
    (ur'[Aa]', u'A'),
    (ur'[Bb]', u'B'),
    (ur'[Cc]', u'C'),
    (ur'[Dd]', u'D'),
    (ur'[Ee]', u'E'),
    (ur'[Ff]', u'F'),
    (ur'[Gg]', u'G'),
    (ur'[Hh]', u'H'),
    (ur'[Ii]', u'I'),
    (ur'[Jj]', u'J'),
    (ur'[Kk]', u'K'),
    (ur'[Ll]', u'L'),
    (ur'[Mm]', u'M'),
    (ur'[Nn]', u'N'),
    (ur'[Oo]', u'O'),
    (ur'[Pp]', u'P'),
    (ur'[Qq]', u'Q'),
    (ur'[Rr]', u'R'),
    (ur'[Ss]', u'S'),
    (ur'[Tt]', u'T'),
    (ur'[Uu]', u'U'),
    (ur'[Vv]', u'V'),
    (ur'[Ww]', u'W'),
    (ur'[Xx]', u'X'),
    (ur'[Yy]', u'Y'),
    (ur'[Zz]', u'Z'),
]
 
def initialize():
    """Parse command-line arguments and set up module-level state.

    Populates the global ``args`` dict, the per-wiki JSON caches, the
    collected ``wikis`` list, the edit counters and the working ``site``.
    """
    global wikis, args
    args = {}
    args['pagename'] = None
    args['extended'] = False
    args['group'] = True
    args['emptygroups'] = True
    args['skipsubpages'] = False
    args['summarylist'] = None
    args['summaryarticle'] = None
 
    for arg in pywikibot.handleArgs():
        # Fixed: '-summarylist:' used to overwrite args['summaryarticle'],
        # so the list summary could never be customized.
        if arg.startswith(u'-page:'): args['pagename'] = arg[6:]
        elif arg.startswith(u'-summarylist:'): args['summarylist'] = arg[13:]
        elif arg.startswith(u'-summaryarticle:'): args['summaryarticle'] = arg[16:]
        elif arg == u'-extended': args['extended'] = True
        elif arg == u'-noemptygroups': args['emptygroups'] = False
        elif arg == u'-nogroups': args['group'] = False
        elif arg == u'-skipsubpages': args['skipsubpages'] = True
 
    # Caches of API responses, keyed by wiki address.
    global json_cache
    json_cache = {
        'info': {},
        'stats': {},
        'admins': {},
    }
    # Records of every wiki that will appear on the compiled list.
    wikis = []
 
    global removed_count, added_count, updated_count
    removed_count = 0
    added_count = 0
    updated_count = 0
 
    global site
    site = pywikibot.getSite()
 
def main():
    """Entry point: refresh the wiki list page and, optionally, subpages.

    Reads the list page, re-checks every listed wiki against the Wikia API,
    folds in new entries from the "to add" section, then writes the page
    back only if anything changed.
    """
    initialize()
    global args, list_tags, to_add_tags
 
    if args['pagename'] == None:
        args['pagename'] = pywikibot.input('Page with the list to work with:')
    page = pywikibot.Page(site, args['pagename'])
    pywikibot.output('\n\03{lightpurple}== \03{lightaqua}[%s:%s] \03{lightpurple}==\03{default}' % (site,page.title()))
    pywikibot.output('Revision ID :%s\nMade by     :%s\nOn          :%s' % (page.latestRevision(),page.userName(),page.editTime(True)))
 
    old_text = page.getOldVersion(page.latestRevision())
 
    # Column layout shared by all console status rows.
    global console_format
    console_format = '%-60s | %-60s - %-25s | %5s  %4s  %4s  %4s'
 
    # Refresh existing entries, then pull in the "to add" section.
    processList(get_between(old_text, list_tags))
    processed = processAdditions(get_between(old_text, to_add_tags))
 
    new_list = compileList()
 
    # Write back the cleaned "to add" section first, then the new list.
    new_text = put_between(old_text, to_add_tags, processed)
    new_text = put_between(new_text, list_tags, new_list)
 
    if old_text == new_text: return
    pywikibot.output('\n\n\n-- DIFF ----------------------------------------------------')
    pywikibot.showDiff(old_text, new_text)
    pywikibot.output('------------------------------------------------------------')
 
    global i16n_summarylist
    page.put(new_text, comment=args['summarylist'] or __(i16n_summarylist))
def parseLine(line):
    """Extract a wiki's subdomain ("address") from one list line.

    Recognizes plain URLs (http://xxx.wikia.com) and interwiki links
    ([[w:c:xxx]] or [[w:c:xxx|label]]).  Returns the address string, or
    None when the line contains neither form.
    """
    # code2 uses a lazy \S*? so that '[[w:c:addr|Label]]' yields 'addr';
    # the old greedy \S* swallowed '|Label' whenever the label contained
    # no whitespace, producing addresses like 'bleach|Bleach'.
    match = re.compile(r'(http\:\/\/(www\.)?(?P<code1>\S*?)\.wikia\.com)|(\[\[\s*w:c:(?P<code2>\S*?)(\|[^\]]*)?\]\])', re.I).search(line)
    if match == None: return None
    return match.group('code1') or match.group('code2')
def processAdditions(text):
    """Process the "to add" section, fetching data for each new wiki.

    Every bullet line ('*') should contain a wiki address.  Parsed wikis
    are appended to the global ``wikis`` list (unless already present) and
    their bullet lines dropped from the section; lines whose address can't
    be parsed are kept and tagged with a warning.  Returns the section
    text to write back.
    """
    pywikibot.output('\n\03{lightpurple}======   \03{lightgreen}PROCESSING NEW WIKIS   \03{lightpurple}======\03{default}')
    global i16n_couldntparseaddress, console_format, wikis
    lines = text.split('\n')
    new_lines = []
    for line in lines:
        line = line.rstrip(' \t\r\n')
        # Non-bullet lines are passed through untouched.
        if not line.startswith('*'):
            new_lines.append(line)
            continue
        code = parseLine(line)
        if code == None:
            # Keep the line, appending the warning only once.
            if line.endswith(__(i16n_couldntparseaddress)): new_lines.append(line)
            else: new_lines.append(line+' '+__(i16n_couldntparseaddress))
            # Fixed: without this `continue` execution fell through and
            # called processWiki(None), querying http://None.wikia.com and
            # aborting the whole run on unparseable lines.
            continue
        # An ordinary [[...]] link (not w:c:) supplies the display title.
        match = re.compile('\[\[(?!w\:c\:)(?P<title>[^\]]+)\]\]').search(line)
        if match == None: title = ''
        else: title = match.group('title')
 
        rec = processWiki(code)
        rec['title'] = title or rec['topic']
 
        on_the_list = False
        for wiki in wikis:
            if wiki['address'] == rec['address']:
                on_the_list = True
                break
        if on_the_list:
            pywikibot.output('\03{lightpurple}%s\03{default} - already on the list' % (console_format % (rec['title'],rec['name'],rec['address'],rec['articles'],rec['images'],rec['users'],rec['admins'])))
        else:
            pywikibot.output(console_format % (rec['title'],rec['name'],rec['address'],rec['articles'],rec['images'],rec['users'],rec['admins']))
            wikis.append(rec)
 
    return u'\n'.join(new_lines)
def processWiki(wiki):
    """Fetch fresh data for *wiki* and return it as a record dict.

    The record carries the fields consumed by the list template: address,
    name, topic (sitename with a leading/trailing "Wiki" stripped), and
    the article/image/active-user/active-admin counters.
    """
    data = get_wiki_statinfo(wiki)
    info = data['info']
    stats = data['stats']
    # Derive the topic by trimming "Wiki" off either end of the sitename.
    topic = re.sub('((^\s?[Ww]iki\s?)|(\s?[Ww]iki\s?$))', '', info['sitename'])
    return {
        'address': info['wikia_code'],
        'name': info['sitename'],
        'topic': topic,
        'articles': stats['articles'],
        'images': stats['images'],
        'users': stats['activeusers'],
        'admins': len(get_wiki_admins(info['wikia_code'], active=True)),
    }
def processList(text):
    """Re-fetch statistics for every wiki already on the list.

    Parses each list-entry template occurrence, refreshes its data from
    the API, appends the record to the global ``wikis`` list and (unless
    -skipsubpages) updates the wiki's subpage.  Closed or missing wikis
    are reported and thereby dropped from the rebuilt list.
    """
    pywikibot.output('\n\03{lightpurple}======   \03{lightgreen}PROCESSING LIST   \03{lightpurple}======\03{default}')
    global list_record, wikis, console_format, args
    basic = re.compile("\{\{\s*%s(.*?)\}\}"%re.escape(list_record[0]), re.S)
    iter = basic.finditer(text)
    pywikibot.output('\03{lightyellow}%s\03{default}\n' % (console_format % ('Topic','Wiki','Address','Arts','Imgs','Usrs','Adms')))
    while True:
        try: match = iter.next().group(0)
        except StopIteration: break
        except AttributeError: continue
        else:
            # Extract the current template parameters into a field dict.
            rec = template_params(match, list_record)
            try:
                wiki = processWiki(rec['address'])
                rec.update(wiki)
            except InvalidWiki, e:
                # Dead wiki: log it and leave it out of the new list.
                pywikibot.output('\03{lightyellow}[%s]\03{default} - %s' % (rec['address'], ('wiki not found','wiki closed')[e.closed]))
                continue
            if not rec['title']: rec['title'] = rec['topic']
            pywikibot.output(console_format % (rec['title'],rec['name'],rec['address'],rec['articles'],rec['images'],rec['users'],rec['admins']))
            wikis.append(rec)
            if not args['skipsubpages']: updateSubpage(rec)
 
def updateSubpage(wiki):
    """Refresh the statistics fields on a single wiki's subpage.

    Substitutes fresh values for the subpage_params fields; when any of
    them changed, also stamps the 'aktualizowano' date and saves the page
    asynchronously.  Returns False when the page is missing or a double
    redirect, otherwise None.
    """
    global site, subpage_params
    page = pywikibot.Page(site, wiki['title'])
    if not page.exists(): return False
 
    # Follow a single redirect hop; give up on redirect chains.
    if page.isRedirectPage(): page = page.getRedirectTarget()
    if page.isRedirectPage(): return False
 
    pywikibot.output(u'\03{lightaqua}[[%s:%s]]\03{default}' % (site, page.title()))
 
    new_text = old_text = unicode(page.getOldVersion(page.latestRevision()))
 
    # Replace only the first occurrence of each '| param = value' field.
    for param, value in subpage_params:
        new_text = re.sub(ur'(\|\s*%s\s*\=\s*).*?(\s*(\||\}\}))' % param, ur'\g<1>%s\g<2>'%(value%wiki), new_text, count=1)
 
    if old_text == new_text: return
    # Something changed - update the timestamp field as well.
    for param, value in subpage_params_after:
        new_text = re.sub(ur'(\|\s*%s\s*\=\s*).*?(\s*(\||\}\}))' % param, ur'\g<1>%s\g<2>'%(value%wiki), new_text, count=1)
 
    if old_text == new_text: return
    pywikibot.showDiff(old_text, new_text)
 
    global i16n_summaryarticle, args
    page.put_async(new_text, comment=args['summaryarticle'] or __(i16n_summaryarticle))
def _compile_rows(entries, template):
    # Render each wiki record through the row template and wrap the rows in
    # the list header, or return '' when there are no rows.  Optional
    # fields are filled in-place with '' so %-formatting cannot KeyError.
    rows = []
    for wiki in entries:
        if not 'other' in wiki: wiki['other'] = ''
        if not 'created' in wiki: wiki['created'] = ''
        if not 'status' in wiki: wiki['status'] = ''
        rows.append(template % wiki)
    if len(rows): return list_wrap % u'\n'.join(rows)
    return ''
def compileList():
    """Build the final list wikitext from the global ``wikis`` records.

    Sorts the records by topic and renders them with the list_record
    template, optionally split into alphabetical sections (list_groups).
    Empty sections are kept unless -noemptygroups was given.
    """
    global wikis, list_record, list_groups, list_wrap, args
    pywikibot.output('\n\03{lightpurple}======   \03{lightgreen}COMPILING LIST   \03{lightpurple}======\03{default}')
 
    qs(wikis, 'topic')
    template = prepare_template(list_record)
 
    if not args['group']:
        # Flat output: one table, no section headings.
        return _compile_rows(wikis, template).strip()
 
    sections = []
    for pattern, title in list_groups:
        rx = re.compile('^%s' % pattern)
        members = [wiki for wiki in wikis if rx.match(wiki['title']) != None]
        if not args['emptygroups'] and len(members) == 0: continue
        sections.append('== %s ==\n%s' % (title, _compile_rows(members, template)))
    return (u'\n\n'.join(sections)).strip()
def template_params(text, template):
    """Extract parameter values from a template invocation in *text*.

    Returns a dict mapping the %-format field names declared in *template*
    to the values used in the wikitext (missing fields are absent).
    """
    unnamed_rx = prepare_template(template, rx = True)
    named_rxs = prepare_template(template, rx = True, ret_named = True)
 
    info = {}
    # Named parameters first: one regex per '| name = value' line.
    for pattern in named_rxs:
        found = pattern.search(text)
        if found is not None:
            info.update(found.groupdict())
 
    # Then the positional parameters (False when the template has none).
    if unnamed_rx:
        found = unnamed_rx.search(text)
        if found is not None:
            info.update(found.groupdict())
    return info
def prepare_template(template, rx = False, ret_named = False, ret_unnamed = False):
    """Build template wikitext or matching regexes from *template*.

    template -- list: [template name, 'param = %(field)s', ...]

    Return value depends on the flags:
      rx=False (default)       -> fill-in wikitext string for the template
      rx=True                  -> compiled regex matching the template head
                                  and its unnamed parameters, or False when
                                  there are none
      rx=True, ret_named=True  -> list of compiled regexes, one per named
                                  parameter, each with a named capture group
      ret_unnamed=True         -> the raw unnamed parameter strings
    """
    named = []
    unnamed = []
 
    # A parameter containing '=' is named; anything else is positional.
    for param in template[1:]:
        if param.find('=') == -1: unnamed.append(param)
        else: named.append(param)
 
    if rx and ret_named:
        for i, nam in enumerate(named):
            # Turn each %(field)s placeholder into a named capture group
            # that stops at the next '|' or '}'.
            m = re.findall(r"%[^\(]*\((.*?)\)", named[i])
            d = {}
            for x in m:
                d[x] = '(?P<%s>[^\|^\}]*?)'%x
            named[i] = nam % d
            # Anchor the pattern to a whole '| name = value' line.
            named[i] = re.sub(r'^(.*?)\s*=\s*(.*?)\s*$', r'^\s*\|\s*\1\s*\=\s*\2\s*$', named[i])
            named[i] = re.compile(named[i], re.M)
    if ret_named: return named
    if ret_unnamed: return unnamed
 
    if rx and len(unnamed) == 0: return False
    if rx: join = '\s*\|\s*'
    else: join = ' | '
 
    if len(unnamed):
        unnamed = join + join.join(unnamed)
        if not rx: unnamed = unnamed + ' '
    else: unnamed = ''
 
    if rx:
        # Head pattern: '{{ name | unnamed...' closed by '}}' only when the
        # template has no named parameters.
        tpl = "^{{\s*%s%s%s" % (re.escape(template[0]), unnamed, ('\s*$','\s*}}')[len(named)==0])
        m = re.findall(r"%[\(]*\((.*?)\)", tpl)
        d = {}
        for x in m:
            d[x] = '(?P<%s>.*?)'%x
        tpl = tpl % d
        return re.compile(tpl, re.M)
 
 
    if len(named): named = '\n| ' + '\n| '.join(named) + '\n'
    else: named = ''
 
    return "{{%s%s%s}}" % (template[0], unnamed, named)
def request(url, tries=5):
    """GET *url*, retrying up to *tries* times on HTTP/socket errors.

    Returns the response body as a string.  Raises RequestError when all
    attempts fail, the body is empty, or the URL itself is invalid.
    """
    from socket import error as socket_error
    response = ''
    x = 0
    while x<tries:
        try:
            # A previous iteration succeeded - stop retrying.
            if response: break
            response = urllib2.urlopen(url)
        except urllib2.HTTPError: x += 1
        except socket_error: x += 1
        except urllib2.URLError, e: raise RequestError(u'URLError: %s' % unicode(e.reason))
 
    if not response: raise RequestError(u'No response')
 
    response = response.read()
    if response == '': raise RequestError('Empty response')
    return response
def json_from_url(url, tries=5):
    """Fetch *url* and decode the body as JSON.

    Raises InvalidWiki when Wikia serves its "closed wiki" or "not a valid
    wikia" page instead of API output, and JSONError when the body cannot
    be decoded.
    """
    response = request(url, tries=tries)
 
    # Wikia answers these special pages with HTTP 200, so sniff the body.
    if response.find('page-Special_CloseWiki') >= 0: raise InvalidWiki(url,True)
    if response.find('page-Community_Central_Not_a_valid_Wikia') >= 0: raise InvalidWiki(url)
 
    try:
        return json.loads(response)
    except ValueError:
        raise JSONError('No JSON object could be decoded')
def get_wiki_admins(address, active=False, useCache=True, useInfoCache=True):
    """Return the sysops/bureaucrats of the wiki at *address*.

    With active=False the raw allusers API list is returned (and cached).
    With active=True only admins whose latest contribution is younger than
    admin_active_days are returned, measured against the wiki's own server
    time from siteinfo.
    """
    global json_cache, args, admin_active_days
    if useCache and address in json_cache['admins']: admins = json_cache['admins'][address]
    else:
        if args['extended']: pywikibot.output('JSON: Fetching admins for [%s]' % address)
        url = 'http://%s.wikia.com/api.php?action=query&list=allusers&auprop=editcount&augroup=sysop|bureaucrat&format=json' % address
        admins = json_from_url(url)['query']['allusers']
    if not active:
        json_cache['admins'][address] = admins
        return json_cache['admins'][address]
    # 'time' is the server's current timestamp reported by meta=siteinfo.
    now = datetime.datetime.strptime(get_wiki_info(address, useCache = useInfoCache)['time'], u'%Y-%m-%dT%H:%M:%SZ')
    activeadmins = []
    for admin in admins:
        # Admins without a single edit can be skipped without an API call.
        if admin['editcount'] == 0: continue
        url = 'http://%s.wikia.com/api.php?action=query&list=usercontribs&uclimit=1&ucuser=%s&ucprop=timestamp&format=json' % (address, urllib2.quote(admin['name'].encode('utf-8')))
        try:
            delta = now-datetime.datetime.strptime(json_from_url(url)['query']['usercontribs'][0]['timestamp'], u'%Y-%m-%dT%H:%M:%SZ')
        except IndexError:
            # No contributions returned at all - treat as inactive.
            continue
        if delta.days >= admin_active_days: continue
        activeadmins.append(admin)
    return activeadmins
def get_wiki_info(address, useCache=True):
    """Return (and cache) the meta=siteinfo 'general' data for a wiki.

    Also derives 'wikia_code' (the xxx of xxx.wikia.com) from the reported
    server URL, falling back to *address* when it cannot be parsed.
    """
    global json_cache, args
    if useCache and address in json_cache['info']: return json_cache['info'][address]
    if args['extended']: pywikibot.output('JSON: Fetching info about [%s]' % address)
    url = 'http://%s.wikia.com/api.php?action=query&meta=siteinfo&siprop=general&format=json' % address
    json_cache['info'][address] = json_from_url(url)['query']['general']
    # The canonical subdomain may differ from the queried one (redirects).
    match = re.compile('http:\/\/(www\.)?(.*?)\.wikia\.com', re.I).search(json_cache['info'][address]['server'])
    try: json_cache['info'][address]['wikia_code'] = match.group(2).strip()
    except AttributeError: json_cache['info'][address]['wikia_code'] = address.strip()
    return json_cache['info'][address]
def get_wiki_stats(address, useCache=True):
    """Return (and cache) the meta=siteinfo 'statistics' data for a wiki."""
    global json_cache, args
    cache = json_cache['stats']
    if useCache and address in cache:
        return cache[address]
    if args['extended']:
        pywikibot.output(u'JSON: Fetching statistics for [%s]' % address)
    url = 'http://%s.wikia.com/api.php?action=query&meta=siteinfo&siprop=statistics&format=json' % address
    cache[address] = json_from_url(url)['query']['statistics']
    return cache[address]
def get_wiki_statinfo(address, useCache=True):
    """Return {'info': ..., 'stats': ...} for a wiki, in one API call when
    neither part is cached yet.

    Falls back to get_wiki_info()/get_wiki_stats() when only one half is
    missing.  Mirrors get_wiki_info()'s 'wikia_code' derivation so the
    combined fetch path also gets the canonical subdomain.
    """
    global json_cache, args
    if useCache and address in json_cache['stats']: stats = json_cache['stats'][address]
    else: stats = None
    if useCache and address in json_cache['info']: info = json_cache['info'][address]
    else: info = None
    if stats == None and info == None:
        # Both halves missing: fetch general+statistics in a single request.
        if args['extended']: pywikibot.output(u'JSON: Fetching info and statistics for [%s]' % address)
        url = 'http://%s.wikia.com/api.php?action=query&meta=siteinfo&siprop=general|statistics&format=json' % address
        data = json_from_url(url)['query']
        json_cache['stats'][address] = data['statistics']
        json_cache['info'][address] = data['general']
    elif stats == None: stats = get_wiki_stats(address)
    elif info == None: info = get_wiki_info(address)
 
    # Same wikia_code extraction as in get_wiki_info().
    match = re.compile('http:\/\/(www\.)?(.*?)\.wikia\.com', re.I).search(json_cache['info'][address]['server'])
    try: json_cache['info'][address]['wikia_code'] = match.group(2).strip()
    except AttributeError: json_cache['info'][address]['wikia_code'] = address.strip()
    return {'info':json_cache['info'][address],'stats':json_cache['stats'][address]}
def put_between(text, tag, what):
    """Replace whatever sits between tag[0] and tag[1] in *text* by *what*.

    The tags themselves are preserved.  Raises TagsNotFound when either
    marker is missing.
    """
    head = text.find(tag[0])
    tail = text.find(tag[1])
    if head == -1 or tail == -1:
        raise TagsNotFound(tag, [head != -1, tail != -1])
    head += len(tag[0])
    return text[:head] + what + text[tail:]
def get_between(text, tag):
    """Return the substring of *text* between tag[0] and tag[1]
    (tags excluded).

    Raises TagsNotFound when either marker is missing.
    """
    start = text.find(tag[0])
    end = text.find(tag[1])
    if start == -1 or end == -1:
        raise TagsNotFound(tag, [start != -1, end != -1])
    # Fixed: dropped the unreachable `return text` that followed the
    # if/else (both branches already returned or raised).
    return text[start + len(tag[0]):end]
 
def input_bool(text, answers = ['Yes', 'No'], hotkeys = ['Y','N'], isTrue = 'y', default = 'y', override = None):
    """Ask a yes/no question on the console and return the answer as bool.

    *override*, when given, is echoed and returned (coerced to bool)
    without prompting.
    """
    if override is not None:
        pywikibot.output(u'%s \03{lightgreen}%s\03{default}' % (text, bool(override)))
        return bool(override)
    choice = pywikibot.inputChoice(text, answers, hotkeys, default)
    return choice == isTrue.lower()
def input_number(text, override = None):
    """Prompt for an integer, re-asking until the input parses.

    *override*, when given, is echoed and returned without prompting.
    """
    if override is not None:
        pywikibot.output(u'%s \03{lightgreen}%s\03{default}' % (text, override))
        return override
    while True:
        answer = pywikibot.input(text)
        try:
            return int(answer)
        except ValueError:
            pywikibot.output(u'Numbers, please!')
# Exceptions
class TagsNotFound(Exception):
    # Raised by get_between()/put_between() when the section markers are
    # missing from the page text.
    def __init__(self, tags, flags):
        # tags  -- [start marker, end marker]
        # flags -- [start found?, end found?] booleans
        self.tags = tags
        self.flags = flags
    def __str__(self):
        if not self.flags[0] and not self.flags[1]:
            return u'Tags not found: "%s" <-> "%s". Couldn\'t find both tags' % (re.escape(self.tags[0]), re.escape(self.tags[1]))
        if not self.flags[0]:
            return u'Tags not found: "%s" <-> "%s". Couldn\'t find starting tag' % (re.escape(self.tags[0]), re.escape(self.tags[1]))
        if not self.flags[1]:
            return u'Tags not found: "%s" <-> "%s". Couldn\'t find ending tag' % (re.escape(self.tags[0]), re.escape(self.tags[1]))
        # Both tags were found - this exception should never be raised then.
        return 'How the hell did you get here oO'
class InvalidWiki(Exception):
    """Raised when a wiki address points at a closed or nonexistent wiki."""
    def __init__(self, url, closed=False):
        # closed=True means Wikia served its "closed wiki" page.
        self.url = url
        self.closed = closed
    def __str__(self):
        if self.closed:
            template = u'Wiki has been closed: %s'
        else:
            template = u'Wiki not found: %s'
        return template % self.url
class JSONError(Exception):
    """Raised when an API response body is not valid JSON."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return self.value
class RequestError(Exception):
    """Raised when an HTTP request fails or returns an empty body."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return self.value
 
# Quick sort (in-place, median-of-three pivot).  `x` is an optional dict
# key: when given, elements are compared by l[i][x] instead of l[i].
def qs(l, x):
    """Sort the list *l* in place and return it.

    x -- dict key to compare records by, or None to compare the elements
         themselves.
    """
    qsr(l, 0, len(l) - 1, x)
    return l
def qsr(l, s, e, x):
    # Recursive step: partition l[s:e+1], then sort both halves.
    if e > s:
        p = qsp(l, s, e, x)
        qsr(l, s, p - 1, x)
        qsr(l, p + 1, e, x)
def qsp(l, s, e, x):
    """Partition l[s:e+1] around a median-of-three pivot; return its index."""
    # Fixed: `//` keeps the index an int under Python 3 too (was `/`,
    # which is identical floor division on Python 2 ints).
    a = (s + e) // 2
    # Order l[s], l[a], l[e] so the median lands at l[a], then move it to
    # l[s] to serve as the pivot.
    if x is not None:
        if l[s][x] > l[a][x]:
            l[s], l[a] = l[a], l[s]
        if l[s][x] > l[e][x]:
            l[s], l[e] = l[e], l[s]
        if l[a][x] > l[e][x]:
            l[a], l[e] = l[e], l[a]
        l[a], l[s] = l[s], l[a]
    else:
        if l[s] > l[a]:
            l[s], l[a] = l[a], l[s]
        if l[s] > l[e]:
            l[s], l[e] = l[e], l[s]
        if l[a] > l[e]:
            l[a], l[e] = l[e], l[a]
        l[a], l[s] = l[s], l[a]

    p = s
    i = s + 1
    j = e

    # Scan: i walks right past elements <= pivot, j walks left past
    # elements > pivot; swap each out-of-place pair until the scans cross.
    if x is not None:
        while True:
            while i <= e and l[i][x] <= l[p][x]:
                i += 1
            while j >= s and l[j][x] > l[p][x]:
                j -= 1
            if i >= j:
                break
            else:
                l[i], l[j] = l[j], l[i]
    else:
        while True:
            while i <= e and l[i] <= l[p]:
                i += 1
            while j >= s and l[j] > l[p]:
                j -= 1
            if i >= j:
                break
            else:
                l[i], l[j] = l[j], l[i]

    # Put the pivot into its final position.
    l[j], l[p] = l[p], l[j]
    return j
def __(messages):
    """Return the message for the working site's language, falling back
    to English when no translation exists.

    messages -- i16n dict mapping language codes to strings.
    (Parameter renamed from `dict`, which shadowed the builtin; all call
    sites pass it positionally.)
    """
    global site
    lang = site.lang
 
    if lang in messages: return messages[lang]
    return messages['en']
# i16n
# Edit summary used when saving the list page.
i16n_summarylist = {
    'en': u"Robot: Updating list of wikis",
    'pl': u"Robot aktualizuje listę wiki",
}
# Edit summary used when saving a wiki's statistics subpage.
i16n_summaryarticle = {
    'en': u"Robot: Updating wiki stats",
    'pl': u"Robot aktualizuje statystyki wiki",
}
# Warning appended to "to add" lines whose wiki address can't be parsed.
i16n_couldntparseaddress = {
    'en': u"couldn't parse wiki address - fix the formatting, preferably to interwiki",
    'pl': u"nie udało się odczytać adresu - popraw format, najlepiej na interwiki",
}
 
if __name__ == "__main__":
    # Run the updater, then shut the bot framework down cleanly.
    main()
    pywikibot.stopme()
    sys.exit(0)
