# -*- coding: utf-8 -*-
"""
List Wikia New Wikis
 
This script updates a list with new wikis.
 
== List of accepted arguments ==

-languages:               Languages to be listed, comma-separated. Defaults to the wiki's language.

-page:                    Page that contains the list to be updated.

-count:                   How many new wikis to fetch (per language).

-summary:                 Summary of the edit.

-articles:num             Add only wikis with an article count of at least num (prompted for if omitted).

-users:num                Add only wikis with an active user count of at least num (prompted for if omitted).

-old -noold               Process wikis already on the list like new ones (re-check counts, update URLs).
-group -nogroup           Group the final list by the first letter of the URL.
-save -nosave             Save the page.

-nodiff                   Don't show the final diff, since it can take a while.
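
Example invocation (the script filename, page title and values are illustrative):

    python listnewwikis.py -page:"Project:New wikis" -languages:en,pl -count:200 -articles:20 -group -nodiff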
 
"""
#
# Distributed under the terms of the CC-BY-NC 3.0 license.
# License summary: http://creativecommons.org/licenses/by-nc/3.0/
# Full legal code: http://creativecommons.org/licenses/by-nc/3.0/legalcode
#
# Author: Karol "Nanaki" Dylewski [http://spolecznosc.wikia.com/wiki/Użytkownik:Nanaki]
# 
# Latest version cannot be found anywhere right now :)
#
 
import wikipedia as pywikibot
import sys, re
import urllib2, json
 
# DEFAULT SETTINGS
 
 
list_format = u'* %(url)s — %(comment)s'
list_format_replace = (ur'^(\*.*?)\s*[-–—]\s*$', ur'\1')
 
list_start = u'<!-- List start -->\n'
list_end = u'\n<!-- List end -->'
 
list_format_new = u'* %(url)s'
list_format_rx = u'\*\s*.*?(https?:\/\/(?P<url>[^\s\/]+(\.[a-zA-Z]+))\/?).*?\s*([-–—]\s*(?P<comment>.+))?$'
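# Illustrative round trip (the address below is an example, not a real wiki):
#   list_format % {'url': 'http://example.wikia.com/', 'comment': 'Example Wiki'}
#     -> u'* http://example.wikia.com/ — Example Wiki'
#   list_format_rx then parses such a line back into url='example.wikia.com'
#   and comment='Example Wiki'.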
 
number_group_label = u'0–9'
 
# Set this and it'll work on that wiki regardless of which wiki is set as default in user-config.py
# You can still override this with -family and -lang switches
 
url = 'http://community.wikia.com/wiki/Special:Newwikis?useskin=monobook&limit=%(limit)d&language=%(lang)s&offset=%(offset)s'
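# For example, url % {'limit': 500, 'lang': 'en', 'offset': ''} yields:
# http://community.wikia.com/wiki/Special:Newwikis?useskin=monobook&limit=500&language=en&offset=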
 
# And the rest of the config is stored on the wiki:
# Mediawiki:Ranking-bot-pages
 
# Default config structure - don't edit settings here 
 
def initialize():
    global langs, pagename, articles, users, wikicount, summary, group, savepage, processold, showfinaldiff
 
    langs = None
    pagename = None
    articles = None
    users = None
    wikicount = None
    summary = None
    group = None
    savepage = None
    processold = None
    showfinaldiff = True
 
    for arg in pywikibot.handleArgs():
        if arg.startswith(u'-languages:'): langs = arg[11:]
        elif arg.startswith(u'-page:'): pagename = arg[6:]
        elif arg.startswith(u'-articles:'): articles = int(arg[10:])
        elif arg.startswith(u'-users:'): users = int(arg[7:])
        elif arg.startswith(u'-count:'): wikicount = int(arg[7:])
        elif arg.startswith(u'-summary:'): summary = arg[9:]
        elif arg == u'-group': group = True
        elif arg == u'-nogroup': group = False
        elif arg == u'-save': savepage = True
        elif arg == u'-nosave': savepage = False
        elif arg == u'-old': processold = True
        elif arg == u'-noold': processold = False
        elif arg == u'-nodiff': showfinaldiff = False
 
    global json_cache
    json_cache = {
        'info': {},
        'stats': {},
    }
    global wikis
    wikis = {}
 
    global removed_count, added_count, updated_count
    removed_count = 0
    added_count = 0
    updated_count = 0
 
    global site
    site = pywikibot.getSite()
 
def main():
    initialize()
    global langs, summary, list_start, list_end, site, savepage, pagename
 
    if pagename is None:
        pagename = pywikibot.input('Page with the list to work with:')
    page = pywikibot.Page(site, pagename)

    old_text = page.get()
 
    text = get_between(old_text, [list_start, list_end])
 
    if langs: langs = langs.split(',')
    else: langs = [site.lang]
 
    processOldWikis(text)
 
    global wikicount
    count = input_number('Work on how many new wikis?%s' % (' (per language - %d langs defined)'%len(langs),'')[len(langs)==1], override=wikicount)
 
    list = []
    for ln in langs: list.extend(getNewWikis(count, ln))
 
    processNewWikis(list)
 
    global wikis
    text = compileList(wikis)
    new_text = put_between(old_text, [list_start, list_end], text)
    global removed_count, added_count, updated_count
 
    global showfinaldiff
    if showfinaldiff:
        pywikibot.output('-- DIFF ----------------------------------------------------')
        pywikibot.showDiff(old_text, new_text)
    pywikibot.output('------------------------------------------------------------')
    pywikibot.output('\03{lightaqua}%4d\03{default} wiki%s updated' % (updated_count, ('s','')[updated_count==1]))
    pywikibot.output('\03{lightgreen}%4d\03{default} wiki%s added' % (added_count, ('s','')[added_count==1]))
    pywikibot.output('\03{lightred}%4d\03{default} wiki%s removed' % (removed_count, ('s','')[removed_count==1]))
    pywikibot.output('------------------------------------------------------------')
 
    summary = summ({'updated': updated_count, 'added': added_count, 'removed': removed_count})
 
    if input_bool('Save the page?   \03{lightaqua}Summary:\03{default} %s\n' % summary, override = savepage):
        page.put(new_text, comment=summary)
 
def compileList(wikis):
    pywikibot.output('\n\03{lightpurple}====== \03{lightgreen}COMPILING LIST \03{lightpurple}======\03{default}')
    global list_format, group, number_group_label, list_format_replace
    lines = []
    list = wikis.values()
    qs(list, 'sort')
 
    group = input_bool(u'Group by first letter of url?\n', override=group)
    last_group = None
    for wiki in list:
        if group:
            try:
                letter = wiki['sort'][0].upper()
                int(letter)
                letter = number_group_label
            except ValueError:
                pass
            if last_group != letter:
                lines.extend([u'', u'== %s ==' % letter])
                last_group = letter
        lines.append(pywikibot.replaceExcept(list_format % wiki, list_format_replace[0], list_format_replace[1], []))
 
    return (u'\n'.join(lines)).strip()

def processOldWikis(text):
    global wikis, list_format_rx, langs
    wikis = {}
    pywikibot.output(u'\n\03{lightpurple}====== \03{lightgreen}PROCESSING CURRENT LIST \03{lightpurple}======\03{default}')
 
    re_format = re.compile(list_format_rx)
    re_sort = re.compile('^(('+('|'.join(map(re.escape,langs)))+')\.)?(.*)$')
 
    lines = text.split('\n')
    all_choice = None
    for line in lines:
        if not line.startswith('*'): continue
        match = re_format.search(line)
 
        if not match:
            if all_choice == None:
                choice = pywikibot.inputChoice("\nCouldn't parse this line:\n\03{lightyellow}%s\03{default}\nWant to do it manually or remove from list?" % line, ['Add','Remove','Add all','Remove all'], ['A','R','d','e'], 'R')
                if choice == 'd': all_choice = 'a'
                if choice == 'e': all_choice = 'r'
 
            if all_choice != None: choice = all_choice
            if choice == 'r':
                continue
            addr = pywikibot.input('Address (without http:// - just "xxxxx.wikia.com"):')
            comment = pywikibot.input('                                           Comment:')
        else:
            addr = match.group('url')
            comment = match.group('comment')
        # Sorting key
        sort = re_sort.search(addr).group(3)
 
        if not comment: comment = ''
        wikis[addr] = {
            'sort': sort,
            'url': 'http://%s/'%addr,
            'comment': u'%s'%comment,
        }
 
    pywikibot.output('Found \03{lightaqua}%d\03{default} wikis currently on the list' % len(wikis))
 
    global processold
    if input_bool("Do you want to process these wikis like new ones? (check article count, update url if changed - will take some time)", default='n', override=processold):
        global updated_count, removed_count
 
        arts = input_number("Minimum article count?     ")
        usrs = input_number("Minimum active user count? ")
 
        newwikis = {}
        all_choice_art = None
        all_choice_usr = None
 
        count = len(wikis)
        format = '[%%0%dd/%%0%dd]' % (len(str(count)), len(str(count)))
        no = 0
        for k, v in wikis.iteritems():
            no = no + 1
            pywikibot.output('\n%s   \03{lightyellow}%s\03{default}' % (format%(no,count), k))
            loop = 0
            cont = True
            while True:
                loop = loop+1
                try: statinfo = get_wiki_statinfo(k)
                except (RequestError, JSONError), e:
                    choice = pywikibot.inputChoice("\nCouldn't connect to \03{lightaqua}%s\03{default}. Reason: \03{lightyellow}%s\03{default}\n" % (k, e), ['Try again (another 5 times)', 'Leave as is', 'Remove from list', 'Fix the address'], ['T','L','R', 'F'], ('l','t')[loop==1])
                    if choice == 't': continue
                    if choice == 'f':
                        k = pywikibot.input('Input new address (just xxxxx.wikia.com)  ')
                        continue
                    if choice == 'l': newwikis[k] = v
                    break
                except InvalidWiki, e:
                    pywikibot.output('\n\03{lightaqua}%s\03{default}: %s' % (k, ('Wiki doesn\'t exist','Wiki closed')[e.closed]))
                    break
                cont = False
                break
            if cont: continue
 
            url = statinfo['info']['server']
            if not url.endswith('/'): url = url+'/'
            name = statinfo['info']['sitename']
            artcount = int(statinfo['stats']['articles'])
            usrcount = int(statinfo['stats']['activeusers'])
 
            artcount_color = '%d' % artcount
            usrcount_color = '%d' % usrcount
 
            if artcount < arts: artcount_color = '\03{lightred}%d\03{default}' % artcount
            if usrcount < usrs: usrcount_color = '\03{lightred}%d\03{default}' % usrcount
 
            addr = re.compile('(https?:\/\/)(?P<url>[^\s\/]+(\.[a-zA-Z]+))\/?').search(url).group('url')
 
            pywikibot.output('\n \03{white}URL      :\03{default} %s\n \03{white}Name     :\03{default} %s\n \03{white}Articles :\03{default} %s\n \03{white}Users    :\03{default} %s' % (url, name, artcount_color, usrcount_color))
 
            if artcount < arts:
                if all_choice_art == None:
                    choice = pywikibot.inputChoice("\nWiki's article count (%d) is below threshold (%d). Remove it from list?" % (artcount, arts), ['Yes','No','Yes to all','No to all'], ['Y','N','a','o'], 'Y')
                    if choice == 'a': all_choice_art = 'y'
                    if choice == 'o': all_choice_art = 'n'
 
                if all_choice_art != None: choice = all_choice_art
                if choice == 'y':
                    removed_count = removed_count + 1
                    continue
 
            if usrcount < usrs:
                if all_choice_usr == None:
                    choice = pywikibot.inputChoice("\nWiki's active user count (%d) is below threshold (%d). Remove it from list?" % (usrcount, usrs), ['Yes','No','Yes to all','No to all'], ['Y','N','a','o'], 'Y')
                    if choice == 'a': all_choice_usr = 'y'
                    if choice == 'o': all_choice_usr = 'n'
 
                if all_choice_usr != None: choice = all_choice_usr
                if choice == 'y':
                    removed_count = removed_count + 1
                    continue
 
            # Sorting key
            sort = re_sort.search(addr).group(3)
 
            if addr != k: updated_count = updated_count + 1
 
            if not v['comment']: v['comment'] = ''
            newwikis[addr] = {
                'sort': sort,
                'url': 'http://%s/'%addr,
                'comment': '%s'%v['comment'],
            }
        wikis = newwikis

def processNewWikis(list):
    global wikis
    pywikibot.output('\n\03{lightpurple}====== \03{lightgreen}PROCESSING NEW WIKI LIST \03{lightpurple}======\03{default}')
    pywikibot.output('Working on \03{lightaqua}%d\03{default} wikis' % len(list))
 
    pywikibot.output('\nAPI seems to return "-1" active users for the most recently created wikis (caching, presumably). Since they will most likely have about 1 article, there\'s no need to worry. If you really want to add them, just type -1 as the minimum active user count.')
 
    global articles, users
    arts  = input_number("Minimum article count?     ", override=articles)
    usrs = input_number("Minimum active user count? ", override=users)
 
    re_sort = re.compile('^(('+('|'.join(map(re.escape,langs)))+')\.)?(.*)$')
    count = len(list)
    format = '[%%0%dd/%%0%dd]' % (len(str(count)), len(str(count)))
    no = 0
    global added_count
    for wiki in list:
        no = no + 1
        pywikibot.output('\n%s   \03{lightyellow}%s\03{default}' % (format%(no,count), wiki))
        try:
            statinfo = get_wiki_statinfo(wiki)
        except (RequestError, JSONError), e:
            pywikibot.output('\03{lightred}SKIPPING\03{default}     Error: %s' % e.value)
            continue
        except InvalidWiki, e:
            pywikibot.output('\03{lightred}SKIPPING\03{default}     %s' % ('Wiki doesn\'t exist','Wiki closed')[e.closed])
            continue
 
        url = statinfo['info']['server']
        if not url.endswith('/'): url = url+'/'
        name = statinfo['info']['sitename']
        artcount = int(statinfo['stats']['articles'])
        usrcount = int(statinfo['stats']['activeusers'])
 
        addr = re.compile('(https?:\/\/)(?P<url>[^\s\/]+(\.[a-zA-Z]+))\/?').search(url).group('url')
 
        if addr in wikis:
            pywikibot.output('\03{lightred}SKIPPING\03{default}     Wiki already on the list')
            continue
 
        artcount_color = '%d' % artcount
        usrcount_color = '%d' % usrcount
 
        if artcount < arts: artcount_color = '\03{lightred}%d\03{default}' % artcount
        if usrcount < usrs: usrcount_color = '\03{lightred}%d\03{default}' % usrcount
 
        pywikibot.output('   \03{white}URL      :\03{default} %s\n   \03{white}Name     :\03{default} %s\n   \03{white}Articles :\03{default} %s\n   \03{white}Users    :\03{default} %s' % (url, name, artcount_color, usrcount_color))
 
        if artcount < arts:
            pywikibot.output('\03{lightred}SKIPPING\03{default}     Not enough articles')
            continue
 
        if usrcount < usrs:
            pywikibot.output('\03{lightred}SKIPPING\03{default}     Not enough active users')
            continue
 
        # Sorting key
        sort = re_sort.search(addr).group(3)
 
        pywikibot.output('\03{lightgreen}ADDING WIKI TO LIST\03{default}')
        added_count = added_count + 1
        wikis[addr] = {
            'sort': sort,
            'url': 'http://%s/'%addr,
            'comment': '',
        }
 
 
def getNewWikis(count, lang):
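    """Scrape Special:Newwikis for up to count new wiki addresses in lang.

    Requests batches of at most 500 entries, follows the rel="next" link
    for the next offset and skips qatestwikiNNN.wikia.com test wikis.
    """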
    count = int(count)
 
    list = []
    offset = False
    while count > 0:
        limit = min(count, 500)
        count = count - limit
 
        if offset:
            obj = {'lang':lang, 'limit':limit, 'offset': offset}
            pywikibot.output('Requesting %(limit)s new wikis in "%(lang)s" language. Offset: %(offset)s' % obj);
            text = request(url % obj)
        else:
            obj = {'lang':lang, 'limit':limit, 'offset': ''}
            pywikibot.output('Requesting %(limit)s new wikis in "%(lang)s" language.' % obj);
            text = request(url % obj)
 
        text = text[re.compile(u'<div[^>]+id\s*=\s*(?P<quote>[\'"])\s*mw-content-text\s*(?P=quote)[^>]*>', re.I).search(text).end():re.compile('<div[^>]+id\s*=\s*(?P<quote>[\'"])\s*catlinks\s*(?P=quote)[^>]*>', re.I).search(text).start()]
 
        match = re.compile(u'<a[^>]+href\s*=\s*(?P<quote>[\'"])\s*\S+offset=([0-9]+)\S+\s*(?P=quote)[^>]+rel\s*=\s*(?P<quote2>[\'"])\s*next\s*(?P=quote2)[^>]*>', re.I).search(text)
        if match: offset = int(match.group(2))
 
        text = text[re.compile(u'<ul[^>]*>\s*<li[^>]*>', re.I).search(text).end():re.compile('</li[^>]*>\s*</ul[^>]*>', re.I).search(text).start()]
 
        part = re.split(u'</li[^>]*>\s*<li[^>]*>', text, 0, re.I)
        for rec in part:
            wikiurl = re.compile(u'<a[^>]+href\s*=\s*(?P<quote>[\'"])\s*(\S+)\s*(?P=quote)[^>]*>', re.I).search(rec).group(2)
            wikiurl = re.compile(u'https?\:\/\/([^\/]+)(\/.*)?', re.I).search(wikiurl).group(1)
 
            match = re.compile(u'qatestwiki[0-9]+\.wikia\.com').search(wikiurl)
 
            if match: continue
            list.append(wikiurl)
    return list
 
def request(url, tries=5):
    from socket import error as socket_error
    response = ''
    x = 0
    while x<tries:
        try:
            if response: break
            response = urllib2.urlopen(url)
        except urllib2.HTTPError: x += 1
        except socket_error: x += 1
        except urllib2.URLError, e: raise RequestError(u'URLError: %s' % unicode(e.reason))
 
    if not response: raise RequestError(u'No response')
 
    response = response.read()
    if response == '': raise RequestError('Empty response')
    return response

def json_from_url(url, tries=5):
    response = request(url, tries=tries)
 
    if response.find('page-Special_CloseWiki') >= 0: raise InvalidWiki(url,True)
    if response.find('page-Community_Central_Not_a_valid_Wikia') >= 0: raise InvalidWiki(url)
 
    try:
        obj = json.loads(response)
    except ValueError:
        raise JSONError('No JSON object could be decoded')
 
    return obj
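
# The getters below cache siteinfo API responses. The response shape is the
# standard MediaWiki one (values here are illustrative):
#   {"query": {"general":    {"sitename": "Example Wiki", "server": "http://example.wikia.com", ...},
#              "statistics": {"articles": 42, "activeusers": 3, ...}}}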
def get_wiki_info(address):
    global json_cache
    if address in json_cache['info']: return json_cache['info'][address]
    pywikibot.output(u'JSON: Fetching info about [%s]' % address)
    url = 'http://%s/api.php?action=query&meta=siteinfo&siprop=general&format=json' % address
    json_cache['info'][address] = json_from_url(url)['query']['general']
    return json_cache['info'][address]

def get_wiki_stats(address):
    global json_cache
    if address in json_cache['stats']: return json_cache['stats'][address]
    pywikibot.output(u'JSON: Fetching statistics for [%s]' % address)
    url = 'http://%s/api.php?action=query&meta=siteinfo&siprop=statistics&format=json' % address
    json_cache['stats'][address] = json_from_url(url)['query']['statistics']
    return json_cache['stats'][address]

def get_wiki_statinfo(address):
    global json_cache
    if address in json_cache['stats']: stats = json_cache['stats'][address]
    else: stats = None
    if address in json_cache['info']: info = json_cache['info'][address]
    else: info = None
    if stats is None and info is None:
        # Fetch both in a single request
        url = 'http://%s/api.php?action=query&meta=siteinfo&siprop=general|statistics&format=json' % address
        data = json_from_url(url)['query']
        json_cache['stats'][address] = data['statistics']
        json_cache['info'][address] = data['general']
    elif stats is None: stats = get_wiki_stats(address)
    elif info is None: info = get_wiki_info(address)

    return {'info': json_cache['info'][address], 'stats': json_cache['stats'][address]}

def put_between(text, tag, what):
    start = text.find(tag[0])
    end = text.find(tag[1])
    if start != -1 and end != -1:
        start += len(tag[0])
        text = text[:start] + what + text[end:]
    else:
        raise TagsNotFound(tag, [start != -1,end != -1])
    return text

def get_between(text, tag):
    start = text.find(tag[0])
    end = text.find(tag[1])
    if start != -1 and end != -1:
        start += len(tag[0])
        return text[start:end]
    else:
        raise TagsNotFound(tag, [start != -1, end != -1])
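
# A minimal sketch of how the tag helpers behave (sample text is illustrative):
#   get_between(u'a<!-- List start -->\nX\n<!-- List end -->b', [list_start, list_end])
#     -> u'X'
#   put_between(u'a<!-- List start -->\nX\n<!-- List end -->b', [list_start, list_end], u'Y')
#     -> u'a<!-- List start -->\nY\n<!-- List end -->b'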
 
def input_bool(text, answers = ['Yes', 'No'], hotkeys = ['Y','N'], isTrue = 'y', default = 'y', override = None):
    if override != None:
        pywikibot.output(u'%s \03{lightgreen}%s\03{default}' % (text, bool(override)))
        return bool(override)
    choice = pywikibot.inputChoice(text, answers, hotkeys, default)
    if isTrue.lower() == choice: return True
    return False

def input_number(text, override = None):
    if override != None:
        pywikibot.output(u'%s \03{lightgreen}%s\03{default}' % (text, override))
        return override
    while True:
        try:
            value = int(pywikibot.input(text))
        except ValueError:
            pywikibot.output(u'Numbers, please!')
            continue
        break
    return value

# Exceptions
class TagsNotFound(Exception):
    def __init__(self, tags, flags):
        self.tags = tags
        self.flags = flags
    def __str__(self):
        if not self.flags[0] and not self.flags[1]:
            return u'Tags not found: "%s" <-> "%s". Couldn\'t find both tags' % (re.escape(self.tags[0]), re.escape(self.tags[1]))
        if not self.flags[0]:
            return u'Tags not found: "%s" <-> "%s". Couldn\'t find starting tag' % (re.escape(self.tags[0]), re.escape(self.tags[1]))
        if not self.flags[1]:
            return u'Tags not found: "%s" <-> "%s". Couldn\'t find ending tag' % (re.escape(self.tags[0]), re.escape(self.tags[1]))
        return 'How the hell did you get here oO'
class InvalidWiki(Exception):
    def __init__(self,url,closed=False):
        self.url = url
        self.closed = closed
    def __str__(self):
        if self.closed:
            return u'Wiki has been closed: %s' % self.url
        else:
            return u'Wiki not found: %s' % self.url
class JSONError(Exception):
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return self.value
class RequestError(Exception):
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return self.value
 
# Quick sort
def qs(l, x):
    qsr(l, 0, len(l) - 1, x)
    return l

def qsr(l, s, e, x):
    if e > s:
        p = qsp(l, s, e, x)
        qsr(l, s, p - 1, x)
        qsr(l, p + 1, e, x)

def qsp(l, s, e, x):
    # Median-of-three pivot selection: order l[s], l[a], l[e],
    # then move the median to the front as the pivot.
    a = (s + e) // 2
    if x is not None:
        if l[s][x] > l[a][x]:
            l[s], l[a] = l[a], l[s]
        if l[s][x] > l[e][x]:
            l[s], l[e] = l[e], l[s]
        if l[a][x] > l[e][x]:
            l[a], l[e] = l[e], l[a]
        l[a], l[s] = l[s], l[a]
    else:
        if l[s] > l[a]:
            l[s], l[a] = l[a], l[s]
        if l[s] > l[e]:
            l[s], l[e] = l[e], l[s]
        if l[a] > l[e]:
            l[a], l[e] = l[e], l[a]
        l[a], l[s] = l[s], l[a]

    # Partition around the pivot at l[p]
    p = s
    i = s + 1
    j = e

    if x is not None:
        while True:
            while i <= e and l[i][x] <= l[p][x]:
                i += 1
            while j >= s and l[j][x] > l[p][x]:
                j -= 1
            if i >= j:
                break
            l[i], l[j] = l[j], l[i]
    else:
        while True:
            while i <= e and l[i] <= l[p]:
                i += 1
            while j >= s and l[j] > l[p]:
                j -= 1
            if i >= j:
                break
            l[i], l[j] = l[j], l[i]

    l[j], l[p] = l[p], l[j]
    return j
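
# Note: qs(list, 'sort') orders the wikis the same way the built-in sort would;
# a sketch of the equivalent call (not what the script actually does):
#   list.sort(key=lambda wiki: wiki['sort'])
# Ties may come out in a different order, since this quicksort is not stable.
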
def summ(stats):
    global i16n_summary, i16n_stats, site
    lang = site.lang
 
    more = []
    for k, v in stats.iteritems():
        if v == 0: continue
        if lang in i16n_stats: more.append(i16n_stats[lang][k] % v)
        else: more.append(i16n_stats['en'][k] % v)
 
    if len(more): more = '('+(', '.join(more))+')'
    else: more = ''
 
    if lang in i16n_summary: return i16n_summary[lang] % {'stats':more}
    else: return i16n_summary['en'] % {'stats':more}

i16n_summary = {
    'en': u"Robot: Updating wiki list %(stats)s",
    'pl': u"Robot aktualizuje listę wiki %(stats)s",
    'ru': u"Обновление списка вики с помощью бота %(stats)s",
    'uk': u"Оновлення переліку вікі за допомогою бота %(stats)s",
}
i16n_stats = {
    'en': {
       'updated': u"updated: %d",
       'added': u"added: %d",
       'removed': u"removed: %d",
    },
    'pl': {
       'updated': u"poprawione: %d",
       'added': u"dodane: %d",
       'removed': u"usunięte: %d",
    },
    'ru': {
       'updated': u"обновлено: %d",
       'added': u"добавлено: %d",
       'removed': u"удалено: %d",
    },
    'uk': {
       'updated': u"оновлено: %d",
       'added': u"додано: %d",
       'removed': u"усунено: %d",
    },
}
 
if __name__ == "__main__":
    main()
    pywikibot.stopme()
    sys.exit(0)
