Botnettoyage

De Wikipast
Aller à la navigation Aller à la recherche

Code

# -*- coding: utf-8 -*-

import urllib2
import requests
from bs4 import BeautifulSoup

# Wiki credentials and API endpoint.
user='XXXX'
passw='XXXX'
baseurl='http://wikipast.epfl.ch/wikipast/'

# Step 1: ask the API for a login token (anonymous request).
token_request = {
    'action': 'query', 'format': 'json', 'utf8': '',
    'meta': 'tokens', 'type': 'login',
}
r1 = requests.post(baseurl + 'api.php', data=token_request)
login_token = r1.json()['query']['tokens']['logintoken']

# Step 2: perform the actual login using that token plus the credentials,
# carrying over the cookies from the token request.
login_request = {
    'action': 'login', 'format': 'json', 'utf8': '',
    'lgname': user, 'lgpassword': passw, 'lgtoken': login_token,
}
r2 = requests.post(baseurl + 'api.php', data=login_request, cookies=r1.cookies)

# Step 3: fetch a CSRF ("edit") token for the now-authenticated session.
r3 = requests.get(baseurl + 'api.php' + '?format=json&action=query&meta=tokens&continue=',
                  cookies=r2.cookies)
edit_token = r3.json()['query']['tokens']['csrftoken']

# Merge the session cookies; edit_cookie accompanies every privileged
# request (page deletions) later in the script.
edit_cookie = r2.cookies.copy()
edit_cookie.update(r3.cookies)

# Load the whitelist of protected logins (one per line) from disk.
# Pages created by these accounts must never be deleted.
nom_fichier='protected_logins.txt'
# Open in binary and decode explicitly: works identically on Python 2
# (read() returns bytes-like str) and Python 3 (text-mode str has no
# .decode). The 'with' block guarantees the file is closed even on error.
with open(nom_fichier, 'rb') as fichier:
    text = fichier.read().decode('utf-8')
# splitlines() ignores a trailing newline, and — unlike split('\n')[:-1] —
# does not silently drop the last login when the file has no final newline.
protected_logins = text.splitlines()
protected_logins_found=[]

# Enumerate every registered user on the wiki, following the API's
# "continue" pagination. Protected logins are tallied separately so the
# whitelist can be verified below; every other account is a deletion
# candidate. (Single loop replaces the original duplicated
# request/parse/classify code before and inside the while.)
all_logins_suppr=[]
aufrom = None  # pagination cursor; None on the first request
while True:
    url = baseurl + 'api.php?action=query&list=allusers&aulimit=500&format=xml'
    if aufrom is not None:
        url += '&aufrom=' + aufrom
    result = requests.post(url)
    soup = BeautifulSoup(result.text, 'lxml')
    # Each <u> element is one user record; classify by whitelist membership.
    for primitive in soup.findAll("u"):
        if primitive["name"] in protected_logins:
            protected_logins_found.append(primitive["name"])
        else:
            all_logins_suppr.append(primitive["name"])
    # A <continue> element means more results remain.
    cont = soup.find("continue")
    if cont is None:
        break
    aufrom = cont["aufrom"]

print('len(protected_logins)='+str(len(protected_logins)))
print('len(protected_logins_found)='+str(len(protected_logins_found)))
# Safety check: proceed only if every whitelisted login was actually seen in
# the user enumeration; otherwise the whitelist and the wiki are out of sync
# and deleting anything would be unsafe.
if len(protected_logins)==len(protected_logins_found):
    print('ok')
    liste_pages_suppr=[]
    # Collect every page whose creation is attributed to a non-protected user.
    for user in all_logins_suppr:
        # Let requests URL-encode the query: user names may contain spaces
        # or non-ASCII characters that would corrupt a hand-built URL.
        result = requests.post(baseurl + 'api.php',
                               params={'action': 'query',
                                       'list': 'usercontribs',
                                       'ucuser': user,
                                       'format': 'xml'})
        soup=BeautifulSoup(result.content,'lxml')
        for primitive in soup.usercontribs.findAll('item'):
            # MediaWiki's auto-generated edit summary for a page creation
            # starts with "Page créée" on this French-language wiki.
            if primitive['comment'].startswith(u'Page créée'):
                liste_pages_suppr.append(primitive['title'])
                print(primitive['title'])

    # Delete each collected page with the CSRF token obtained at login.
    for page_title in liste_pages_suppr:
        payload={'action':'delete','assert':'user','title':page_title,'reason':'Spam detected and removed','token':edit_token}
        r4=requests.post(baseurl+'api.php',data=payload,cookies=edit_cookie)
        print(r4.text)

else:
    print('error')