Misplaced Pages

User:Betacommand/Sandbox

Article snapshot taken from Wikipedia with the Creative Commons Attribution-ShareAlike license. Give it a read and then ask your questions in the chat. We can research this topic together.
< User:Betacommand

This is an old revision of this page, as edited by Betacommand (talk | contribs) at 17:49, 23 March 2007. The present address (URL) is a permanent link to this revision, which may differ significantly from the current revision.

Revision as of 17:49, 23 March 2007 by Betacommand (talk | contribs)(diff) ← Previous revision | Latest revision (diff) | Newer revision → (diff)

#!/usr/bin/python
# -*- coding: utf-8 -*-
# coded by Betacommand
#
# Removes external links pointing at a given website from article pages.
#
# NOTE(review): this source was recovered from a wiki-rendered snapshot, so
# every span MediaWiki treated as markup ("[...]" / "[[...]]") was stripped
# out of the code (regex character classes, list literals, subscripts, the
# page-generator expression, and the edit-summary links).  Each reconstructed
# span below is marked NOTE(review) -- confirm against the original revision.

import sys
import re

import wikipedia
import pagegenerators
import catlib
import editarticle


def main():
    """Prompt for a website and strip matching external links from articles.

    Interactive: shows a diff per page and asks before saving, with an
    "accept all" option carried over from replace.py.
    """
    acceptall = False  # replace.py variable that I havent fixed yet
    website = ''
    site = wikipedia.getSite()

    # code chopped from speedy_delete.py
    if website == '':
        website = wikipedia.input(u'Please enter the website to be removed:\n')
    # end chopped code

    print(website)
    address = '*.' + website
    # NOTE(review): websitepage was unused in the recovered text; it may have
    # fed the lost page-generator expression -- kept for fidelity.
    websitepage = wikipedia.Page(site, website)
    # NOTE(review): the edit summary originally linked four policy pages
    # ("[[WP:EL]]"-style); the link targets were stripped by extraction, so
    # the literal is preserved exactly as recovered.
    wikipedia.setAction('removing inappropriate link per ], ], ], and ] ')

    # NOTE(review): the expression that collected pages was lost in extraction
    # (the snapshot reads "sorted(set())").  A linksearch over the site is the
    # most plausible source given `address` above -- confirm against history.
    pages = sorted(set(site.linksearch(address)))

    for page in pages:
        if page.namespace() == 0:  # article (main) namespace only
            pg = page.get()
            # NOTE(review): "[Ee]xternal [Ll]inks" character classes were
            # stripped ("xternal inks" in the snapshot); reconstructed as a
            # case-tolerant section-header match.
            if re.search(r"={1,}\s?[Ee]xternal [Ll]inks?\s?={1,}", pg):
                # The capturing group keeps the header in the split result:
                # parts[0] = text before, parts[1] = header, parts[2] = rest.
                parts = re.split(r"(={1,}\s?[Ee]xternal [Ll]inks?\s?={1,})", pg)
                lastpart = parts[2]
                # NOTE(review): the link-removal pattern was garbled to
                # "'\*??\|)\n'"; reconstructed to drop any bullet line that
                # mentions the target website.
                regex = r'\*.*' + re.escape(website) + r'.*\n'
                lastpart = re.sub(regex, '', lastpart)
                if re.search("(www|http|ftp)", lastpart):
                    # Other external links remain; keep the section header.
                    pass
                else:
                    # Section is now empty -- drop its header as well.
                    # NOTE(review): the recovered text ran the header-removal
                    # regex over `lastpart` (a no-op, since the header sits in
                    # parts[1]); blanking parts[1] is the evident intent.
                    parts[1] = ''
                new_text = parts[0] + parts[1] + lastpart
                if len(new_text) != len(pg):
                    # the next part was copied from replace.py
                    wikipedia.output(u'\n>>> %s <<<' % page.title())
                    wikipedia.showDiff(pg, new_text)
                    if not acceptall:
                        # NOTE(review): the choice lists were stripped by
                        # extraction; reconstructed from replace.py's idiom.
                        choice = wikipedia.inputChoice(
                            u'Do you want to delete the link?',
                            ['Yes', 'No', 'All'], ['y', 'n', 'a'], 'N')
                        if choice in ['a', 'A']:
                            acceptall = True
                    if acceptall or choice in ['y', 'Y']:
                        try:
                            page.put(new_text)
                        except wikipedia.EditConflict:
                            wikipedia.output(
                                u'Skipping %s because of edit conflict'
                                % (page.title()))
                        # "as" form is valid on Py2.6+ and Py3; the recovered
                        # text used the Py2-only comma form.
                        except wikipedia.SpamfilterError as url:
                            wikipedia.output(
                                u'Cannot change %s because of blacklist entry %s'
                                % (page.title(), url))
                    # my code again
                else:
                    pass  # no change produced for this page
            else:
                print("Page doesn't contain \"External links\" section")


if __name__ == '__main__':
    try:
        main()
    finally:
        # Always release the pywikipedia throttle, even on error/interrupt.
        wikipedia.stopme()