User:R. Hillgentleman/scripta
Appearance
testReadWriteBeta.py
[edit]
# Load the text from Wikiversity:Sandbox and append it to Wikiversity talk:Sandbox.
import wikipedia
# Define the main function
def main():
    """Copy [[Wikiversity:sandbox]] onto [[wikiversity talk:sandbox]].

    Loads the sandbox text and appends a greeting, then loads the current
    talk-page text, appends another greeting, prepends it to the sandbox
    text, and saves the result to the talk page as a minor bot edit.
    A missing page is treated as empty; a redirect or any other framework
    error aborts without saving.
    """
    site = wikipedia.getSite()

    # --- read the source page -------------------------------------------
    pagename = 'Wikiversity:sandbox'
    page = wikipedia.Page(site, pagename)
    wikipedia.output(u"Loading %s..." % pagename)  # the "u" keeps the message unicode
    try:
        # text = page.get() <-- is the same (these are the default values)
        text = page.get(force=False, get_redirect=True, throttle=True,
                        sysop=False, nofollow_redirects=False,
                        change_edit_time=True)
    except wikipedia.NoPage:          # first except: page does not exist yet
        text = ''
    except wikipedia.IsRedirectPage:  # second except: refuse to edit a redirect
        wikipedia.output(u'%s is a redirect!' % pagename)
        # return instead of exit(): exit() is only guaranteed in interactive
        # sessions (site module); returning lets the caller's finally block
        # run wikipedia.stopme() exactly once.
        return
    except wikipedia.Error:           # third except: report and stop
        wikipedia.output(u"Some Error, skipping..")
        return
    newtext = text + '\nHello, World!'

    # --- read the target page -------------------------------------------
    pagename1 = 'wikiversity talk:sandbox'
    page1 = wikipedia.Page(site, pagename1)
    # Announce the load BEFORE fetching (the original printed this message
    # only after page1.get() had already completed).
    wikipedia.output(u"Loading %s..." % pagename1)
    try:
        # text1 = page1.get() <-- is the same
        text1 = page1.get(force=False, get_redirect=True, throttle=True,
                          sysop=False, nofollow_redirects=False,
                          change_edit_time=True)
    except wikipedia.NoPage:
        text1 = ''
    except wikipedia.IsRedirectPage:
        wikipedia.output(u'%s is a redirect!' % pagename1)
        return
    except wikipedia.Error:
        wikipedia.output(u"Some Error, skipping..")
        return
    text1 = text1 + '\nWorld, hello!\n\n\n\n'
    newtext = text1 + newtext

    # --- write the result ------------------------------------------------
    # page1.put(newtext, 'Bot: Test') <-- is the same
    page1.put(newtext,
              comment='Mechanical Test: [[wikiversity:sandbox]] -> [[wikiversity talk:sandbox]]',
              watchArticle=None, minorEdit=True)
# Script entry point.
if __name__ == '__main__':
    try:
        main()
    finally:
        # Always shut the framework down (release throttle/login state),
        # no matter how main() finished.
        wikipedia.stopme()
testGetReferencesBeta.py
[edit]
import wikipedia
# Print the title of every page that links to (or transcludes) the
# research-guidelines page, then shut the framework down.
page = wikipedia.Page(wikipedia.getSite(), 'Wikiversity:Research guidelines')
references = page.getReferences(follow_redirects=True,
                                withTemplateInclusion=True,
                                onlyTemplateInclusion=False,
                                redirectsOnly=False)
for referring_page in references:
    wikipedia.output(referring_page.title())
wikipedia.stopme()
testGetReferencesWriteBeta.py
[edit]
Run time: about half a minute to one minute.
import wikipedia
# Define the function entry
def write_entry(k):
    """Append the text *k* to [[wikiversity:sandbox]] as a minor bot edit.

    Loads the current sandbox text first; a missing page is treated as
    empty, while a redirect or any other framework error aborts without
    saving.
    """
    site = wikipedia.getSite()
    pagename = 'wikiversity:sandbox'
    page = wikipedia.Page(site, pagename)
    wikipedia.output(u"Loading %s..." % pagename)  # the "u" keeps the message unicode
    try:
        # text = page.get() <-- is the same
        text = page.get(force=False, get_redirect=False, throttle=True,
                        sysop=False, nofollow_redirects=False,
                        change_edit_time=True)
    except wikipedia.NoPage:          # first except: page does not exist yet
        text = ''
    except wikipedia.IsRedirectPage:  # second except: refuse to edit a redirect
        wikipedia.output(u'%s is a redirect!' % pagename)
        # return instead of exit(): exit() is only guaranteed in interactive
        # sessions; returning leaves wikipedia.stopme() to the caller.
        return
    except wikipedia.Error:           # third except: report and stop
        wikipedia.output(u"Some Error, skipping..")
        return
    newtext = text + '\n' + k
    # page.put(newtext, 'Bot: Test') <-- is the same
    page.put(newtext,
             comment='Mechanical test: get references to [[wikiversity:Research guidelines]] and dump them on [[wikiversity:sandbox]]',
             watchArticle=None, minorEdit=True)
# First pass: echo every referring page title to the console.
page = wikipedia.Page(wikipedia.getSite(), 'Wikiversity:Research guidelines')
for pagetoparse in page.getReferences(follow_redirects=True,
                                      withTemplateInclusion=True,
                                      onlyTemplateInclusion=False,
                                      redirectsOnly=False):
    wikipedia.output(pagetoparse.title())

# Second pass (default getReferences() arguments, as in the original):
# collect the titles into one blank-line-separated string and append it
# to the sandbox.
parts = []
for reference in page.getReferences():
    parts.append('\n\n' + reference.title())
write_entry(''.join(parts))
wikipedia.stopme()
pywikiBoilerplate.py
[edit]
import wikipedia
# --- basic page read/write boilerplate ----------------------------------
site = wikipedia.getSite()
page = wikipedia.Page(site, u"pageName")

# Fetch the wikitext (following redirects):
text = page.get(get_redirect=True)

# Save new wikitext with an edit summary:
page.put(u"newText", u"Edit comment")

# --- CategoryPageGenerator ----------------------------------------------
# NOTE(review): this snippet references catlib and pagegenerators, which
# are not imported here — add those imports before running it.
site = wikipedia.getSite()
cat = catlib.Category(site, 'Category:Living people')
gen = pagegenerators.CategorizedPageGenerator(cat)
for page in gen:
    # Do something with the page object, for example:
    text = page.get()
getCategoryWriteBeta.py
[edit]
Get the pages in Category:ZH and dump the page names onto Wikiversity:Sandbox.
import wikipedia
import pagegenerators
import catlib

site = wikipedia.getSite()

# Collect the title of every page in [[Category:ZH]].
cat = catlib.Category(site, 'Category:ZH')
gen = pagegenerators.CategorizedPageGenerator(cat)
# 'titles' instead of the original 'list' (which shadowed the builtin);
# a single join() replaces the quadratic += concatenation and produces
# the byte-identical '\n'-prefixed string.
titles = ''.join('\n' + page.title() for page in gen)

# Print the list (best effort: a framework error only skips the echo).
try:
    wikipedia.output(titles)
except wikipedia.Error:
    wikipedia.output(u"Some error, skipping..")

# Append the list to the end of [[wikiversity:sandbox]].
sandbox = wikipedia.Page(site, u"wikiversity:sandbox")
sandboxtext = sandbox.get(get_redirect=True)
sandboxtext = sandboxtext + '\n' + titles
sandbox.put(sandboxtext,
            comment='Mechanical test: get pages in [[Category:ZH]] and dump them on [[wikiversity:sandbox]]',
            watchArticle=None, minorEdit=True)

# Shut the framework down, as the other scripts on this page do
# (the original omitted this).
wikipedia.stopme()
n
[edit]