author     Po-Chuan Hsieh <sunpoet@FreeBSD.org>  2022-03-25 13:35:28 +0000
committer  Po-Chuan Hsieh <sunpoet@FreeBSD.org>  2022-03-25 13:38:26 +0000
commit     2111cfd73ef2ce166a84b465f32822b88c52a362 (patch)
tree       884b3e688d9b61e0d8edc0d745a6264643735793
parent     06e817c53f61d9d6921a6b0f48cc74dc45134a62 (diff)
www/py-wikitools: Fix build with setuptools 58.0.0+
With hat: python
-rw-r--r--  www/py-wikitools/files/patch-2to3  235
1 file changed, 235 insertions(+), 0 deletions(-)
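
Note: setuptools 58.0.0 removed the long-deprecated use_2to3 build hook, so the Python 2-only wikitools sources are no longer converted at build time and the port fails to build. The patch below applies the 2to3 output ahead of time instead. As a quick orientation, the following sketch (illustrative only, not part of the patch) summarizes the module renames it performs:

    # Illustrative summary of the Python 2 -> 3 renames this patch applies.
    # Each import is the Python 3 spelling; the Python 2 original is noted
    # in the trailing comment.
    import urllib.request, urllib.error, urllib.parse  # was: urllib2 / urllib
    from urllib.parse import quote_plus, urlparse      # was: urllib / urlparse
    import http.cookiejar                               # was: cookielib
    import io                                           # was: StringIO
    import pickle                                       # was: cPickle fallback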
diff --git a/www/py-wikitools/files/patch-2to3 b/www/py-wikitools/files/patch-2to3
new file mode 100644
index 000000000000..9985b3d3e565
--- /dev/null
+++ b/www/py-wikitools/files/patch-2to3
@@ -0,0 +1,235 @@
+--- wikitools/api.py.orig 2010-02-20 21:21:28 UTC
++++ wikitools/api.py
+@@ -15,11 +15,11 @@
+ # You should have received a copy of the GNU General Public License
+ # along with wikitools. If not, see <http://www.gnu.org/licenses/>.
+
+-import urllib2
++import urllib.request, urllib.error, urllib.parse
+ import re
+ import time
+ import sys
+-from urllib import quote_plus, _is_unicode
++from urllib.parse import quote_plus
+ try:
+ from poster.encode import multipart_encode
+ canupload = True
+@@ -32,7 +32,7 @@ except:
+ import simplejson as json
+ try:
+ import gzip
+- import StringIO
++ import io
+ except:
+ gzip = False
+
+@@ -80,8 +80,8 @@ class APIRequest:
+ self.headers['Accept-Encoding'] = 'gzip'
+ self.wiki = wiki
+ self.response = False
+- self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(wiki.cookies))
+- self.request = urllib2.Request(self.wiki.apibase, self.encodeddata, self.headers)
++ self.opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(wiki.cookies))
++ self.request = urllib.request.Request(self.wiki.apibase, self.encodeddata, self.headers)
+
+ def setMultipart(self, multipart=True):
+ """Enable multipart data transfer, required for file uploads."""
+@@ -125,7 +125,7 @@ class APIRequest:
+ self.encodeddata = urlencode(self.data, 1)
+ self.headers['Content-Length'] = len(self.encodeddata)
+ self.headers['Content-Type'] = "application/x-www-form-urlencoded"
+- self.request = urllib2.Request(self.wiki.apibase, self.encodeddata, self.headers)
++ self.request = urllib.request.Request(self.wiki.apibase, self.encodeddata, self.headers)
+
+ def query(self, querycontinue=True):
+ """Actually do the query here and return usable stuff
+@@ -152,14 +152,14 @@ class APIRequest:
+ total = initialdata
+ res = initialdata
+ params = self.data
+- numkeys = len(res['query-continue'].keys())
++ numkeys = len(list(res['query-continue'].keys()))
+ while numkeys > 0:
+ key1 = ''
+ key2 = ''
+- possiblecontinues = res['query-continue'].keys()
++ possiblecontinues = list(res['query-continue'].keys())
+ if len(possiblecontinues) == 1:
+ key1 = possiblecontinues[0]
+- keylist = res['query-continue'][key1].keys()
++ keylist = list(res['query-continue'][key1].keys())
+ if len(keylist) == 1:
+ key2 = keylist[0]
+ else:
+@@ -171,7 +171,7 @@ class APIRequest:
+ key2 = keylist[0]
+ else:
+ for posskey in possiblecontinues:
+- keylist = res['query-continue'][posskey].keys()
++ keylist = list(res['query-continue'][posskey].keys())
+ for key in keylist:
+ if len(key) < 11:
+ key1 = posskey
+@@ -181,7 +181,7 @@ class APIRequest:
+ break
+ else:
+ key1 = possiblecontinues[0]
+- key2 = res['query-continue'][key1].keys()[0]
++ key2 = list(res['query-continue'][key1].keys())[0]
+ if isinstance(res['query-continue'][key1][key2], int):
+ cont = res['query-continue'][key1][key2]
+ else:
+@@ -198,7 +198,7 @@ class APIRequest:
+ for type in possiblecontinues:
+ total = resultCombine(type, total, res)
+ if 'query-continue' in res:
+- numkeys = len(res['query-continue'].keys())
++ numkeys = len(list(res['query-continue'].keys()))
+ else:
+ numkeys = 0
+ return total
+@@ -216,11 +216,11 @@ class APIRequest:
+ if gzip:
+ encoding = self.response.get('Content-encoding')
+ if encoding in ('gzip', 'x-gzip'):
+- data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data.read()))
+- except catcherror, exc:
++ data = gzip.GzipFile('', 'rb', 9, io.StringIO(data.read()))
++ except catcherror as exc:
+ errname = sys.exc_info()[0].__name__
+ errinfo = exc
+- print("%s: %s trying request again in %d seconds" % (errname, errinfo, self.sleep))
++ print(("%s: %s trying request again in %d seconds" % (errname, errinfo, self.sleep)))
+ time.sleep(self.sleep+0.5)
+ self.sleep+=5
+ return data
+@@ -234,10 +234,10 @@ class APIRequest:
+ content = None
+ if isinstance(parsed, dict):
+ content = APIResult(parsed)
+- content.response = self.response.items()
++ content.response = list(self.response.items())
+ elif isinstance(parsed, list):
+ content = APIListResult(parsed)
+- content.response = self.response.items()
++ content.response = list(self.response.items())
+ else:
+ content = parsed
+ if 'error' in content:
+@@ -246,7 +246,7 @@ class APIRequest:
+ lagtime = int(re.search("(\d+) seconds", content['error']['info']).group(1))
+ if lagtime > self.wiki.maxwaittime:
+ lagtime = self.wiki.maxwaittime
+- print("Server lag, sleeping for "+str(lagtime)+" seconds")
++ print(("Server lag, sleeping for "+str(lagtime)+" seconds"))
+ maxlag = True
+ time.sleep(int(lagtime)+0.5)
+ return False
+@@ -254,7 +254,7 @@ class APIRequest:
+ data.seek(0)
+ if "MediaWiki API is not enabled for this site. Add the following line to your LocalSettings.php<pre><b>$wgEnableAPI=true;</b></pre>" in data.read():
+ raise APIDisabled("The API is not enabled on this site")
+- print "Invalid JSON, trying request again"
++ print("Invalid JSON, trying request again")
+ # FIXME: Would be nice if this didn't just go forever if its never going to work
+ return False
+ return content
+@@ -276,7 +276,7 @@ def resultCombine(type, old, new):
+ if type in new['query']: # Basic list, easy
+ ret['query'][type].extend(new['query'][type])
+ else: # Else its some sort of prop=thing and/or a generator query
+- for key in new['query']['pages'].keys(): # Go through each page
++ for key in list(new['query']['pages'].keys()): # Go through each page
+ if not key in old['query']['pages']: # if it only exists in the new one
+ ret['query']['pages'][key] = new['query']['pages'][key] # add it to the list
+ else:
+@@ -300,7 +300,7 @@ def urlencode(query,doseq=0):
+ """
+ if hasattr(query,"items"):
+ # mapping objects
+- query = query.items()
++ query = list(query.items())
+ else:
+ # it's a bother at times that strings and string-like objects are
+ # sequences...
+@@ -315,7 +315,7 @@ def urlencode(query,doseq=0):
+ # preserved for consistency
+ except TypeError:
+ ty,va,tb = sys.exc_info()
+- raise TypeError, "not a valid non-string sequence or mapping object", tb
++ raise TypeError("not a valid non-string sequence or mapping object").with_traceback(tb)
+
+ l = []
+ if not doseq:
+--- wikitools/wiki.py.orig 2010-04-14 21:48:10 UTC
++++ wikitools/wiki.py
+@@ -15,15 +15,15 @@
+ # You should have received a copy of the GNU General Public License
+ # along with wikitools. If not, see <http://www.gnu.org/licenses/>.
+
+-import cookielib
+-import api
+-import urllib
++import http.cookiejar
++from . import api
++import urllib.request, urllib.parse, urllib.error
+ import re
+ import time
+ import os
+-from urlparse import urlparse
++from urllib.parse import urlparse
+ try:
+- import cPickle as pickle
++ import pickle as pickle
+ except:
+ import pickle
+
+@@ -112,10 +112,10 @@ class Wiki:
+ for ns in nsaliasdata:
+ self.NSaliases[ns['*']] = ns['id']
+ if not 'writeapi' in sidata:
+- print "WARNING: Write-API not enabled, you will not be able to edit"
++ print("WARNING: Write-API not enabled, you will not be able to edit")
+ version = re.search("\d\.(\d\d)", self.siteinfo['generator'])
+ if not int(version.group(1)) >= 13: # Will this even work on 13?
+- print "WARNING: Some features may not work on older versions of MediaWiki"
++ print("WARNING: Some features may not work on older versions of MediaWiki")
+ return self
+
+ def login(self, username, password=False, remember=False, force=False, verify=True, domain=None):
+@@ -145,10 +145,10 @@ class Wiki:
+ password = getpass()
+ def loginerror(info):
+ try:
+- print info['login']['result']
++ print(info['login']['result'])
+ except:
+- print info['error']['code']
+- print info['error']['info']
++ print(info['error']['code'])
++ print(info['error']['info'])
+ return False
+ data = {
+ "action" : "login",
+@@ -286,11 +286,11 @@ class Wiki:
+ class CookiesExpired(WikiError):
+ """Cookies are expired, needs to be an exception so login() will use the API instead"""
+
+-class WikiCookieJar(cookielib.FileCookieJar):
++class WikiCookieJar(http.cookiejar.FileCookieJar):
+ def save(self, site, filename=None, ignore_discard=False, ignore_expires=False):
+ if not filename:
+ filename = self.filename
+- old_umask = os.umask(0077)
++ old_umask = os.umask(0o077)
+ f = open(filename, 'w')
+ f.write('')
+ content = ''
+@@ -325,6 +325,6 @@ class WikiCookieJar(cookielib.FileCookieJar):
+ if not ignore_expires and cook.is_expired:
+ continue
+ self.set_cookie(cook)
+- exec sitedata
++ exec(sitedata)
+ f.close()
+
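
A caveat worth noting, outside the scope of this commit: 2to3 mechanically rewrites StringIO.StringIO to io.StringIO, but gzip.GzipFile operates on bytes, so decoding a gzip-compressed HTTP body under Python 3 actually requires io.BytesIO. A minimal, hypothetical sketch of the working pattern (the function name and URL handling are illustrative, not from wikitools):

    # Hypothetical sketch: decompressing a gzip-encoded API response in
    # Python 3. GzipFile needs a binary buffer, so the raw bytes are wrapped
    # in io.BytesIO; io.StringIO would raise a TypeError on bytes input.
    import gzip
    import io
    import urllib.request

    def fetch_body(url):
        req = urllib.request.Request(url, headers={'Accept-Encoding': 'gzip'})
        with urllib.request.urlopen(req) as resp:
            body = resp.read()
            if resp.headers.get('Content-Encoding') in ('gzip', 'x-gzip'):
                body = gzip.GzipFile(fileobj=io.BytesIO(body)).read()
        return body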