Modules: global dusting: call getJSON instead of making raw calls to urllib
This commit is contained in:
parent 66ec7cb7ca
commit d14fec4cec
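The change below touches most of the network-using modules: instead of calling urllib.request.urlopen and then json.loads or parse_string by hand, they now go through the shared helpers in tools/web.py (getJSON, getXML, getURLContent). tools/web.py itself is not part of this diff, so the following is only a rough sketch of what those helpers presumably look like; the names and the timeout keyword come from the call sites below, and the return-None-on-failure behaviour is inferred from the new "is None" checks added in the github and subreddit hunks.

# Hypothetical sketch of the tools.web helpers used by this commit -- not the
# actual nemubot implementation, only inferred from how the modules call them.
import json
from urllib.error import HTTPError
from urllib.request import urlopen

from tools.xmlparser import parse_string  # nemubot's own XML node parser


def getURLContent(url, timeout=15):
    """Fetch url and return the decoded body, or None on HTTP errors."""
    try:
        with urlopen(url, timeout=timeout) as raw:
            return raw.read().decode()
    except HTTPError:
        return None


def getJSON(url, timeout=15):
    """Fetch url and parse it as JSON; None when the request fails."""
    content = getURLContent(url, timeout=timeout)
    return json.loads(content) if content is not None else None


def getXML(url, timeout=15):
    """Fetch url and parse it with tools.xmlparser; None when the request fails."""
    content = getURLContent(url, timeout=timeout)
    return parse_string(content.encode()) if content is not None else None

Centralising the error handling this way is what lets the modules below drop their per-call try/except blocks and simply test the helper's return value.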
@@ -2,7 +2,7 @@
 
 """Looking for books"""
 
-import urllib.request
 import urllib
 
 from hooks import hook
+from tools import web
@@ -1,17 +1,20 @@
 # coding=utf-8
 
 from urllib.parse import quote
-from urllib.request import urlopen
 
+from tools import web
 from tools.xmlparser import parse_string
 
+
 class DDGSearch:
 
     def __init__(self, terms):
         self.terms = terms
 
-        raw = urlopen("https://api.duckduckgo.com/?q=%s&format=xml&no_redirect=1" % quote(terms), timeout=10)
-        self.ddgres = parse_string(raw.read())
+        self.ddgres = web.getXML(
+            "https://api.duckduckgo.com/?q=%s&format=xml&no_redirect=1" %
+            quote(terms),
+            timeout=10)
 
     @property
     def type(self):
@@ -1,15 +1,18 @@
 # coding=utf-8
 
-import json
 from urllib.parse import quote
-from urllib.request import urlopen
 
+from tools import web
 
 
 class UrbanDictionnary:
 
     def __init__(self, terms):
         self.terms = terms
 
-        raw = urlopen("http://api.urbandictionary.com/v0/define?term=%s" % quote(terms), timeout=10)
-        self.udres = json.loads(raw.read().decode())
+        self.udres = web.getJSON(
+            "http://api.urbandictionary.com/v0/define?term=%s" % quote(terms),
+            timeout=10)
 
     @property
     def result_type(self):
@@ -1,19 +1,17 @@
 # coding=utf-8
 
 from urllib.parse import quote
-from urllib.request import urlopen
 
-from tools.xmlparser import parse_string
+from tools import web
 
+
 class WFASearch:
     def __init__(self, terms):
         self.terms = terms
         try:
-            raw = urlopen("http://api.wolframalpha.com/v2/query?"
-                          "input=%s&appid=%s"
-                          % (quote(terms),
-                             CONF.getNode("wfaapi")["key"]), timeout=15)
-            self.wfares = parse_string(raw.read())
+            url = ("http://api.wolframalpha.com/v2/query?input=%s&appid=%s" %
+                   (quote(terms), CONF.getNode("wfaapi")["key"]))
+            self.wfares = web.getXML(url)
         except (TypeError, KeyError):
            print ("You need a Wolfram|Alpha API key in order to use this "
                   "module. Add it to the module configuration file:\n<wfaapi
@@ -33,7 +31,8 @@ class WFASearch:
         if self.wfares is None:
             return "An error occurs during computation."
         elif self.wfares["error"] == "true":
-            return "An error occurs during computation: " + self.wfares.getNode("error").getNode("msg").getContent()
+            return ("An error occurs during computation: " +
+                    self.wfares.getNode("error").getNode("msg").getContent())
         elif self.wfares.hasNode("didyoumeans"):
             start = "Did you mean: "
             tag = "didyoumean"
@@ -66,6 +65,7 @@ class WFASearch:
             for node in self.wfares.getNodes("pod"):
                 for subnode in node.getNodes("subpod"):
                     if subnode.getFirstNode("plaintext").getContent() != "":
-                        yield node["title"] + " " + subnode["title"] + ": " + subnode.getFirstNode("plaintext").getContent()
+                        yield (node["title"] + " " + subnode["title"] + ": " +
+                               subnode.getFirstNode("plaintext").getContent())
         except IndexError:
             pass
@@ -2,13 +2,11 @@
 
 """Repositories, users or issues on GitHub"""
 
-import json
 import re
-import urllib.error
 from urllib.parse import quote
-from urllib.request import urlopen
 
 from hooks import hook
+from tools import web
 
 nemubotversion = 3.4
 
@@ -21,19 +19,17 @@ def help_full():
 
 
 def info_repos(repo):
-    raw = urlopen("https://api.github.com/search/repositories?q=%s" %
-                  quote(repo), timeout=10)
-    return json.loads(raw.read().decode())
+    return web.getJSON("https://api.github.com/search/repositories?q=%s" %
+                       quote(repo), timeout=10)
 
 
 def info_user(username):
-    raw = urlopen("https://api.github.com/users/%s" % quote(username),
-                  timeout=10)
-    user = json.loads(raw.read().decode())
+    user = web.getJSON("https://api.github.com/users/%s" % quote(username),
+                       timeout=10)
 
-    raw = urlopen("https://api.github.com/users/%s/repos?sort=updated" %
-                  quote(username), timeout=10)
-    user["repos"] = json.loads(raw.read().decode())
+    user["repos"] = web.getJSON("https://api.github.com/users/%s/"
+                                "repos?sort=updated" % quote(username),
+                                timeout=10)
 
     return user
 
@@ -45,17 +41,12 @@ def info_issue(repo, issue=None):
     else:
         fullname = repo
 
-    try:
-        if issue is not None:
-            raw = urlopen("https://api.github.com/repos/%s/issues/%s" %
-                          (quote(fullname), quote(issue)), timeout=10)
-            return [json.loads(raw.read().decode())]
-        else:
-            raw = urlopen("https://api.github.com/repos/%s/issues?sort=updated"
-                          % quote(fullname), timeout=10)
-            return json.loads(raw.read().decode())
-    except urllib.error.HTTPError:
-        raise IRCException("Repository not found")
+    if issue is not None:
+        return [web.getJSON("https://api.github.com/repos/%s/issues/%s" %
+                            (quote(fullname), quote(issue)))]
+    else:
+        return web.getJSON("https://api.github.com/repos/%s/issues?"
+                           "sort=updated" % quote(fullname))
 
 
 def info_commit(repo, commit=None):
@@ -65,17 +56,12 @@ def info_commit(repo, commit=None):
     else:
         fullname = repo
 
-    try:
-        if commit is not None:
-            raw = urlopen("https://api.github.com/repos/%s/commits/%s" %
-                          (quote(fullname), quote(commit)), timeout=10)
-            return [json.loads(raw.read().decode())]
-        else:
-            raw = urlopen("https://api.github.com/repos/%s/commits" %
-                          quote(fullname), timeout=10)
-            return json.loads(raw.read().decode())
-    except urllib.error.HTTPError:
-        raise IRCException("Repository not found")
+    if commit is not None:
+        return [web.getJSON("https://api.github.com/repos/%s/commits/%s" %
+                            (quote(fullname), quote(commit)))]
+    else:
+        return web.getJSON("https://api.github.com/repos/%s/commits" %
+                           quote(fullname))
 
 
 @hook("cmd_hook", "github")
@@ -162,6 +148,9 @@ def cmd_github(msg):
 
         issues = info_issue(repo, issue)
 
+        if issues is None:
+            raise IRCException("Repository not found")
+
         for issue in issues:
            res.append_message("%s%s issue #%d: \x03\x02%s\x03\x02 opened by %s on %s: %s" %
                               (issue["state"][0].upper(),
@@ -195,6 +184,9 @@ def cmd_github(msg):
 
         commits = info_commit(repo, commit)
 
+        if commits is None:
+            raise IRCException("Repository not found")
+
         for commit in commits:
            res.append_message("Commit %s by %s on %s: %s" %
                               (commit["sha"][:10],
@@ -2,19 +2,21 @@
 
 """Show many information about a movie or serie"""
 
-import json
 import re
-import urllib.request
 import urllib.parse
 
 from hooks import hook
+from tools import web
 
 nemubotversion = 3.4
 
 from more import Response
 
+
 def help_full():
     return "Search a movie title with: !imdbs <approximative title> ; View movie details with !imdb <title>"
 
+
 def get_movie(title=None, year=None, imdbid=None, fullplot=True, tomatoes=False):
     """Returns the information about the matching movie"""
 
@@ -34,8 +36,7 @@ def get_movie(title=None, year=None, imdbid=None, fullplot=True, tomatoes=False)
     print_debug(url)
 
     # Make the request
-    response = urllib.request.urlopen(url)
-    data = json.loads(response.read().decode())
+    data = web.getJSON(url)
 
     # Return data
     if "Error" in data:
@@ -47,6 +48,7 @@ def get_movie(title=None, year=None, imdbid=None, fullplot=True, tomatoes=False)
     else:
         raise IRCException("An error occurs during movie search")
 
+
 def find_movies(title):
     """Find existing movies matching a approximate title"""
 
@@ -55,8 +57,7 @@ def find_movies(title):
     print_debug(url)
 
     # Make the request
-    raw = urllib.request.urlopen(url)
-    data = json.loads(raw.read().decode())
+    data = web.getJSON(url)
 
     # Return data
     if "Error" in data:
@@ -86,9 +87,9 @@ def cmd_imdb(msg):
     else:
         data = get_movie(title=title)
 
-    res = Response(channel=msg.channel,
-            title="%s (%s)" % (data['Title'], data['Year']),
-            nomore="No more information, more at http://www.imdb.com/title/%s" % data['imdbID'])
+    res = Response(channel=msg.channel,
+                   title="%s (%s)" % (data['Title'], data['Year']),
+                   nomore="No more information, more at http://www.imdb.com/title/%s" % data['imdbID'])
 
     res.append_message("\x02rating\x0F: %s (%s votes); \x02plot\x0F: %s" %
                        (data['imdbRating'], data['imdbVotes'], data['Plot']))
@@ -97,6 +98,7 @@ def cmd_imdb(msg):
                        % (data['Type'], data['Country'], data['Released'], data['Genre'], data['Director'], data['Writer'], data['Actors']))
     return res
 
+
 @hook("cmd_hook", "imdbs")
 def cmd_search(msg):
     """!imdbs <approximative title> to search a movie title"""
@@ -1,14 +1,17 @@
 # coding=utf-8
 
-import json
+"""The mapquest module"""
+
 import re
 from urllib.parse import quote
-from urllib.request import urlopen
+
+from tools import web
 
 nemubotversion = 3.4
 
 from more import Response
 
+
 def load(context):
     if not CONF or not CONF.hasNode("mapquestapi") or not CONF.getNode("mapquestapi").hasAttribute("key"):
         print ("You need a MapQuest API key in order to use this "
@@ -21,31 +24,38 @@ def load(context):
     add_hook("cmd_hook", MessageHook(cmd_geocode, "geocode"))
 
 
 def help_tiny ():
     """Line inserted in the response to the command !help"""
     return "The mapquest module"
 
-def help_full ():
+def help_full():
     return "!geocode /place/: get coordinate of /place/."
 
 
 def geocode(location):
-    raw = urlopen("http://open.mapquestapi.com/geocoding/v1/address?key=%s&location=%s" % (CONF.getNode("mapquestapi")["key"], quote(location)))
-    obj = json.loads(raw.read().decode())
+    obj = web.getJSON("http://open.mapquestapi.com/geocoding/v1/address?key=%s&location=%s" %
+                      (CONF.getNode("mapquestapi")["key"], quote(location)))
 
     if "results" in obj and "locations" in obj["results"][0]:
         for loc in obj["results"][0]["locations"]:
             yield loc
 
 
 def where(loc):
-    return re.sub(" +", " ", "%s %s %s %s %s" % (loc["street"], loc["adminArea5"], loc["adminArea4"], loc["adminArea3"], loc["adminArea1"])).strip()
+    return re.sub(" +", " ",
+                  "{street} {adminArea5} {adminArea4} {adminArea3} "
+                  "{adminArea1}".format(**loc)).strip()
 
 
 def cmd_geocode(msg):
     if len(msg.cmds) < 2:
         raise IRCException("indicate a name")
 
     locname = ' '.join(msg.cmds[1:])
-    res = Response(channel=msg.channel, nick=msg.nick, nomore="No more geocode", count=" (%s more geocode)")
+    res = Response(channel=msg.channel, nick=msg.nick,
+                   nomore="No more geocode", count=" (%s more geocode)")
 
     for loc in geocode(locname):
-        res.append_message("%s is at %s,%s (%s precision)" % (where(loc), loc["latLng"]["lat"], loc["latLng"]["lng"], loc["geocodeQuality"].lower()))
+        res.append_message("%s is at %s,%s (%s precision)" %
+                           (where(loc),
+                            loc["latLng"]["lat"],
+                            loc["latLng"]["lng"],
+                            loc["geocodeQuality"].lower()))
 
     return res
@@ -5,7 +5,6 @@
-import json
 import re
 import urllib.parse
 import urllib.request
 
 from hooks import hook
 from tools import web
@@ -22,8 +21,7 @@ def get_namespaces(site, ssl=False):
     print_debug(url)
 
     # Make the request
-    raw = urllib.request.urlopen(url)
-    data = json.loads(raw.read().decode())
+    data = web.getJSON(url)
 
     namespaces = dict()
     for ns in data["query"]["namespaces"]:
@@ -38,8 +36,7 @@ def get_raw_page(site, term, ssl=False):
     print_debug(url)
 
     # Make the request
-    raw = urllib.request.urlopen(url)
-    data = json.loads(raw.read().decode())
+    data = web.getJSON(url)
 
     for k in data["query"]["pages"]:
         try:
@@ -55,8 +52,7 @@ def get_unwikitextified(site, wikitext, ssl=False):
     print_debug(url)
 
     # Make the request
-    raw = urllib.request.urlopen(url)
-    data = json.loads(raw.read().decode())
+    data = web.getJSON(url)
 
     return data["expandtemplates"]["*"]
 
@@ -129,8 +125,7 @@ def search(site, term, ssl=False):
     print_debug(url)
 
     # Make the request
-    raw = urllib.request.urlopen(url)
-    data = json.loads(raw.read().decode())
+    data = web.getJSON(url)
 
     if data is not None and "query" in data and "search" in data["query"]:
         for itm in data["query"]["search"]:
@@ -2,9 +2,9 @@
 
 """Get information about subreddit"""
 
-import json
 import re
 import urllib
 
+from tools import web
 
 nemubotversion = 3.4
 
@@ -38,16 +38,12 @@ def cmd_subreddit(msg):
         where = sub.group(1)
     else:
         where = "r"
-    try:
-        req = urllib.request.Request(
-            "http://www.reddit.com/%s/%s/about.json" %
-            (where, sub.group(2)),
-            headers={'User-Agent': "nemubot v3"})
-        raw = urllib.request.urlopen(req, timeout=10)
-    except urllib.error.HTTPError as e:
-        raise IRCException("HTTP error occurs: %s %s" %
-                           (e.code, e.reason))
-    sbr = json.loads(raw.read().decode())
+
+    sbr = web.getJSON("http://www.reddit.com/%s/%s/about.json" %
+                      (where, sub.group(2)))
+
+    if sbr is None:
+        raise IRCException("subreddit not found")
 
     if "title" in sbr["data"]:
         res = Response(channel=msg.channel,
@@ -2,9 +2,8 @@
 
 """Find information about an SAP transaction codes"""
 
-import urllib.request
-import json
 import re
 import urllib.parse
 
 from hooks import hook
+from tools import web
@@ -2,10 +2,8 @@
 
 """Find synonyms"""
 
-import json
 import re
 from urllib.parse import quote
-from urllib.request import urlopen
 
 from hooks import hook
 from tools import web
@@ -14,9 +12,11 @@ nemubotversion = 3.4
 
 from more import Response
 
+
 def help_full():
     return "!syno [LANG] <word>: give a list of synonyms for <word>."
 
+
 def load(context):
     global lang_binding
 
@@ -55,8 +55,8 @@ def get_french_synos(word):
 
 
 def get_english_synos(key, word):
-    raw = urlopen("http://words.bighugelabs.com/api/2/%s/%s/json" % (quote(key), quote(word.encode("ISO-8859-1"))))
-    cnt = json.loads(raw.read().decode())
+    cnt = web.getJSON("http://words.bighugelabs.com/api/2/%s/%s/json" %
+                      (quote(key), quote(word.encode("ISO-8859-1"))))
 
     best = list(); synos = list(); anton = list()
 
@@ -2,12 +2,10 @@
 
 """Translation module"""
 
 import http.client
 import re
 import socket
-import json
 from urllib.parse import quote
-from urllib.request import urlopen
 
 from tools import web
 
 nemubotversion = 3.4
 
@@ -59,11 +57,7 @@ def cmd_translate(msg):
         langTo = "fr"
     term = ' '.join(msg.cmds[1:])
 
-    try:
-        raw = urlopen(URL % (langFrom, langTo, quote(term)))
-    except:
-        raise IRCException("invalid request")
-    wres = json.loads(raw.read().decode())
+    wres = web.getJSON(URL % (langFrom, langTo, quote(term)))
 
     if "Error" in wres:
         raise IRCException(wres["Note"])
@@ -87,6 +81,7 @@ def cmd_translate(msg):
                                              extract_traslation(ent[i])))
     return res
 
+
 def meaning(entry):
     ret = list()
     if "sense" in entry and len(entry["sense"]) > 0:
@@ -98,6 +93,7 @@ def meaning(entry):
     else:
         return ""
 
+
 def extract_traslation(entry):
     ret = list()
     for i in [ "FirstTranslation", "SecondTranslation", "ThirdTranslation", "FourthTranslation" ]:
@@ -3,12 +3,11 @@
 """The weather module"""
 
 import datetime
-import json
 import re
 from urllib.parse import quote
-from urllib.request import urlopen
 
 from hooks import hook
+from tools import web
 
 import mapquest
 
@@ -16,6 +15,7 @@ nemubotversion = 3.4
 
 from more import Response
 
+
 def load(context):
     global DATAS
     DATAS.setIndex("name", "city")
@@ -40,9 +40,11 @@ def help_full ():
 def fahrenheit2celsius(temp):
     return int((temp - 32) * 50/9)/10
 
+
 def mph2kmph(speed):
     return int(speed * 160.9344)/100
 
+
 def inh2mmh(size):
     return int(size * 254)/10
 
@@ -62,6 +64,7 @@ def format_wth(wth):
                 int(wth["ozone"])
             ))
 
+
 def format_forecast_daily(wth):
     return ("%s; between %s-%s °C; precipitation (%s %% chance) intensity: maximum %s mm/h; relative humidity: %s %%; wind speed: %s km/h %s°; cloud coverage: %s %%; pressure: %s hPa; ozone: %s DU" %
             (
@@ -77,6 +80,7 @@ def format_forecast_daily(wth):
                 int(wth["ozone"])
             ))
 
+
 def format_timestamp(timestamp, tzname, tzoffset, format="%c"):
     tz = datetime.timezone(datetime.timedelta(hours=tzoffset), tzname)
     time = datetime.datetime.fromtimestamp(timestamp, tz=tz)
@@ -126,8 +130,7 @@ def treat_coord(msg):
 
 
 def get_json_weather(coords):
-    raw = urlopen("https://api.forecast.io/forecast/%s/%s,%s" % (CONF.getNode("darkskyapi")["key"], float(coords[0]), float(coords[1])), timeout=10)
-    wth = json.loads(raw.read().decode())
+    wth = web.getJSON("https://api.forecast.io/forecast/%s/%s,%s" % (CONF.getNode("darkskyapi")["key"], float(coords[0]), float(coords[1])))
 
     # First read flags
     if "darksky-unavailable" in wth["flags"]:
@@ -147,6 +150,7 @@ def cmd_coordinates(msg):
     coords = DATAS.index[j]
     return Response("Les coordonnées de %s sont %s,%s" % (msg.args[0], coords["lat"], coords["long"]), channel=msg.channel)
 
+
 def cmd_alert(msg):
     loc, coords, specific = treat_coord(msg)
     wth = get_json_weather(coords)
@@ -159,6 +163,7 @@ def cmd_alert(msg):
 
     return res
 
+
 def cmd_weather(msg):
     loc, coords, specific = treat_coord(msg)
     wth = get_json_weather(coords)
@@ -211,6 +216,7 @@ def cmd_weather(msg):
 
 gps_ask = re.compile(r"^\s*(?P<city>.*\w)\s*(?:(?:se|est)\s+(?:trouve|situ[ée]*)\s+[aà])\s*(?P<lat>-?[0-9]+(?:[,.][0-9]+))[^0-9.](?P<long>-?[0-9]+(?:[,.][0-9]+))\s*$", re.IGNORECASE)
 
+
 @hook("ask_default")
 def parseask(msg):
     res = gps_ask.match(msg.text)
@@ -5,10 +5,10 @@
 import re
 from urllib.parse import urlparse
 from urllib.parse import quote
-from urllib.request import urlopen
 
 from hooks import hook
 from message import TextMessage
+from tools import web
 
 nemubotversion = 3.4
 
@@ -54,11 +54,11 @@ def cmd_ycc(msg):
             snd_url = "http://ycc.fr/redirection/create/" + quote(url,
                                                                   "/:%@&=?")
             print_debug(snd_url)
-            raw = urlopen(snd_url, timeout=10)
+            page = web.getURLContent(snd_url)
             if o.netloc == "":
-                res.append(gen_response(raw.read().decode(), msg, o.scheme))
+                res.append(gen_response(page, msg, o.scheme))
             else:
-                res.append(gen_response(raw.read().decode(), msg, o.netloc))
+                res.append(gen_response(page, msg, o.netloc))
         else:
             res.append(gen_response(False, msg, url))
     return res
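One behavioural consequence of the change, visible in the github and subreddit hunks above: HTTP errors are no longer caught at every call site. A minimal sketch of the resulting calling pattern, reusing the hypothetical getJSON from the top of this page (IRCException here is only a stand-in for nemubot's own exception class, and the function name is illustrative):

# Sketch of the post-commit calling pattern; not code taken from this repository.
from urllib.parse import quote

from tools import web


class IRCException(Exception):
    """Stand-in for nemubot's IRC-facing exception type."""


def issues_for(fullname):
    # getJSON is assumed to return None when the request fails, where the old
    # code caught urllib.error.HTTPError explicitly at each call site.
    issues = web.getJSON("https://api.github.com/repos/%s/issues?sort=updated"
                         % quote(fullname))
    if issues is None:
        raise IRCException("Repository not found")
    return issues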