suivi: use getURLContent instead of call to urllib

This commit is contained in:
nemunaire 2017-07-07 06:38:00 +02:00 committed by nemunaire
parent 5f58f71d2f
commit 12403a3690

View File

@@ -17,8 +17,7 @@ from more import Response
 def get_tnt_info(track_id):
     values = []
-    data = getURLContent('www.tnt.fr/public/suivi_colis/recherche/'
-                         'visubontransport.do?bonTransport=%s' % track_id)
+    data = getURLContent('www.tnt.fr/public/suivi_colis/recherche/visubontransport.do?bonTransport=%s' % track_id)
     soup = BeautifulSoup(data)
     status_list = soup.find('div', class_='result__content')
     if not status_list:
@@ -32,8 +31,7 @@ def get_tnt_info(track_id):
 def get_colissimo_info(colissimo_id):
-    colissimo_data = getURLContent("http://www.colissimo.fr/portail_colissimo/"
-                                   "suivre.do?colispart=%s" % colissimo_id)
+    colissimo_data = getURLContent("http://www.colissimo.fr/portail_colissimo/suivre.do?colispart=%s" % colissimo_id)
     soup = BeautifulSoup(colissimo_data)
     dataArray = soup.find(class_='dataArray')
@@ -47,9 +45,8 @@ def get_colissimo_info(colissimo_id):
 def get_chronopost_info(track_id):
     data = urllib.parse.urlencode({'listeNumeros': track_id})
-    track_baseurl = "http://www.chronopost.fr/expedier/" \
-                    "inputLTNumbersNoJahia.do?lang=fr_FR"
-    track_data = urllib.request.urlopen(track_baseurl, data.encode('utf-8'))
+    track_baseurl = "http://www.chronopost.fr/expedier/inputLTNumbersNoJahia.do?lang=fr_FR"
+    track_data = getURLContent(track_baseurl, data.encode('utf-8'))
     soup = BeautifulSoup(track_data)
     infoClass = soup.find(class_='numeroColi2')
@@ -65,9 +62,8 @@ def get_chronopost_info(track_id):
 def get_colisprive_info(track_id):
     data = urllib.parse.urlencode({'numColis': track_id})
-    track_baseurl = "https://www.colisprive.com/moncolis/pages/" \
-                    "detailColis.aspx"
-    track_data = urllib.request.urlopen(track_baseurl, data.encode('utf-8'))
+    track_baseurl = "https://www.colisprive.com/moncolis/pages/detailColis.aspx"
+    track_data = getURLContent(track_baseurl, data.encode('utf-8'))
     soup = BeautifulSoup(track_data)
     dataArray = soup.find(class_='BandeauInfoColis')
@@ -82,8 +78,7 @@ def get_laposte_info(laposte_id):
     data = urllib.parse.urlencode({'id': laposte_id})
     laposte_baseurl = "http://www.part.csuivi.courrier.laposte.fr/suivi/index"
-    laposte_data = urllib.request.urlopen(laposte_baseurl,
-                                          data.encode('utf-8'))
+    laposte_data = getURLContent(laposte_baseurl, data.encode('utf-8'))
     soup = BeautifulSoup(laposte_data)
     search_res = soup.find(class_='resultat_rech_simple_table').tbody.tr
     if (soup.find(class_='resultat_rech_simple_table').thead
@@ -112,8 +107,7 @@ def get_postnl_info(postnl_id):
     data = urllib.parse.urlencode({'barcodes': postnl_id})
     postnl_baseurl = "http://www.postnl.post/details/"
-    postnl_data = urllib.request.urlopen(postnl_baseurl,
-                                         data.encode('utf-8'))
+    postnl_data = getURLContent(postnl_baseurl, data.encode('utf-8'))
     soup = BeautifulSoup(postnl_data)
     if (soup.find(id='datatables')
         and soup.find(id='datatables').tbody