Remove BS4 dependency
This commit is contained in:
parent
8ed0d68179
commit
629d53ae14
2 changed files with 12 additions and 18 deletions
|
@@ -3,8 +3,8 @@
|
||||||
## Pré-requis
|
## Pré-requis
|
||||||
```
|
```
|
||||||
requests-html==0.10.0
|
requests-html==0.10.0
|
||||||
bs4==4.10.0
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Utilisation
|
## Utilisation
|
||||||
```
|
```
|
||||||
python3 main.py "<lien-vers-l'instance-CAS-pour-la-connexion-à-uPortal>" "<pseudo>" "<mot-de-passe>"
|
python3 main.py "<lien-vers-l'instance-CAS-pour-la-connexion-à-uPortal>" "<pseudo>" "<mot-de-passe>"
|
||||||
|
|
28
main.py
28
main.py
|
@@ -1,6 +1,5 @@
|
||||||
from sys import argv
|
from sys import argv
|
||||||
from requests_html import BaseSession, HTMLResponse
|
from requests_html import BaseSession
|
||||||
from bs4 import BeautifulSoup # Peut-être retiré par les fonctions de requests_html ?
|
|
||||||
|
|
||||||
class Universite:
|
class Universite:
|
||||||
def __init__(self, url: str, pseudo: str, motDePasse: str):
|
def __init__(self, url: str, pseudo: str, motDePasse: str):
|
||||||
|
@@ -12,10 +11,6 @@ class Universite:
|
||||||
"submit": "SE CONNECTER"
|
"submit": "SE CONNECTER"
|
||||||
}
|
}
|
||||||
|
|
||||||
def maSoupe(self, page: HTMLResponse):
|
|
||||||
"""Macro pour utiliser le parser HTML de bs4."""
|
|
||||||
return BeautifulSoup(page.content, "html.parser")
|
|
||||||
|
|
||||||
def ecrirePageHTML(self, texte: str):
|
def ecrirePageHTML(self, texte: str):
|
||||||
"""Affiche la page HTML pour le debug."""
|
"""Affiche la page HTML pour le debug."""
|
||||||
with open("page.html", 'w') as f:
|
with open("page.html", 'w') as f:
|
||||||
|
@@ -24,26 +19,24 @@ class Universite:
|
||||||
def recuperationNotes(self) -> str:
|
def recuperationNotes(self) -> str:
|
||||||
"""Récupère les notes."""
|
"""Récupère les notes."""
|
||||||
with BaseSession() as session:
|
with BaseSession() as session:
|
||||||
# login
|
|
||||||
reponse = session.get(self.url)
|
reponse = session.get(self.url)
|
||||||
soup = self.maSoupe(reponse)
|
|
||||||
self.loginData["lt"] = soup.find("input", attrs = {"name": "lt"})["value"]
|
# login
|
||||||
self.loginData["execution"] = soup.find("input", attrs = {"name": "execution"})["value"]
|
self.loginData["lt"] = [element.attrs["value"] for element in reponse.html.find("input") if element.attrs["name"] == "lt"][0]
|
||||||
|
self.loginData["execution"] = [element.attrs["value"] for element in reponse.html.find("input") if element.attrs["name"] == "execution"][0]
|
||||||
reponse = session.post(self.url, data = self.loginData)
|
reponse = session.post(self.url, data = self.loginData)
|
||||||
|
|
||||||
# page des résultats intermédiaire
|
# page des résultats intermédiaire
|
||||||
soup = self.maSoupe(reponse)
|
|
||||||
try:
|
try:
|
||||||
url = soup.find("a", attrs = {"id": "service-407"})["href"]
|
url = [element.attrs["href"] for element in reponse.html.find("a") if "id" in element.attrs if element.attrs["id"] == "service-407"][0]
|
||||||
except:
|
except IndexError: # Arrive quand "An Error Has Occurred"
|
||||||
raise TimeoutError("Le site a prit trop de temps pour répondre, veuillez réessayez plus tard.")
|
raise TimeoutError("Le site a prit trop de temps pour répondre, veuillez réessayez plus tard.")
|
||||||
reponse = session.get(url, allow_redirects = False)
|
reponse = session.get(url, allow_redirects = False)
|
||||||
url = reponse.headers["Location"]
|
url = reponse.headers["Location"]
|
||||||
reponse = session.get(url)
|
reponse = session.get(url)
|
||||||
|
|
||||||
# choix des années
|
# choix des années
|
||||||
soup = self.maSoupe(reponse)
|
url = f"{url}?{[element.attrs['action'] for element in reponse.html.find('form') if 'enctype' in element.attrs if element.attrs['enctype'] == 'application/x-www-form-urlencoded'][0].split('?')[1].replace('welcome', 'notes')}"
|
||||||
url = f"{url}?{soup.find('form', attrs = {'enctype': 'application/x-www-form-urlencoded'})['action'].split('?')[1].replace('welcome', 'notes')}"
|
|
||||||
reponse = session.get(url)
|
reponse = session.get(url)
|
||||||
self.ecrirePageHTML(reponse.text)
|
self.ecrirePageHTML(reponse.text)
|
||||||
|
|
||||||
|
@@ -51,7 +44,7 @@ class Universite:
|
||||||
# TODO
|
# TODO
|
||||||
|
|
||||||
# récupération tableaux des notes
|
# récupération tableaux des notes
|
||||||
soup = self.maSoupe(reponse)
|
""" soup = self.maSoupe(reponse)
|
||||||
for attrs in soup.findAll("table"):
|
for attrs in soup.findAll("table"):
|
||||||
try:
|
try:
|
||||||
texte = str(attrs).split("thead")[1][2:-2]
|
texte = str(attrs).split("thead")[1][2:-2]
|
||||||
|
@@ -59,7 +52,8 @@ class Universite:
|
||||||
texte = texte.replace(" ", ' ')
|
texte = texte.replace(" ", ' ')
|
||||||
return texte
|
return texte
|
||||||
except:
|
except:
|
||||||
pass
|
pass """
|
||||||
|
return "WIP"
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
argv = argv[1:]
|
argv = argv[1:]
|
||||||
|
|
Reference in a new issue