Remove BS4 dependency
This commit is contained in:
parent 8ed0d68179
commit 629d53ae14
2 changed files with 12 additions and 18 deletions
@@ -3,8 +3,8 @@
 ## Prerequisites
 ```
 requests-html==0.10.0
-bs4==4.10.0
 ```

 ## Usage
 ```
 python3 main.py "<link-to-the-CAS-instance-for-logging-in-to-uPortal>" "<username>" "<password>"
 ```
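The three quoted arguments are positional; in `main.py` (diffed below) they arrive through `sys.argv` and presumably feed the `Universite` class. A minimal sketch of that entry point, with placeholder values rather than a real CAS instance or account:

```python
from sys import argv

# Mirrors main.py's entry point: the script expects exactly three arguments,
# in this order. All values here are placeholders.
if __name__ == "__main__":
    url, pseudo, motDePasse = argv[1:]  # CAS URL, username, password
    print(f"Would log in to {url} as {pseudo}")
```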
main.py (28 changes)
@@ -1,6 +1,5 @@
 from sys import argv
-from requests_html import BaseSession, HTMLResponse
-from bs4 import BeautifulSoup  # Could maybe be replaced by requests_html's own functions?
+from requests_html import BaseSession

 class Universite:
     def __init__(self, url: str, pseudo: str, motDePasse: str):
@@ -12,10 +11,6 @@ class Universite:
             "submit": "SE CONNECTER"
         }

-    def maSoupe(self, page: HTMLResponse):
-        """Macro for using bs4's HTML parser."""
-        return BeautifulSoup(page.content, "html.parser")
-
     def ecrirePageHTML(self, texte: str):
         """Writes the HTML page to a file, for debugging."""
         with open("page.html", 'w') as f:
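With `maSoupe` and the bs4 import gone, everything goes through the `.html` attribute that requests_html attaches to each response. A minimal, self-contained sketch of that pattern (the URL is a placeholder, not the real CAS instance):

```python
from requests_html import BaseSession

# BaseSession wraps every response as an HTMLResponse whose .html attribute
# is already parsed, so no separate BeautifulSoup step is needed.
with BaseSession() as session:
    reponse = session.get("https://example.org/login")  # placeholder URL
    for element in reponse.html.find("input"):          # CSS selector lookup
        print(element.attrs.get("name"), element.attrs.get("value"))
```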
@@ -24,26 +19,24 @@ class Universite:
     def recuperationNotes(self) -> str:
         """Fetches the grades."""
         with BaseSession() as session:
-            # login
             reponse = session.get(self.url)
-            soup = self.maSoupe(reponse)
-            self.loginData["lt"] = soup.find("input", attrs = {"name": "lt"})["value"]
-            self.loginData["execution"] = soup.find("input", attrs = {"name": "execution"})["value"]
+
+            # login
+            self.loginData["lt"] = [element.attrs["value"] for element in reponse.html.find("input") if element.attrs["name"] == "lt"][0]
+            self.loginData["execution"] = [element.attrs["value"] for element in reponse.html.find("input") if element.attrs["name"] == "execution"][0]
             reponse = session.post(self.url, data = self.loginData)

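The two new list comprehensions filter every `<input>` by hand; since requests_html's `find()` also accepts CSS selectors, the same hidden CAS fields could be grabbed directly. A self-contained sketch of that alternative (the inline HTML and field values are made up):

```python
from requests_html import HTML

# Stand-in for reponse.html; a real run would use session.get(self.url).html.
page = HTML(html='<input name="lt" value="LT-123"><input name="execution" value="e1s1">')

loginData = {
    # find() takes a CSS selector; first=True returns a single Element.
    "lt": page.find('input[name="lt"]', first=True).attrs["value"],
    "execution": page.find('input[name="execution"]', first=True).attrs["value"],
}
print(loginData)  # {'lt': 'LT-123', 'execution': 'e1s1'}
```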
             # intermediate results page
-            soup = self.maSoupe(reponse)
             try:
-                url = soup.find("a", attrs = {"id": "service-407"})["href"]
-            except:
+                url = [element.attrs["href"] for element in reponse.html.find("a") if "id" in element.attrs if element.attrs["id"] == "service-407"][0]
+            except IndexError:  # happens when "An Error Has Occurred"
                 raise TimeoutError("The site took too long to respond, please try again later.")
             reponse = session.get(url, allow_redirects = False)
             url = reponse.headers["Location"]
             reponse = session.get(url)

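The bare `except:` becomes `except IndexError`, which is the exception the new lookup actually produces: when the `service-407` link is absent (the CAS error page), the filtered list is empty and `[0]` fails. A small self-contained illustration with made-up HTML:

```python
from requests_html import HTML

# Error page without the expected link, as served when CAS shows
# "An Error Has Occurred".
error_page = HTML(html="<p>An Error Has Occurred</p>")

links = [a.attrs["href"] for a in error_page.find("a")
         if a.attrs.get("id") == "service-407"]
try:
    url = links[0]
except IndexError:
    print("service-407 link missing, treat as a portal timeout")
```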
             # year selection
-            soup = self.maSoupe(reponse)
-            url = f"{url}?{soup.find('form', attrs = {'enctype': 'application/x-www-form-urlencoded'})['action'].split('?')[1].replace('welcome', 'notes')}"
+            url = f"{url}?{[element.attrs['action'] for element in reponse.html.find('form') if 'enctype' in element.attrs if element.attrs['enctype'] == 'application/x-www-form-urlencoded'][0].split('?')[1].replace('welcome', 'notes')}"
             reponse = session.get(url)
             self.ecrirePageHTML(reponse.text)

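The rewritten f-string packs the form lookup, the query-string extraction and the welcome-to-notes substitution into one line. Broken into steps, and again using a CSS selector, the same logic looks like the sketch below; the form action and the base URL are invented placeholders:

```python
from requests_html import HTML

# Stand-in for the year-selection page; the action value is made up.
page = HTML(html='<form enctype="application/x-www-form-urlencoded" '
                 'action="/portail?page=welcome&annee=2021"></form>')

form = page.find('form[enctype="application/x-www-form-urlencoded"]', first=True)
query = form.attrs["action"].split("?")[1].replace("welcome", "notes")
url = f"https://portal.example/notes?{query}"  # placeholder base URL
print(url)  # https://portal.example/notes?page=notes&annee=2021
```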
@@ -51,7 +44,7 @@ class Universite:
             # TODO

             # grab the grade tables
-            soup = self.maSoupe(reponse)
+            """ soup = self.maSoupe(reponse)
             for attrs in soup.findAll("table"):
                 try:
                     texte = str(attrs).split("thead")[1][2:-2]
@@ -59,7 +52,8 @@ class Universite:
                     texte = texte.replace("\xa0", ' ')  # strip non-breaking spaces
                     return texte
                 except:
-                    pass
+                    pass """
+        return "WIP"

 if __name__ == "__main__":
     argv = argv[1:]
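The bs4 table loop is only commented out and the method now returns "WIP", so grade extraction is still open. One possible requests_html direction for that TODO, not something this commit implements (the table HTML is invented):

```python
from requests_html import HTML

# Element.text already strips the markup, so the str()/split("thead")
# juggling from the commented-out bs4 loop may not be needed at all.
page = HTML(html="<table><thead><tr><th>UE</th><th>Note</th></tr></thead>"
                 "<tbody><tr><td>Maths</td><td>15.5</td></tr></tbody></table>")

for table in page.find("table"):
    print(table.text)  # tag-free text of the whole table
```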