récupération page principale (retrieval of the main page)

Mylloon 2021-05-25 00:25:59 +02:00
parent 9c66127c88
commit fc8c7dd2e5

main.py (57 lines changed)

@@ -13,55 +13,48 @@ class Universite:
"submit": "SE CONNECTER"
}
def mySoup(self, page):
def maSoupe(self, page):
return BeautifulSoup(page.content, "html.parser")
""" def ecrirePageHTML(self, texte): # Utilisé pour du DEBUG
with open("page.html", "w") as f:
f.write(texte) """
def recuperationNotes(self):
with Session() as session:
# login
reponse = session.get(self.url)
soup = self.mySoup(reponse)
soup = self.maSoupe(reponse)
self.loginData["lt"] = soup.find("input", attrs = {"name": "lt"})["value"]
self.loginData["execution"] = soup.find("input", attrs = {"name": "execution"})["value"]
reponse = session.post(self.url, data = self.loginData)
# page des résultats intermédiaire
-            soup = self.mySoup(reponse)
-            url = soup.find("a", attrs = {"id": "service-407"})["href"]
+            soup = self.maSoupe(reponse)
+            try:
+                url = soup.find("a", attrs = {"id": "service-407"})["href"]
+            except:
+                raise TimeoutError("Le site a pris trop de temps pour répondre, veuillez réessayer plus tard.")
             reponse = session.get(url, allow_redirects = False)
             url = reponse.headers["Location"]
             reponse = session.get(url)
             # year selection page --
-            soup = self.mySoup(reponse)
+            soup = self.maSoupe(reponse)
             notes = {
                 "formMenu_SUBMIT": "1",
                 "formMenu:_idcl": "formMenu:linknotes1",
                 "formMenu:_link_hidden_": "",
                 "javax.faces.ViewState": soup.find("input", attrs = {"name": "javax.faces.ViewState"})["value"]
             }
             reponse = session.post(url, data = notes, allow_redirects = False)
-            print(reponse.status_code)
-            # url = reponse.headers["Location"]
-            # reponse = session.get(url)
-            # print(reponse.text)
""" # page des notes
dernierFormulaire = {
"_id142Pluto_146_ctf2_168897__SUBMIT": "1",
"_id142Pluto_146_ctf2_168897_:_idcl": "_id142Pluto_146_ctf2_168897_:tabledip:0:_id148Pluto_146_ctf2_168897_",
"_id142Pluto_146_ctf2_168897_:_link_hidden_": "",
"row": "0",
"javax.faces.ViewState": notes["javax.faces.ViewState"]
}
reponse = session.post(url, data = notes, allow_redirects = False)
url = reponse.headers["Location"]
# page des notes
soup = self.maSoupe(reponse)
url = f"{url}?{soup.find('form', attrs = {'enctype': 'application/x-www-form-urlencoded'})['action'].split('?')[1].replace('welcome', 'detailnotes')}"
reponse = session.get(url)
# self.ecrirePageHTML(reponse.text)
# récupération tableau
soup = self.mySoup(reponse)
return soup.find("table", attrs = {"id": "_id109Pluto_146_ctf2_168897_:tableel"}) """
+            # retrieve the notes tables
+            soup = self.maSoupe(reponse)
+            for attrs in soup.findAll("table"):
+                try:
+                    texte = str(attrs).split("thead")[1][2:-2]
+                    while '  ' in texte:
+                        texte = texte.replace('  ', ' ')
+                    return texte
+                except:
+                    pass
 if __name__ == "__main__":
     argv = argv[1:]
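
For reference, the table-extraction step added at the end of recuperationNotes works by walking every table element on the grades page, keeping the markup that follows its thead tag, and collapsing runs of spaces. Below is a minimal standalone sketch of that idea; the HTML snippet and variable names are illustrative assumptions, not taken from the university portal.

from bs4 import BeautifulSoup

# Illustrative markup only; the real page is served by the university portal.
html = ("<table><thead><tr>  <th>UE</th>  <th>Moyenne</th>  </tr></thead>"
        "<tbody><tr><td>Maths</td><td>15.5</td></tr></tbody></table>")

soup = BeautifulSoup(html, "html.parser")
for table in soup.findAll("table"):
    try:
        # Keep what follows the opening "thead"; the hard-coded [2:-2] slice
        # trims leftover tag characters and is tuned to the portal's markup.
        texte = str(table).split("thead")[1][2:-2]
        # Collapse repeated spaces left over from the page's indentation.
        while '  ' in texte:
            texte = texte.replace('  ', ' ')
        print(texte)
    except IndexError:
        # A table without a thead is skipped (the commit uses a bare except).
        pass

Run against this snippet, the print shows the header row's markup with single spaces; in the commit the same text is returned to the caller instead of printed.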