From 390e07fbc45c5e60e4cfefd0897878c5d945ab30 Mon Sep 17 00:00:00 2001
From: Bastien Le Querrec <blq@laquadrature.net>
Date: Fri, 23 Aug 2024 16:18:27 +0200
Subject: [PATCH] pref64: fix page parsing

---
 Attrap_pref64.py | 86 ++++++++++++++++++------------------------------
 1 file changed, 32 insertions(+), 54 deletions(-)

diff --git a/Attrap_pref64.py b/Attrap_pref64.py
index af7b20f..ca94dde 100644
--- a/Attrap_pref64.py
+++ b/Attrap_pref64.py
@@ -11,15 +11,8 @@ class Attrap_pref64(Attrap):
 
     # Config
     __HOST = 'https://www.pyrenees-atlantiques.gouv.fr'
-    __RAA_PAGE = {
-        '2024': f'{__HOST}/Publications/Recueil-des-actes-administratifs/Annee-2024',
-        '2023': f'{__HOST}/Publications/Recueil-des-actes-administratifs/Annee-2023',
-        '2022': f'{__HOST}/Publications/Recueil-des-actes-administratifs/Annee-2022',
-        '2021': f'{__HOST}/Publications/Recueil-des-actes-administratifs/Annee-2021',
-        '2020': f'{__HOST}/Publications/Recueil-des-actes-administratifs/Annee-2020',
-        '2019': f'{__HOST}/Publications/Recueil-des-actes-administratifs/Annee-2019'
-    }
-    __USER_AGENT = 'Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/115.0'
+    __RAA_PAGE = f'{__HOST}/Publications/Recueil-des-actes-administratifs'
+    __USER_AGENT = 'Mozilla/5.0 (X11; Linux x86_64; rv:129.0) Gecko/20100101 Firefox/129.0'
     full_name = 'Préfecture des Pyrénées-Atlantiques'
     short_code = 'pref64'
 
@@ -28,53 +21,38 @@ class Attrap_pref64(Attrap):
         self.set_sleep_time(30)
 
     def get_raa(self, keywords):
-        year_pages_to_parse = []
-        if self.not_before.year <= 2024:
-            year_pages_to_parse.append(self.__RAA_PAGE['2024'])
-        if self.not_before.year <= 2023:
-            year_pages_to_parse.append(self.__RAA_PAGE['2023'])
-        if self.not_before.year <= 2022:
-            year_pages_to_parse.append(self.__RAA_PAGE['2022'])
-        if self.not_before.year <= 2021:
-            year_pages_to_parse.append(self.__RAA_PAGE['2021'])
-        if self.not_before.year <= 2020:
-            year_pages_to_parse.append(self.__RAA_PAGE['2020'])
-        if self.not_before.year <= 2019:
-            year_pages_to_parse.append(self.__RAA_PAGE['2019'])
-
-        pages_to_parse = []
-        # For each year, look for the month sub-pages
-        for year_page in year_pages_to_parse:
+        # Fetch the year pages
+        year_pages = []
+        page_content = self.get_page(self.__RAA_PAGE, 'get').content
+        for year_page in self.get_sub_pages(
+            page_content,
+            'div.fr-card__body div.fr-card__content h2.fr-card__title a',
+            self.__HOST,
+            False
+        ):
+            year = Attrap.guess_date(year_page['name'], '.* ([0-9]{4})').year
+            if year < 9999 and year >= self.not_before.year:
+                year_pages.append(year_page['url'])
+
+        # For each year page, fetch the month pages
+        month_pages = []
+        for year_page in year_pages:
             page_content = self.get_page(year_page, 'get').content
-            month_pages = self.get_sub_pages(
+            for month_page in self.get_sub_pages(
                 page_content,
-                '.fr-card.fr-card--sm.fr-card--grey.fr-enlarge-link div.fr-card__body div.fr-card__content h2.fr-card__title a',
+                'div.fr-card__body div.fr-card__content h2.fr-card__title a',
                 self.__HOST,
                 False
-            )[::-1]
-
-            # For each month page, fetch the links to RAA pages
-            for month_page in month_pages:
-                raa_links = self.get_sub_pages_with_pager(
-                    month_page['url'],
-                    'div.content-view-line div.class-file h2 a',
-                    'ul.fr-pagination__list li a.fr-pagination__link.fr-pagination__link--next',
-                    None,
-                    self.__HOST
-                )[::-1]
-
-                # For each RAA link, filter out those outside the analysed period
-                for raa_link in raa_links:
-                    guessed_date = Attrap.guess_date(raa_link['name'], 'n°[ 0-9-]* du ([0-9]*(?:er)? [a-zéû]* [0-9]*)')
-                    if guessed_date >= self.not_before:
-                        pages_to_parse.append(raa_link['url'])
+            ):
+                if Attrap.guess_date(month_page['name'], '(.*)').replace(day=1) >= self.not_before.replace(day=1):
+                    month_pages.append(month_page['url'])
 
-        # Parse the pages containing RAAs
-        elements = []
-        for page in pages_to_parse:
-            page_content = self.get_page(page, 'get').content
-            for raa in self.get_raa_elements(page_content):
-                elements.append(raa)
+        # Fetch the RAAs by following each month page's pagination
+        elements = self.get_raa_with_pager(
+            month_pages,
+            'a.fr-pagination__link--next.fr-pagination__link--lg-label',
+            self.__HOST
+        )
 
         self.parse_raa(elements, keywords)
         self.mailer()
@@ -85,7 +63,7 @@ class Attrap_pref64(Attrap):
         soup = BeautifulSoup(page_content, 'html.parser')
 
         # Fetch every a tag
-        for a in soup.select('a.fr-link.fr-link--download'):
+        for a in soup.select('div.fr-card__body div.fr-card__content h2.fr-card__title a.fr-card__link.menu-item-link'):
             if a.get('href') and a['href'].endswith('.pdf'):
                 if a['href'].startswith('/'):
                     url = f"{self.__HOST}{a['href']}"
@@ -93,8 +71,8 @@ class Attrap_pref64(Attrap):
                     url = a['href']
 
                 url = unquote(url)
-                name = a.find('span').previous_sibling.replace('Télécharger ', '').strip()
-                date = datetime.datetime.strptime(a.find('span').get_text().split(' - ')[-1].strip(), '%d/%m/%Y')
+                name = a.get_text().strip()
+                date = datetime.datetime.strptime(a['title'].split(' - ')[-1].strip(), '%d/%m/%Y')
 
                 raa = Attrap.RAA(url, date, name)
                 elements.append(raa)
-- 
GitLab