From da99e3ab647ebdc4a0167f923101a8ce76270a3b Mon Sep 17 00:00:00 2001
From: Bastien Le Querrec <blq@laquadrature.net>
Date: Sat, 11 Jan 2025 19:13:25 +0100
Subject: [PATCH] pref44: use prefdpt

---
 Attrap_pref44.py | 107 ++++-------------------------------------------
 1 file changed, 7 insertions(+), 100 deletions(-)

diff --git a/Attrap_pref44.py b/Attrap_pref44.py
index 236698d..68fce00 100644
--- a/Attrap_pref44.py
+++ b/Attrap_pref44.py
@@ -1,109 +1,16 @@
-import os
-import datetime
-import logging
+from Attrap_prefdpt import Attrap_prefdpt
 
-from bs4 import BeautifulSoup
-from urllib.parse import unquote
 
-from Attrap import Attrap
+class Attrap_pref44(Attrap_prefdpt):
 
-logger = logging.getLogger(__name__)
-
-
-class Attrap_pref44(Attrap):
-
-    # Config
+    # Prefecture configuration
     hostname = 'https://www.loire-atlantique.gouv.fr'
     raa_page = f'{hostname}/Publications/Recueil-des-actes-administratifs-RAA-en-Loire-Atlantique'
-    user_agent = 'Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0'
     full_name = 'Préfecture de la Loire-Atlantique'
     short_code = 'pref44'
     timezone = 'Europe/Paris'
 
-    def __init__(self, data_dir):
-        super().__init__(data_dir, self.user_agent)
-        self.set_sleep_time(30)
-
-    def get_raa(self, keywords):
-        pages_to_parse = []
-
-        # Sometimes an RAA is miscategorized and ends up on the root page, so we parse it too
-        pages_to_parse.append(self.raa_page)
-
-        # Figure out which year pages to parse
-        year_pages_to_parse = []
-        page_content = self.get_page(self.raa_page, 'get').content
-        year_pages = self.get_sub_pages(
-            page_content,
-            '.fr-card.fr-card--sm.fr-card--grey.fr-enlarge-link div.fr-card__body div.fr-card__content h2.fr-card__title a',
-            self.hostname,
-            False
-        )
-        for year_page in year_pages:
-            year = 9999
-            try:
-                year = int(year_page['name'].strip())
-                if year is None:
-                    year = 9999
-            except Exception as exc:
-                logger.warning(f"Unable to guess the year of page {year_page['name']}")
-                year = 9999
-
-            if year >= self.not_before.year:
-                year_pages_to_parse.append(year_page['url'])
-
-                # Sometimes an RAA is miscategorized and ends up on the year page, so we parse it too
-                pages_to_parse.append(year_page['url'])
-
-        # For each year, look for the month subpages
-        month_pages_to_parse = []
-        for year_page in year_pages_to_parse:
-            page_content = self.get_page(year_page, 'get').content
-            month_pages = self.get_sub_pages(
-                page_content,
-                '.fr-card.fr-card--sm.fr-card--grey.fr-enlarge-link div.fr-card__body div.fr-card__content h2.fr-card__title a',
-                self.hostname,
-                False
-            )[::-1]
-
-            for month_page in month_pages:
-                pages_to_parse.append(month_page['url'])
-
-        # Parse the selected pages
-        elements = self.get_raa_with_pager(
-            pages_to_parse,
-            "ul.fr-pagination__list li a.fr-pagination__link.fr-pagination__link--next.fr-pagination__link--lg-label",
-            self.hostname
-        )[::-1]
-
-        self.parse_raa(elements, keywords)
-        self.mailer()
-
-    def get_raa_elements(self, page_content):
-        elements = []
-
-        # Get every card containing an RAA
-        for card in BeautifulSoup(page_content, 'html.parser').select('div.fr-card.fr-card--horizontal div.fr-card__body div.fr-card__content'):
-            # Get the link
-            links = card.select('h2.fr-card__title a.fr-card__link.menu-item-link')
-            # Get the date
-            dates_raw = card.select('div.fr-card__end p.fr-card__detail')
-
-            # If we have all the information, continue
-            if links and links[0] and dates_raw and dates_raw[0]:
-                a = links[0]
-                date_raw = dates_raw[0]
-
-                if a.get('href') and a['href'].endswith('.pdf'):
-                    if a['href'].startswith('/'):
-                        url = f"{self.hostname}{a['href']}"
-                    else:
-                        url = a['href']
-
-                    url = unquote(url)
-                    name = a.get_text().strip()
-                    date = datetime.datetime.strptime(date_raw.get_text().replace('Publié le', '').strip(), '%d/%m/%Y')
-
-                    raa = Attrap.RAA(url, date, name, timezone=self.timezone)
-                    elements.append(raa)
-        return elements
+    # Configuration of the widgets to parse
+    Attrap_prefdpt.grey_card['regex']['year'] = '([0-9]{4})'
+    Attrap_prefdpt.grey_card['regex']['month'] = '([A-Za-zéû]* [0-9]{4})'
+    Attrap_prefdpt.grey_card['add_year_to_months'] = True
-- 
GitLab
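
A minimal usage sketch of the slimmed-down scraper, assuming Attrap_prefdpt keeps
the data_dir constructor and the get_raa(keywords) entry point that the removed
code inherited from Attrap; the data directory and keyword below are placeholders.

    from Attrap_pref44 import Attrap_pref44

    pref = Attrap_pref44('data/')     # data_dir, as in the former __init__(self, data_dir)
    pref.get_raa(['surveillance'])    # keywords, as in the former get_raa(self, keywords)

The two grey_card regexes are intended to match the year ("2024") and month
("janvier 2024") card titles that the removed get_raa() previously walked through manually.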