Import of a single Mobilizon event

Fix #394
Jean-Marie Favreau 2025-04-27 12:15:15 +02:00
parent 53ce6ad29f
commit e38d31edc8
3 changed files with 120 additions and 86 deletions


@@ -24,19 +24,19 @@ from src.agenda_culturel.import_tasks.importer import URL2Events
 if __name__ == "__main__":
     u2e = URL2Events(SimpleDownloader(), mobilizon.CExtractor())
-    url = "https://mobilizon.fr/@attac63/events?"
-    url_human = "https://mobilizon.fr/@attac63/events"
+    url = "https://keskonfai.fr/events/166fca9c-e758-437c-8002-9a55d822e34d"
+    url_human = "https://keskonfai.fr/events/166fca9c-e758-437c-8002-9a55d822e34d"

     try:
         events = u2e.process(
             url,
             url_human,
-            cache="cache-attac63.html",
+            cache="cache-single-event-mobilizon.html",
             default_values={},
             published=True,
         )

-        exportfile = "events-attac63.json"
+        exportfile = "events-single-event-mobilizon.json"
         print("Saving events to file {}".format(exportfile))
         with open(exportfile, "w") as f:
             json.dump(events, f, indent=4, default=str)
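
Note (not part of the diff): a minimal sketch of what the demo script now covers, contrasting the previous group-agenda URL with the new single-event URL. The import paths for SimpleDownloader and the mobilizon module are inferred from the project layout, and the cache/export file names are only illustrative.

# Hedged sketch: both URL shapes are handled by mobilizon.CExtractor after this
# commit; the group form goes through _oncoming_events(), the single-event form
# through the new _get_event() path.
from src.agenda_culturel.import_tasks.importer import URL2Events
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader  # path inferred
from src.agenda_culturel.import_tasks.generic_extractors import mobilizon  # path inferred

u2e = URL2Events(SimpleDownloader(), mobilizon.CExtractor())

# group agenda (what this script exercised before the commit)
group_events = u2e.process(
    "https://mobilizon.fr/@attac63/events?",
    "https://mobilizon.fr/@attac63/events",
    cache="cache-attac63.html",
    default_values={},
    published=True,
)

# single event (what the script exercises after the commit)
single_event = u2e.process(
    "https://keskonfai.fr/events/166fca9c-e758-437c-8002-9a55d822e34d",
    "https://keskonfai.fr/events/166fca9c-e758-437c-8002-9a55d822e34d",
    cache="cache-single-event-mobilizon.html",
    default_values={},
    published=True,
)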


@@ -315,6 +315,7 @@ class Extractor(ABC):
         from .generic_extractors.ical import ICALExtractor
         from .custom_extractors.associations_cf import CExtractor as AssociationsCF
         from .generic_extractors.helloasso import CExtractor as HelloAssoExtractor
+        from .generic_extractors.mobilizon import CExtractor as MobilizonExtractor

         if single_event:
             return [
@@ -323,6 +324,7 @@ class Extractor(ABC):
                 AssociationsCF,
                 ICALExtractor,
                 HelloAssoExtractor,
+                MobilizonExtractor,
                 EventNotFoundExtractor,
             ]
         else:
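
Note (not part of the diff): MobilizonExtractor is inserted before EventNotFoundExtractor, so a recognised Mobilizon URL can be claimed before the not-found fallback. A quick, hedged check of the URL whitelist added later in this commit; the module path is inferred from the demo script's imports.

# Hedged sketch: is_known_url() is defined further down in this commit and
# whitelists keskonfai.fr and mobilizon.fr by netloc.
from src.agenda_culturel.import_tasks.generic_extractors.mobilizon import (
    CExtractor as MobilizonExtractor,
)

print(MobilizonExtractor.is_known_url(
    "https://keskonfai.fr/events/166fca9c-e758-437c-8002-9a55d822e34d"
))  # expected: True
print(MobilizonExtractor.is_known_url("https://example.org/events/42"))  # expected: False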


@@ -13,10 +13,50 @@ logger = logging.getLogger(__name__)

 # A class dedicated to get events from Mobilizon
 class CExtractor(Extractor):
+    event_params = """
+        id,
+        title,
+        url,
+        beginsOn,
+        endsOn,
+        options {
+            showStartTime,
+            showEndTime,
+            timezone
+        },
+        attributedTo {
+            avatar {
+                url,
+            }
+            name,
+            preferredUsername,
+        },
+        description,
+        onlineAddress,
+        physicalAddress {
+            locality,
+            description,
+            region
+        },
+        tags {
+            title,
+            id,
+            slug
+        },
+        picture {
+            url
+        },
+        status
+    """
+
     def __init__(self):
         super().__init__()
         self.no_downloader = True

+    def is_known_url(url, include_links=True):
+        u = urlparse(url)
+        return u.netloc in ["keskonfai.fr", "mobilizon.fr"]
+
     # Source code adapted from https://framagit.org/Marc-AntoineA/mobilizon-client-python
     def _request(self, body, data):
         headers = {}
@@ -57,49 +97,20 @@ query($preferredUsername: String!, $afterDatetime: DateTime) {

     def _oncoming_events(self):
         def _oncoming_events_page(page):
-            query = """
+            query = (
+                """
 query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) {
     group(preferredUsername: $preferredUsername) {
         organizedEvents(afterDatetime: $afterDatetime, page: $page) {
-            elements {
-                id,
-                title,
-                url,
-                beginsOn,
-                endsOn,
-                options {
-                    showStartTime,
-                    showEndTime,
-                    timezone
-                },
-                attributedTo {
-                    avatar {
-                        url,
-                    }
-                    name,
-                    preferredUsername,
-                },
-                description,
-                onlineAddress,
-                physicalAddress {
-                    locality,
-                    description,
-                    region
-                },
-                tags {
-                    title,
-                    id,
-                    slug
-                },
-                picture {
-                    url
-                },
-                status
+            elements {"""
+                + CExtractor.event_params
+                + """
             }
         }
     }
 }
 """
+            )

             today = datetime.now(timezone.utc).isoformat()
             data = {
@@ -119,6 +130,68 @@ query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) {
             page += 1

         return events

+    def _get_event(self):
+        query = (
+            "query GetEvent($uuid: UUID!) { event(uuid: $uuid) {"
+            + CExtractor.event_params
+            + "}}"
+        )
+        data = {
+            "uuid": self._uuid_event,
+        }
+        r = self._request(query, data)
+        return r["event"]
+
+    def add_mobilizon_event(self, e, default_values, published):
+        title = e["title"]
+        event_url = e["url"]
+        if "picture" in e and e["picture"] is not None:
+            image = e["picture"]["url"]
+        else:
+            image = None
+
+        location = (
+            e["physicalAddress"]["description"]
+            + ", "
+            + e["physicalAddress"]["locality"]
+        )
+        soup = BeautifulSoup(e["description"], "html.parser")
+        description = soup.get_text(separator="\n")
+
+        start = (
+            dateutil.parser.isoparse(e["beginsOn"])
+            .replace(tzinfo=timezone.utc)
+            .astimezone(tz=None)
+        )
+        end = (
+            dateutil.parser.isoparse(e["endsOn"])
+            .replace(tzinfo=timezone.utc)
+            .astimezone(tz=None)
+        )
+        start_day = start.date()
+        start_time = start.time() if e["options"]["showStartTime"] else None
+        end_day = end.date()
+        end_time = end.time() if e["options"]["showEndTime"] else None
+
+        self.add_event(
+            default_values,
+            title,
+            None,
+            start_day,
+            location,
+            description,
+            [],
+            uuids=[event_url],
+            recurrences=None,
+            url_human=event_url,
+            start_time=start_time,
+            published=published,
+            image=image,
+            end_day=end_day,
+            end_time=end_time,
+        )
+
     def extract(
         self,
         content,
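
Note (not part of the diff): for the uuid used in the demo script, the concatenation in _get_event() yields roughly the following query and variables; the exact HTTP envelope built by _request() is not shown in this diff.

# Illustrative only: the assembled GraphQL request for a single event.
query = (
    "query GetEvent($uuid: UUID!) { event(uuid: $uuid) {"
    + CExtractor.event_params
    + "}}"
)
variables = {"uuid": "166fca9c-e758-437c-8002-9a55d822e34d"}
# _request(query, variables) is then expected to return a mapping with an
# "event" key holding the fields listed in event_params.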
@@ -145,52 +218,11 @@ query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) {
             events = self._oncoming_events()
             for e in events:
-                title = e["title"]
-                event_url = e["url"]
-                if "picture" in e and e["picture"] is not None:
-                    image = e["picture"]["url"]
-                else:
-                    image = None
-
-                location = (
-                    e["physicalAddress"]["description"]
-                    + ", "
-                    + e["physicalAddress"]["locality"]
-                )
-                soup = BeautifulSoup(e["description"], "html.parser")
-                description = soup.get_text(separator="\n")
-
-                start = (
-                    dateutil.parser.isoparse(e["beginsOn"])
-                    .replace(tzinfo=timezone.utc)
-                    .astimezone(tz=None)
-                )
-                end = (
-                    dateutil.parser.isoparse(e["endsOn"])
-                    .replace(tzinfo=timezone.utc)
-                    .astimezone(tz=None)
-                )
-                start_day = start.date()
-                start_time = start.time() if e["options"]["showStartTime"] else None
-                end_day = end.date()
-                end_time = end.time() if e["options"]["showEndTime"] else None
-
-                self.add_event(
-                    default_values,
-                    title,
-                    None,
-                    start_day,
-                    location,
-                    description,
-                    [],
-                    uuids=[event_url],
-                    recurrences=None,
-                    url_human=event_url,
-                    start_time=start_time,
-                    published=published,
-                    image=image,
-                    end_day=end_day,
-                    end_time=end_time,
-                )
+                self.add_mobilizon_event(e, default_values, published)
+        elif "events" in url:
+            self._api_end_point = "https://" + urlparse(url).netloc + "/api"
+            self._uuid_event = url.split("/")[-1]
+            event = self._get_event()
+            self.add_mobilizon_event(event, default_values, published)

         return self.get_structure()
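
Note (not part of the diff): a hedged end-to-end trace of how a single-event URL flows through the new code paths, using only names visible in this diff.

# Hedged walk-through for
# url = "https://keskonfai.fr/events/166fca9c-e758-437c-8002-9a55d822e34d"
#
# 1. CExtractor.is_known_url(url) is True (netloc "keskonfai.fr" is whitelisted),
#    so the extractor added to the single_event list can claim the URL.
# 2. In extract(), assuming the earlier group-agenda branch does not match, the
#    new elif "events" in url branch sets:
#        self._api_end_point = "https://keskonfai.fr/api"
#        self._uuid_event    = "166fca9c-e758-437c-8002-9a55d822e34d"
# 3. _get_event() fetches the event over the GraphQL API and
#    add_mobilizon_event() converts it via add_event(), exactly as the
#    group-agenda loop now does for each of its events.
# 4. get_structure() returns the collected events to the caller
#    (for instance the demo script's u2e.process() call above).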