Import of a single Mobilizon event

Fix #394
Jean-Marie Favreau 2025-04-27 12:15:15 +02:00
parent 53ce6ad29f
commit e38d31edc8
3 changed files with 120 additions and 86 deletions


@@ -24,19 +24,19 @@ from src.agenda_culturel.import_tasks.importer import URL2Events

 if __name__ == "__main__":
     u2e = URL2Events(SimpleDownloader(), mobilizon.CExtractor())
-    url = "https://mobilizon.fr/@attac63/events?"
-    url_human = "https://mobilizon.fr/@attac63/events"
+    url = "https://keskonfai.fr/events/166fca9c-e758-437c-8002-9a55d822e34d"
+    url_human = "https://keskonfai.fr/events/166fca9c-e758-437c-8002-9a55d822e34d"

     try:
         events = u2e.process(
             url,
             url_human,
-            cache="cache-attac63.html",
+            cache="cache-single-event-mobilizon.html",
             default_values={},
             published=True,
         )

-        exportfile = "events-attac63.json"
+        exportfile = "events-single-event-mobilizon.json"
         print("Saving events to file {}".format(exportfile))
         with open(exportfile, "w") as f:
             json.dump(events, f, indent=4, default=str)
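Note (not part of the commit): the script above hands whatever u2e.process returns straight to json.dump with default=str, so datetime values end up as plain strings in the export. A minimal sketch of reading that export back, assuming only the file name used above:

import json

# Hypothetical follow-up to the test script (not in this commit): reload the
# exported file. Datetimes were serialized via default=str, so they come back
# as strings; the exact structure depends on what URL2Events.process returns.
with open("events-single-event-mobilizon.json") as f:
    data = json.load(f)

print(type(data).__name__)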


@@ -315,6 +315,7 @@ class Extractor(ABC):
         from .generic_extractors.ical import ICALExtractor
         from .custom_extractors.associations_cf import CExtractor as AssociationsCF
         from .generic_extractors.helloasso import CExtractor as HelloAssoExtractor
+        from .generic_extractors.mobilizon import CExtractor as MobilizonExtractor

         if single_event:
             return [
@@ -323,6 +324,7 @@ class Extractor(ABC):
                 AssociationsCF,
                 ICALExtractor,
                 HelloAssoExtractor,
+                MobilizonExtractor,
                 EventNotFoundExtractor,
             ]
         else:
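These two hunks register the new Mobilizon extractor in the ordered list tried for single-event URLs, just ahead of the EventNotFoundExtractor fallback. A rough sketch of how such an ordered list can drive extractor selection (hypothetical helper, not code from this repository; it only assumes the is_known_url(url) check added to the Mobilizon extractor below):

def pick_extractor_class(url, extractor_classes):
    # Try each class in order; the first whose is_known_url() accepts the URL
    # wins, and the last entry (EventNotFoundExtractor) is the fallback.
    for cls in extractor_classes[:-1]:
        is_known = getattr(cls, "is_known_url", None)
        if is_known is not None and is_known(url):
            return cls
    return extractor_classes[-1]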


@@ -13,10 +13,50 @@ logger = logging.getLogger(__name__)

 # A class dedicated to get events from Mobilizon
 class CExtractor(Extractor):
+    event_params = """
+        id,
+        title,
+        url,
+        beginsOn,
+        endsOn,
+        options {
+          showStartTime,
+          showEndTime,
+          timezone
+        },
+        attributedTo {
+          avatar {
+            url,
+          }
+          name,
+          preferredUsername,
+        },
+        description,
+        onlineAddress,
+        physicalAddress {
+          locality,
+          description,
+          region
+        },
+        tags {
+          title,
+          id,
+          slug
+        },
+        picture {
+          url
+        },
+        status
+    """
+
     def __init__(self):
         super().__init__()
         self.no_downloader = True

+    def is_known_url(url, include_links=True):
+        u = urlparse(url)
+        return u.netloc in ["keskonfai.fr", "mobilizon.fr"]
+
     # Source code adapted from https://framagit.org/Marc-AntoineA/mobilizon-client-python
     def _request(self, body, data):
         headers = {}
@@ -57,49 +97,20 @@ query($preferredUsername: String!, $afterDatetime: DateTime) {

     def _oncoming_events(self):
         def _oncoming_events_page(page):
-            query = """
+            query = (
+                """
 query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) {
   group(preferredUsername: $preferredUsername) {
     organizedEvents(afterDatetime: $afterDatetime, page: $page) {
-      elements {
-        id,
-        title,
-        url,
-        beginsOn,
-        endsOn,
-        options {
-          showStartTime,
-          showEndTime,
-          timezone
-        },
-        attributedTo {
-          avatar {
-            url,
-          }
-          name,
-          preferredUsername,
-        },
-        description,
-        onlineAddress,
-        physicalAddress {
-          locality,
-          description,
-          region
-        },
-        tags {
-          title,
-          id,
-          slug
-        },
-        picture {
-          url
-        },
-        status
+      elements {"""
+                + CExtractor.event_params
+                + """
       }
     }
   }
 }
 """
+            )

             today = datetime.now(timezone.utc).isoformat()
             data = {
@@ -119,6 +130,68 @@ query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) {
             page += 1
         return events

+    def _get_event(self):
+        query = (
+            "query GetEvent($uuid: UUID!) { event(uuid: $uuid) {"
+            + CExtractor.event_params
+            + "}}"
+        )
+        data = {
+            "uuid": self._uuid_event,
+        }
+        r = self._request(query, data)
+        return r["event"]
+
+    def add_mobilizon_event(self, e, default_values, published):
+        title = e["title"]
+        event_url = e["url"]
+        if "picture" in e and e["picture"] is not None:
+            image = e["picture"]["url"]
+        else:
+            image = None
+
+        location = (
+            e["physicalAddress"]["description"]
+            + ", "
+            + e["physicalAddress"]["locality"]
+        )
+
+        soup = BeautifulSoup(e["description"], "html.parser")
+        description = soup.get_text(separator="\n")
+
+        start = (
+            dateutil.parser.isoparse(e["beginsOn"])
+            .replace(tzinfo=timezone.utc)
+            .astimezone(tz=None)
+        )
+        end = (
+            dateutil.parser.isoparse(e["endsOn"])
+            .replace(tzinfo=timezone.utc)
+            .astimezone(tz=None)
+        )
+        start_day = start.date()
+        start_time = start.time() if e["options"]["showStartTime"] else None
+        end_day = end.date()
+        end_time = end.time() if e["options"]["showEndTime"] else None
+
+        self.add_event(
+            default_values,
+            title,
+            None,
+            start_day,
+            location,
+            description,
+            [],
+            uuids=[event_url],
+            recurrences=None,
+            url_human=event_url,
+            start_time=start_time,
+            published=published,
+            image=image,
+            end_day=end_day,
+            end_time=end_time,
+        )
+
     def extract(
         self,
         content,
@@ -145,52 +218,11 @@ query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) {

             events = self._oncoming_events()
             for e in events:
-                title = e["title"]
-                event_url = e["url"]
-                if "picture" in e and e["picture"] is not None:
-                    image = e["picture"]["url"]
-                else:
-                    image = None
-
-                location = (
-                    e["physicalAddress"]["description"]
-                    + ", "
-                    + e["physicalAddress"]["locality"]
-                )
-                soup = BeautifulSoup(e["description"], "html.parser")
-                description = soup.get_text(separator="\n")
-
-                start = (
-                    dateutil.parser.isoparse(e["beginsOn"])
-                    .replace(tzinfo=timezone.utc)
-                    .astimezone(tz=None)
-                )
-                end = (
-                    dateutil.parser.isoparse(e["endsOn"])
-                    .replace(tzinfo=timezone.utc)
-                    .astimezone(tz=None)
-                )
-                start_day = start.date()
-                start_time = start.time() if e["options"]["showStartTime"] else None
-                end_day = end.date()
-                end_time = end.time() if e["options"]["showEndTime"] else None
-
-                self.add_event(
-                    default_values,
-                    title,
-                    None,
-                    start_day,
-                    location,
-                    description,
-                    [],
-                    uuids=[event_url],
-                    recurrences=None,
-                    url_human=event_url,
-                    start_time=start_time,
-                    published=published,
-                    image=image,
-                    end_day=end_day,
-                    end_time=end_time,
-                )
+                self.add_mobilizon_event(e, default_values, published)
+        elif "events" in url:
+            self._api_end_point = "https://" + urlparse(url).netloc + "/api"
+            self._uuid_event = url.split("/")[-1]
+            event = self._get_event()
+            self.add_mobilizon_event(event, default_values, published)

         return self.get_structure()
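For reference, the single-event path added here reduces to one GraphQL POST against the instance's /api endpoint, with the event UUID taken from the last segment of the URL, as in _get_event above. A standalone sketch of that request, not code from the commit; the field list is shortened, and the real _request may add headers and unwrap the response differently:

import requests

url = "https://keskonfai.fr/events/166fca9c-e758-437c-8002-9a55d822e34d"
api_end_point = "https://" + url.split("/")[2] + "/api"  # same host, /api endpoint
uuid = url.split("/")[-1]  # the event UUID is the last path segment

query = """
query GetEvent($uuid: UUID!) {
  event(uuid: $uuid) { id, title, url, beginsOn, endsOn, status }
}
"""

r = requests.post(api_end_point, json={"query": query, "variables": {"uuid": uuid}}, timeout=30)
r.raise_for_status()
print(r.json()["data"]["event"]["title"])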