Compare commits: feat/issue...13_export_ (10 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | de61752726 |  |
|  | ce5ff2df63 |  |
|  | 018b7a052f |  |
|  | 663c4cff71 |  |
|  | 8bd4fafb17 |  |
|  | 6795a221da |  |
|  | 1db30afc1a |  |
|  | fc3b0f8320 |  |
|  | bd310c7b44 |  |
|  | 7b3aca21f4 |  |
@@ -1,30 +0,0 @@
name: Nouvelle source
about: Proposer une nouvelle source
body:
  - type: markdown
    attributes:
      value: |
        Merci de proposer une nouvelle source :)
  - type: markdown
    attributes:
      value: |
        Cette issue a été créée par un **template** d'issue.
    visible: [content]
  - type: input
    id: contact
    attributes:
      label: Contact
      description: Comment pouvons-nous vous contacter?
      placeholder: ex. email@example.com
    validations:
      required: false
  - type: textarea
    id: description
    attributes:
      label: Description
      description: Décrivez la source que vous souhaitez voir supportée.
      placeholder: ex. J'aimerais voir supporté le site https://example.com
      value: |
        J'aimerais voir supporté le site ...
    validations:
      required: true
@@ -1,43 +0,0 @@
name: Lint and Format

on:
  push:
    branches:
      - dev
  pull_request:
    branches:
      - dev

env:
  SRC_DIRS: "src experimentations"

jobs:
  lint-format:
    name: Run Linter and Formatter
    runs-on: ubuntu-latest

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4

      - name: Run Ruff Fix
        uses: chartboost/ruff-action@v1
        with:
          src: ${{ env.SRC_DIRS }}
          args: --fix

      - name: Run Ruff Check
        uses: chartboost/ruff-action@v1
        with:
          src: ${{ env.SRC_DIRS }}

      - name: Run Black Formatting
        uses: psf/black@stable
        with:
          src: ${{ env.SRC_DIRS }}

      - name: Run Black Check
        uses: psf/black@stable
        with:
          src: ${{ env.SRC_DIRS }}
          options: --check
.gitignore (vendored): 4 changes
@@ -3,7 +3,6 @@ src/staticfiles/
src/mediafiles/
src/static/
src/media
src/fixtures

test-compose.yml

@@ -88,6 +87,3 @@ letsencrypt/
experimentations/cache/
experimentations/cache-augustes.ical
experimentations/events-augustes.json

# MacOS
.DS_Store
@@ -1,8 +0,0 @@
default_language_version:
  python: python3.13
repos:
  # Using this mirror lets us use mypyc-compiled black, which is about 2x faster
  - repo: https://github.com/psf/black-pre-commit-mirror
    rev: 25.1.0
    hooks:
      - id: black
Makefile: 43 changes
@@ -6,7 +6,6 @@ PROJECTNAME ?= agenda_culturel
APP_NAME := $(PROJECTNAME)
BACKEND_APP_NAME := $(APP_NAME)-backend

DOCKERCOMPOSE=$(shell if command -v docker-compose 2>&1 >/dev/null; then echo "docker-compose"; else echo "docker compose"; fi)
define HELP

Manage $(PROJECTNAME). Usage:

@@ -21,7 +20,6 @@ make build-dev Build and run dev environment
make stop-dev Stop dev environment
make stop-prod Stop prod environment
make build-prod Build and run prod environment
make restar-prod Restart prod environment
make all Show help

endef

@@ -32,7 +30,7 @@ help:
	@echo "$$HELP"

lint:
	@bash ./scripts/lint.sh
	@bash ./scripts/lint.sh

format:
	@bash ./scripts/format.sh

@@ -56,51 +54,26 @@ create-categories:
	docker exec -it $(BACKEND_APP_NAME) $(SHELL) "-c" \
	"python3 manage.py runscript create_categories"

create-reference-locations:
	docker exec -it $(BACKEND_APP_NAME) $(SHELL) "-c" \
	"python3 manage.py runscript create_reference_locations"

cleanup-unused-media:
	docker exec -it $(BACKEND_APP_NAME) $(SHELL) "-c" \
	"python3 manage.py cleanup_unused_media"

make-translations:
	docker exec -it $(BACKEND_APP_NAME) $(SHELL) "-c" \
	"python3 manage.py makemessages --locale fr"

build-translations:
	docker exec -it $(BACKEND_APP_NAME) $(SHELL) "-c" \
	"python3 manage.py compilemessages"

build-dev:
	DOCKER_BUILDKIT=1 COMPOSE_DOCKER_CLI_BUILD=1 $(DOCKERCOMPOSE) -f docker-compose.yml up --build -d
	DOCKER_BUILDKIT=1 COMPOSE_DOCKER_CLI_BUILD=1 docker-compose -f docker-compose.yml up --build -d

build-dev-log:
	DOCKER_BUILDKIT=1 COMPOSE_DOCKER_CLI_BUILD=1 $(DOCKERCOMPOSE) -f docker-compose.yml up --build
	DOCKER_BUILDKIT=1 COMPOSE_DOCKER_CLI_BUILD=1 docker-compose -f docker-compose.yml up --build

build-prod:
	DOCKER_BUILDKIT=1 COMPOSE_DOCKER_CLI_BUILD=1 $(DOCKERCOMPOSE) -f docker-compose.prod.yml up --build -d
	DOCKER_BUILDKIT=1 COMPOSE_DOCKER_CLI_BUILD=1 docker-compose -f docker-compose.prod.yml up --build -d

build-prod-log:
	DOCKER_BUILDKIT=1 COMPOSE_DOCKER_CLI_BUILD=1 $(DOCKERCOMPOSE) -f docker-compose.prod.yml up --build
	DOCKER_BUILDKIT=1 COMPOSE_DOCKER_CLI_BUILD=1 docker-compose -f docker-compose.prod.yml up --build

stop-dev:
	@$(DOCKERCOMPOSE) -f docker-compose.yml down
	@docker-compose -f docker-compose.yml down

stop-prod:
	@$(DOCKERCOMPOSE) -f docker-compose.prod.yml up

up-dev: #use up-dev to attach a running environment and have an access to the logs
	@$(DOCKERCOMPOSE) -f docker-compose.yml up

up-prod:
	@$(DOCKERCOMPOSE) -f docker-compose.prod.yml down
	@docker-compose -f docker-compose.prod.yml down

restart-backend-prod:
	$(DOCKERCOMPOSE) -f docker-compose.prod.yml restart backend

prod-restart:
	DOCKER_BUILDKIT=1 COMPOSE_DOCKER_CLI_BUILD=1 $(DOCKERCOMPOSE) -f docker-compose.prod.yml restart
	docker-compose -f docker-compose.prod.yml restart backend

all: help
README.md: 52 changes
@@ -1,18 +1,9 @@
# Agenda culturel

The cultural agenda is a Django project created from [Django Docker Quickstard](https://github.com/godd0t/django-docker-quickstart/) to ease its development and deployment. It is distributed under the AGPL license.

A demonstration instance is available at https://pommesdelune.fr/.

Among the tools and resources the cultural agenda relies on:

* [Django](https://www.djangoproject.com/)
* [redis](https://redis.io/)
* [celery](https://docs.celeryq.dev/en/stable/)
* [Selenium](https://www.selenium.dev/)
* [Feather icons](https://feathericons.com/)
* [Pico CSS](https://picocss.com/)

## Installation

To install a development version, refer to the [Django Docker Quickstard](https://github.com/godd0t/django-docker-quickstart/) documentation. In short:

@@ -24,51 +15,14 @@ We can also populate the categories with a basic set of categories

* ```make create-categories```

The reference locations used by geographic searches can likewise be populated with the command below, after optionally editing [communes.json](./src/scripts/communes.json), which as an example contains all the municipalities retrieved from [public.opendatasoft.com](https://public.opendatasoft.com/explore/dataset/georef-france-commune/export/?flg=fr-fr&disjunctive.reg_name&disjunctive.dep_name&disjunctive.arrdep_name&disjunctive.ze2020_name&disjunctive.bv2022_name&disjunctive.epci_name&disjunctive.ept_name&disjunctive.com_name&disjunctive.ze2010_name&disjunctive.com_is_mountain_area&sort=year&refine.dep_name=Puy-de-D%C3%B4me&location=9,45.51597,3.05969&basemap=jawg.light):

* ```make create-reference-locations```

## Using a SOCKS proxy

A SOCKS-style proxy for event imports can (for now) be enabled by hand:

* connect to the celery worker container: ```docker exec -it agenda_culturel-celery-worker bash```
* update the package lists: ```apt update```
* install the ssh client: ```apt install ssh-client```
* open an ssh socket: ```ssh -D 12345 USER@HOST```
* set the proxy flag in the constructor of [downloader.py](src/agenda_culturel/import_tasks/downloader.py), as sketched below.
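How the proxy is wired in is not shown in this diff; as a rough illustration only, a SOCKS-routed download in Python could look like the following. This is a hedged sketch: the `fetch` helper is hypothetical, the port mirrors the `ssh -D 12345` example above, and `requests` needs the `requests[socks]` extra installed; the real switch is the constructor flag in `downloader.py`.

```python
# Hypothetical sketch only: the actual switch is a constructor flag in
# src/agenda_culturel/import_tasks/downloader.py; names here are illustrative.
import requests

# socks5h also resolves DNS through the tunnel opened by `ssh -D 12345 USER@HOST`
SOCKS_PROXIES = {
    "http": "socks5h://127.0.0.1:12345",
    "https": "socks5h://127.0.0.1:12345",
}


def fetch(url, use_proxy=False):
    # route the request through the ssh tunnel when use_proxy is True
    proxies = SOCKS_PROXIES if use_proxy else None
    return requests.get(url, proxies=proxies, timeout=30)
```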
## Developer notes

To have the code checked before each commit, install ```pre-commit``` and run ```pre-commit install``` to enable the hook defined in ```.pre-commit-config.yaml```.

### Adding a new *custom* source

To add a new custom source (a test-script sketch follows this list):
- add a file in ```src/agenda_culturel/import_tasks/custom_extractors``` (or ```src/agenda_culturel/import_tasks/generic_extractors``` if the source format is reusable), taking inspiration from the existing files. The corresponding parent classes provide many helpers
- add a file in ```src/agenda_culturel/import_tasks/custom_extractors```, taking inspiration from the existing files. The corresponding parent classes provide many helpers
- take inspiration from the scripts in ```experimentations/``` to build your own test script
- once the import runs standalone in these experiments, it is time to add it to the website:
  - add an entry for this source to the ```RecurrentImport.PROCESSOR``` class in ```src/agenda_culturel/models.py``` so that it is offered to users
  - add the matching case to the ```run_recurrent_import``` function in ```src/agenda_culturel/celery.py``` so that the right extractor is launched
  - go to the site's administration page and add a recurrent import for this new source
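As a sketch of the test-script step, here is what such a script could look like for a hypothetical `mynewsource` module, following the pattern of the `experimentations/` scripts shown further down in this diff (the module name and URLs are placeholders, and the imports are simplified compared to the star imports those scripts use):

```python
#!/usr/bin/python3
# Hypothetical test script for a new custom extractor; `mynewsource` and the
# URLs are placeholders, the call pattern mirrors the experimentations/ scripts.
import json

from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
from src.agenda_culturel.import_tasks.custom_extractors import mynewsource

if __name__ == "__main__":
    u2e = URL2Events(SimpleDownloader(), mynewsource.CExtractor())
    try:
        # download the page, extract the events, and keep a local cache
        events = u2e.process(
            "https://example.com/agenda",
            "https://example.com/",
            cache="cache-mynewsource.html",
            default_values={},
            published=True,
        )
        with open("events-mynewsource.json", "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))
```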
### Restoring a production dump on a dev server

* on the production server:
  * ```docker exec -i agenda_culturel-backend python3 manage.py dumpdata --natural-foreign --natural-primary --format=json --exclude=admin.logentry --indent=2 > fixtures/postgres-backup-20241101.json``` (note that accounts are skipped here and will have to be recreated)
* on the dev server:
  * fetch the json dump: ```scp $SERVEUR:$PATH/fixtures/postgres-backup-20241101.json src/fixtures/```
  * ```scripts/reset-database.sh FIXTURE COMMIT``` where ```FIXTURE``` is the timestamp in the fixture name, and ```COMMIT``` is the ID of the git commit that was deployed in production when the fixture was created

The password of the root user, who has all permissions, can then be changed:

* ```docker exec -it agenda_culturel-backend python3 manage.py changepassword root```

Note that images are not included in the dump. Category images can be restored with:

* ```cp src/agenda_culturel/migrations/images/* src/media/```

### Completely resetting an instance

* ```docker compose down --rmi all --volumes```
* ```make build-dev```
@@ -5,11 +5,10 @@ WORKDIR /usr/src/app

RUN --mount=type=cache,target=/var/cache/apt \
    apt-get update && \
    apt-get install --no-install-recommends -y build-essential libpq-dev gettext chromium-driver gdal-bin fonts-symbola \
    apt-get install --no-install-recommends -y build-essential libpq-dev gettext chromium-driver \
    && rm -rf /var/lib/apt/lists/*


COPY src/requirements.txt ./requirements.txt

RUN --mount=type=cache,target=/root/.cache/pip \
@@ -5,9 +5,6 @@ http {
    default_type application/octet-stream;
    client_max_body_size 100M;

    gzip on;
    gzip_types text/plain text/css text/javascript;


    upstream backend {
        server backend:8000;

@@ -35,9 +32,5 @@ http {
        error_page 502 /static/html/500.html;
        error_page 503 /static/html/500.html;

        if ($http_user_agent ~* "Amazonbot|meta-externalagent|ClaudeBot|ahrefsbot|semrushbot") {
            return 444;
        }

    }
}
@@ -23,7 +23,7 @@ services:
    command: [ "/bin/bash", "/app/deployment/scripts/wait-db.sh", "/app/deployment/scripts/backend/start.sh" ]

  db:
    image: postgis/postgis:15-3.4-alpine
    image: postgres:15.2-alpine
    container_name: "${APP_NAME}-db"
    hostname: "${POSTGRES_HOST:-db}"
    volumes:

@@ -42,7 +42,6 @@ services:
  redis:
    container_name: "${APP_NAME}-redis"
    image: redis:latest
    command: ["redis-server", "--appendonly", "no", "--maxmemory", "500mb", "--maxmemory-policy", "allkeys-lru"]
    volumes:
      - redis_data:/data
@@ -23,7 +23,7 @@ services:
    command: [ "/bin/bash", "/app/deployment/scripts/backend/start.sh" ]

  db:
    image: postgis/postgis:15-3.4-alpine
    image: postgres:15.2-alpine
    container_name: "${APP_NAME}-db"
    hostname: "${POSTGRES_HOST:-db}"
    volumes:
@@ -1,44 +0,0 @@
#!/usr/bin/python3
# coding: utf-8

import os
import json
import sys

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *


if __name__ == "__main__":

    u2e = URL2Events(SimpleDownloader(), amisdutempsdescerises.CExtractor())
    url = "https://amisdutempsdescerises.org/page.php"
    url_human = "https://amisdutempsdescerises.org/"

    try:
        events = u2e.process(url, url_human, cache = "cache-amiscerices.xml", default_values = {"category": "Rencontres & Débats"}, published = True)

        exportfile = "events-amiscerices.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))
@@ -1,41 +0,0 @@
#!/usr/bin/python3
# coding: utf-8

import os
import json
import sys

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *


if __name__ == "__main__":

    u2e = URL2Events(ChromiumHeadlessDownloader(), arachnee.CExtractor())
    url = "https://www.arachnee-concerts.com/wp-admin/admin-ajax.php?action=movies-filter&per_page=9999&date=NaN.NaN.NaN&theatres=Clermont-Fd&cat=&sorting=&list_all_events=&current_page="
    url_human = "https://www.arachnee-concerts.com/agenda-des-concerts/Clermont-Fd/"

    try:
        events = u2e.process(url, url_human, cache = "cache-arachnee.html", default_values = {}, published = True)

        exportfile = "events-arachnee.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))
@@ -1,44 +0,0 @@
#!/usr/bin/python3
# coding: utf-8

import os
import json
import sys

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *


if __name__ == "__main__":

    u2e = URL2Events(ChromiumHeadlessDownloader(), billetterie_cf.CExtractor())
    url = "https://billetterie-c3c.clermont-ferrand.fr/"
    url_human = "https://billetterie-c3c.clermont-ferrand.fr/"

    try:
        events = u2e.process(url, url_human, cache = "cache-c3c.html", default_values = {"location": "La Cour des 3 Coquins"}, published = True)

        exportfile = "events-c3c.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))
@@ -16,12 +16,11 @@ parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors.fbevent import *
from src.agenda_culturel.import_tasks.extractor_facebook import *


@@ -29,8 +28,8 @@ from src.agenda_culturel.import_tasks.generic_extractors.fbevent import *

if __name__ == "__main__":

    u2e = URL2Events(ChromiumHeadlessDownloader(), CExtractor())
    url="https://www.facebook.com/events/3575802569389184/3575802576055850/?active_tab=about"
    u2e = URL2Events(ChromiumHeadlessDownloader(), FacebookEventExtractor(single_event=True))
    url="https://www.facebook.com/events/872781744074648"

    events = u2e.process(url, cache = "fb.html", published = True)
@@ -1,44 +0,0 @@
#!/usr/bin/python3
# coding: utf-8

import os
import json
import sys

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors import *


if __name__ == "__main__":

    u2e = URL2Events(ChromiumHeadlessDownloader(), fbevents.CExtractor())
    url = "https://www.facebook.com/laJeteeClermont/upcoming_hosted_events"
    url_human = "https://www.facebook.com/laJeteeClermont/upcoming_hosted_events"

    try:
        events = u2e.process(url, url_human, cache = "cache-lajetee-fb.html", default_values = {"location": "La Jetée"}, published = True)

        exportfile = "events-lajetee-fb.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))
@@ -1,44 +0,0 @@
#!/usr/bin/python3
# coding: utf-8

import os
import json
import sys

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *


if __name__ == "__main__":

    u2e = URL2Events(ChromiumHeadlessDownloader(), billetterie_cf.CExtractor())
    url = "https://billetterie-gds.clermont-ferrand.fr/"
    url_human = "https://billetterie-gds.clermont-ferrand.fr/"

    try:
        events = u2e.process(url, url_human, cache = "cache-gds.html", default_values = {}, published = True)

        exportfile = "events-gds.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))
@@ -16,12 +16,11 @@ parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors.ical import *
from src.agenda_culturel.import_tasks.extractor_ical import *


@@ -33,7 +32,7 @@ if __name__ == "__main__":
    url = "https://calendar.google.com/calendar/ical/programmation.lesaugustes%40gmail.com/public/basic.ics"
    url_human = "https://www.cafelesaugustes.fr/la-programmation/"

    events = u2e.process(url, url_human, cache = "cache-augustes.ical", default_values = {"category": "Sans catégorie", "location": "Café lecture les Augustes"}, published = True)
    events = u2e.process(url, url_human, cache = "cache-augustes.ical", default_values = {"category": "Autre", "location": "Café lecture les Augustes"}, published = True)

    exportfile = "events-augustes.json"
    print("Saving events to file {}".format(exportfile))
@@ -1,44 +0,0 @@
#!/usr/bin/python3
# coding: utf-8

import os
import json
import sys

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *


if __name__ == "__main__":

    u2e = URL2Events(SimpleDownloader(), laraymonde.CExtractor())
    url = "https://www.raymondbar.net/"
    url_human = "https://www.raymondbar.net/"

    try:
        events = u2e.process(url, url_human, cache = "cache-la-raymonde.html", default_values = {"location": "La Raymonde", "category": "Fêtes & Concerts"}, published = True)

        exportfile = "events-la-raymonde.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))
@@ -16,7 +16,6 @@ parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *

@@ -30,8 +29,8 @@ from src.agenda_culturel.import_tasks.custom_extractors import *
if __name__ == "__main__":

    u2e = URL2Events(SimpleDownloader(), lacomedie.CExtractor())
    url = "https://lacomediedeclermont.com/saison24-25/wp-admin/admin-ajax.php?action=load_dates_existantes"
    url_human = "https://lacomediedeclermont.com/saison24-25/"
    url = "https://lacomediedeclermont.com/saison23-24/wp-admin/admin-ajax.php?action=load_dates_existantes"
    url_human = "https://lacomediedeclermont.com/saison23-24/"

    try:
        events = u2e.process(url, url_human, cache = "cache-lacomedie.html", default_values = {"location": "La Comédie de Clermont"}, published = True)
@@ -16,7 +16,6 @@ parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *

@@ -34,7 +33,7 @@ if __name__ == "__main__":
    url_human = "https://www.lacoope.org/concerts-calendrier/"

    try:
        events = u2e.process(url, url_human, cache = "cache-lacoope.html", default_values = {"category": "Fêtes & Concerts", "location": "La Coopérative"}, published = True)
        events = u2e.process(url, url_human, cache = "cache-lacoope.html", default_values = {"category": "Concert", "location": "La Coopérative"}, published = True)

        exportfile = "events-lacoope.json"
        print("Saving events to file {}".format(exportfile))
@@ -16,7 +16,6 @@ parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *

@@ -30,8 +29,8 @@ from src.agenda_culturel.import_tasks.custom_extractors import *
if __name__ == "__main__":

    u2e = URL2Events(SimpleDownloader(), lapucealoreille.CExtractor())
    url = "https://www.lapucealoreille63.fr/agenda"
    url_human = "https://www.lapucealoreille63.fr/agenda"
    url = "https://www.lapucealoreille63.fr/programmation/"
    url_human = "https://www.lapucealoreille63.fr/programmation/"

    try:
        events = u2e.process(url, url_human, cache = "cache-lapucealoreille.xml", default_values = {}, published = True)
@@ -1,44 +0,0 @@
#!/usr/bin/python3
# coding: utf-8

import os
import json
import sys

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *


if __name__ == "__main__":

    u2e = URL2Events(SimpleDownloader(), wordpress_mec.CExtractor())
    url = "https://www.cabaretlepoulailler.fr/agenda/tout-lagenda/"
    url_human = "https://www.cabaretlepoulailler.fr/agenda/tout-lagenda/"

    try:
        events = u2e.process(url, url_human, cache = "cache-le-poulailler.html", default_values = {"location": "Le Poulailler"}, published = True)

        exportfile = "events-le-poulailler.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))
@@ -1,44 +0,0 @@
#!/usr/bin/python3
# coding: utf-8

import os
import json
import sys

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *


if __name__ == "__main__":

    u2e = URL2Events(SimpleDownloader(), lerio.CExtractor())
    url = "https://www.cinemalerio.com/evenements/"
    url_human = "https://www.cinemalerio.com/evenements/"

    try:
        events = u2e.process(url, url_human, cache = "cache-le-rio.html", default_values = {"location": "Cinéma le Rio", "category": "Cinéma"}, published = True)

        exportfile = "events-le-roi.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))
@@ -16,7 +16,6 @@ parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
@@ -1,44 +0,0 @@
#!/usr/bin/python3
# coding: utf-8

import os
import json
import sys

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors import *


if __name__ == "__main__":

    u2e = URL2Events(SimpleDownloader(), wordpress_mec.CExtractor())
    url = "https://www.lesvinzelles.com/index.php/programme/"
    url_human = "https://www.lesvinzelles.com/index.php/programme/"

    try:
        events = u2e.process(url, url_human, cache = "cache-les-vinzelles.html", default_values = {"location": "Les Vinzelles"}, published = True)

        exportfile = "events-les-vinzelles.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))
@@ -1,44 +0,0 @@
#!/usr/bin/python3
# coding: utf-8

import os
import json
import sys

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors import *


if __name__ == "__main__":

    u2e = URL2Events(SimpleDownloader(), iguana_agenda.CExtractor())
    url = "https://bibliotheques-clermontmetropole.eu/iguana/Service.PubContainer.cls?uuid=a4a1f992-06da-4ff4-9176-4af0a095c7d1"
    url_human = "https://bibliotheques-clermontmetropole.eu/iguana/www.main.cls?surl=AGENDA_Tout%20lagenda"

    try:
        events = u2e.process(url, url_human, cache = "cache-mediatheques.html", default_values = {}, published = True)

        exportfile = "events-mediatheques.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))
@@ -1,44 +0,0 @@
#!/usr/bin/python3
# coding: utf-8

import os
import json
import sys

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *


if __name__ == "__main__":

    u2e = URL2Events(SimpleDownloader(), mille_formes.CExtractor())
    url = "https://www.milleformes.fr/programme"
    url_human = "https://www.milleformes.fr/programme"

    try:
        events = u2e.process(url, url_human, cache = "cache-1000formes.html", default_values = {}, published = True)

        exportfile = "events-1000formes.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))
@@ -1,44 +0,0 @@
#!/usr/bin/python3
# coding: utf-8

import os
import json
import sys

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors import *


if __name__ == "__main__":

    u2e = URL2Events(SimpleDownloader(), mobilizon.CExtractor())
    url = "https://mobilizon.fr/@attac63/events?"
    url_human = "https://mobilizon.fr/@attac63/events"

    try:
        events = u2e.process(url, url_human, cache = "cache-attac63.html", default_values = {}, published = True)

        exportfile = "events-attac63.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))
@@ -1,44 +0,0 @@
#!/usr/bin/python3
# coding: utf-8

import os
import json
import sys

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")

from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors import *


if __name__ == "__main__":

    u2e = URL2Events(SimpleDownloader(), apidae_tourisme.CExtractor())
    url = "https://widgets.apidae-tourisme.com/filter.js?widget[id]=48"
    url_human = "https://ens.puy-de-dome.fr/agenda.html"

    try:
        events = u2e.process(url, url_human, cache = "cache-puydedome.html", default_values = {}, published = True)

        exportfile = "events-puydedome.json"
        print("Saving events to file {}".format(exportfile))
        with open(exportfile, "w") as f:
            json.dump(events, f, indent=4, default=str)
    except Exception as e:
        print("Exception: " + str(e))
@@ -1,94 +0,0 @@
#!/bin/sh


FIXTURE=$1
COMMIT=$2
FORCE=$3

help() {
    echo "USAGE: scripts/reset-database.sh [FIXTURE] [COMMIT]"
    echo " "
    echo "Parameters:"
    echo " FIXTURE A timestamp used in fixture name"
    echo " COMMIT A commit ID used by git checkout"
    echo " "
    echo "Example:"
    echo " scripts/reset-database.sh 20241110 cb69ece6ca5ba04e94dcc2758f53869c70224592"
}

bold=$(tput bold)
normal=$(tput sgr0)
echobold() {
    echo "${bold}$1${normal}"
}

if ! [ -n "$FORCE" ]; then
    nginx=`docker ps|grep nginx`
    if [ -n "$nginx" ]; then
        echo "WARNING: this script is probably run on a production server. Use a third parameter if you really want to run it."
        exit 3
    fi
fi

if ! [ -n "$FIXTURE" ]; then
    echo "No fixture defined. Abort."
    help
    exit 1
fi

if ! [ -n "$COMMIT" ]; then
    echo "No commit version defined. Abort."
    help
    exit 1
fi


FFILE=fixtures/postgres-backup-$FIXTURE.json

if ! [ -f "src/$FFILE" ]; then
    echo "ERROR: missing fixture file ($FFILE)"
    exit 2
fi



echo " "
echobold "WARNING: use Ctrl+C to stop the reset process since a 'no' answer cannot be detected."
echo " "

# remove all elements in database
echobold "Flush database"
docker exec -i agenda_culturel-backend python3 manage.py flush

# move back database structure to the original
echobold "Setup database structure to zero"
docker exec -i agenda_culturel-backend python3 manage.py migrate agenda_culturel zero

# reset code depending on a specific commit
echobold "Move back to the desired commit"
git checkout $COMMIT

# change database to reach this specific version
echobold "Setup database stucture according to the selected commit"
docker exec -i agenda_culturel-backend python3 manage.py migrate agenda_culturel

# remove all elements in database
echobold "Flush database"
docker exec -i agenda_culturel-backend python3 manage.py flush --no-input

# import data
echobold "Import data"
docker exec -i agenda_culturel-backend python3 manage.py loaddata --format=json $FFILE

# reset code to uptodate version
echobold "Move back to last commit"
git checkout main

# update database structure
echobold "Update database"
docker exec -i agenda_culturel-backend python3 manage.py migrate agenda_culturel

# copy media files
echobold "Copy media files"
cp src/agenda_culturel/migrations/images/* src/media/
@@ -3,15 +3,11 @@ from django import forms
from .models import (
    Event,
    Category,
    Tag,
    StaticContent,
    DuplicatedEvents,
    BatchImportation,
    RecurrentImport,
    Place,
    Message,
    ReferenceLocation,
    Organisation,
)
from django_better_admin_arrayfield.admin.mixins import DynamicArrayMixin
from django_better_admin_arrayfield.forms.widgets import DynamicArrayWidget

@@ -19,15 +15,11 @@ from django_better_admin_arrayfield.models.fields import DynamicArrayField


admin.site.register(Category)
admin.site.register(Tag)
admin.site.register(StaticContent)
admin.site.register(DuplicatedEvents)
admin.site.register(BatchImportation)
admin.site.register(RecurrentImport)
admin.site.register(Place)
admin.site.register(Message)
admin.site.register(ReferenceLocation)
admin.site.register(Organisation)


class URLWidget(DynamicArrayWidget):
@@ -1,18 +1,11 @@
from datetime import datetime, timedelta, date, time
import calendar
from django.db.models import Q, F
from django.db.models import Q
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from django.template.defaultfilters import date as _date
from django.http import Http404

from django.db.models import CharField
from django.db.models.functions import Lower

import logging

CharField.register_lookup(Lower)


logger = logging.getLogger(__name__)


@@ -27,8 +20,7 @@ def daterange(start, end, step=timedelta(1)):


class DayInCalendar:
    midnight = time(0, 0, 0)
    cats = None
    midnight = time(23, 59, 59)

    def __init__(self, d, on_requested_interval=True):
        self.date = d

@@ -37,13 +29,10 @@ class DayInCalendar:

        self.in_past = d < now
        self.today = d == now
        self.tomorrow = d == now + timedelta(days=+1)
        self.events = []
        self.on_requested_interval = on_requested_interval

        self.events_by_category = {}
        self.time_intervals = None
        self.id = d.strftime("%Y-%m-%d")

    def is_in_past(self):
        return self.in_past

@@ -51,9 +40,6 @@ class DayInCalendar:
    def is_today(self):
        return self.today

    def is_tomorrow(self):
        return self.tomorrow

    def is_ancestor_uuid_event_from_other(self, event):
        for e in self.events:
            if event.is_ancestor_by_uuid(e):
@@ -88,170 +74,30 @@ class DayInCalendar:
        self._add_event_internal(event)

    def _add_event_internal(self, event):
        from .models import Category
        from copy import copy

        # copy event
        local_event = copy(event)

        # set values
        if local_event.start_day != self.date:
            local_event.start_day = self.date
            local_event.start_time = None
        if local_event.end_day != self.date:
            local_event.end_day = None
            local_event.end_time = None

        # add event to the day
        self.events.append(local_event)

        # add in its category
        if local_event.category is None:
            cat = Category.default_name
        self.events.append(event)
        if event.category is None:
            if "" not in self.events_by_category:
                self.events_by_category[""] = []
            self.events_by_category[""].append(event)
        else:
            cat = local_event.category.name
            if cat not in self.events_by_category:
                self.events_by_category[cat] = []
            self.events_by_category[cat].append(local_event)
            if event.category.name not in self.events_by_category:
                self.events_by_category[event.category.name] = []
            self.events_by_category[event.category.name].append(event)

    def filter_events(self):
        self.events.sort(
            key=lambda e: (
                DayInCalendar.midnight if e.start_time is None else e.start_time
            )
            key=lambda e: DayInCalendar.midnight
            if e.start_time is None
            else e.start_time
        )
        self.today_night = False
        if self.is_today():
            self.today_night = True
            now = timezone.now()
            nday = now.date()
            ntime = now.time()
            found = False
            for idx, e in enumerate(self.events):
                if (nday < e.start_day) or (
                    nday == e.start_day and e.start_time and ntime <= e.start_time
                ):
                    self.events[idx].is_first_after_now = True
                    found = True
                    break
            if found:
                self.today_night = False

    def is_today_after_events(self):
        return self.is_today() and self.today_night

    def events_by_category_ordered(self):
        from .models import Category

        if DayInCalendar.cats is None:
            DayInCalendar.cats = Category.objects.order_by("position")
        result = []
        for c in DayInCalendar.cats:
            if c.name in self.events_by_category:
                result.append((c.name, self.events_by_category[c.name]))
        return result

    def build_time_intervals(
        self,
        all_day_name,
        all_day_short_name,
        interval_names,
        interval_short_names,
        interval_markers,
    ):
        self.time_intervals = [
            IntervalInDay(self.date, i, n[0], n[1])
            for i, n in enumerate(
                zip(
                    [all_day_name] + interval_names,
                    [all_day_short_name] + interval_short_names,
                )
            )
        ]

        for e in self.events:
            if e.start_time is None:
                self.time_intervals[0].add_event(e)
            else:
                dt = datetime.combine(e.start_day, e.start_time)
                ok = False
                for i in range(len(interval_markers)):
                    if dt < interval_markers[i]:
                        self.time_intervals[i + 1].add_event(e)
                        ok = True
                        break
                if not ok:
                    self.time_intervals[-1].add_event(e)

    def get_time_intervals(self):
        if self.time_intervals is None:
            if self.is_today():
                all_day_name = _("All day today")
                interval_names = [
                    _("This morning"),
                    _("This noon"),
                    _("This afternoon"),
                    _("This evening"),
                ]
            elif self.is_tomorrow():
                name = _("Tomorrow")
                all_day_name = _("All day tomorrow")
                interval_names = [
                    _("%s morning") % name,
                    _("%s noon") % name,
                    _("%s afternoon") % name,
                    _("%s evening") % name,
                ]
            else:
                name = _date(self.date, "l")
                all_day_name = _("All day %s") % name
                interval_names = [
                    _("%s morning") % name,
                    _("%s noon") % name,
                    _("%s afternoon") % name,
                    _("%s evening") % name,
                ]
            all_day_short_name = _("All day")
            interval_short_names = [
                _("Morning"),
                _("Noon"),
                _("Afternoon"),
                _("Evening"),
            ]
            interval_markers = [
                datetime.combine(self.date, time(h, m))
                for h, m in [(11, 30), (13, 0), (18, 0)]
            ]
            self.build_time_intervals(
                all_day_name,
                all_day_short_name,
                interval_names,
                interval_short_names,
                interval_markers,
            )

        return self.time_intervals


class IntervalInDay(DayInCalendar):

    def __init__(self, d, id, name, short_name):
        self.name = name
        self.short_name = short_name
        super().__init__(d)
        self.id = self.id + "-" + str(id)


class CalendarList:
    def __init__(
        self, firstdate, lastdate, filter=None, exact=False, ignore_dup=None, qs=None
    ):
    def __init__(self, firstdate, lastdate, filter=None, exact=False):
        self.firstdate = firstdate
        self.lastdate = lastdate
        self.now = date.today()
        self.filter = filter
        self.ignore_dup = ignore_dup
        self.qs = qs

        if exact:
            self.c_firstdate = self.firstdate
@@ -262,12 +108,6 @@ class CalendarList:
|
||||
# end the last day of the last week
|
||||
self.c_lastdate = lastdate + timedelta(days=6 - lastdate.weekday())
|
||||
|
||||
self.calendar_days = None
|
||||
|
||||
def is_today(self):
|
||||
return self.firstdate == date.today()
|
||||
|
||||
def build_internal(self):
|
||||
# create a list of DayInCalendars
|
||||
self.create_calendar_days()
|
||||
|
||||
@@ -278,12 +118,6 @@ class CalendarList:
|
||||
for i, c in self.calendar_days.items():
|
||||
c.filter_events()
|
||||
|
||||
def get_calendar_days(self):
|
||||
if self.calendar_days is None:
|
||||
self.build_internal()
|
||||
|
||||
return self.calendar_days
|
||||
|
||||
def today_in_calendar(self):
|
||||
return self.firstdate <= self.now and self.lastdate >= self.now
|
||||
|
||||
@@ -292,67 +126,23 @@ class CalendarList:
|
||||
|
||||
def fill_calendar_days(self):
|
||||
if self.filter is None:
|
||||
if self.qs is None:
|
||||
from .models import Event
|
||||
from .models import Event
|
||||
|
||||
qs = Event.objects.all()
|
||||
else:
|
||||
qs = self.qs
|
||||
qs = Event.objects.all()
|
||||
else:
|
||||
qs = self.filter.qs
|
||||
|
||||
if self.ignore_dup:
|
||||
qs = qs.exclude(other_versions=self.ignore_dup)
|
||||
startdatetime = timezone.make_aware(
|
||||
datetime.combine(self.c_firstdate, time.min),
|
||||
timezone.get_default_timezone(),
|
||||
)
|
||||
lastdatetime = timezone.make_aware(
|
||||
datetime.combine(self.c_lastdate, time.max), timezone.get_default_timezone()
|
||||
)
|
||||
qs = (
|
||||
qs.filter(
|
||||
(
|
||||
Q(recurrences__isnull=False)
|
||||
& (
|
||||
Q(recurrence_dtend__isnull=True)
|
||||
& Q(recurrence_dtstart__isnull=False)
|
||||
& Q(recurrence_dtstart__lte=lastdatetime)
|
||||
)
|
||||
| (
|
||||
Q(recurrence_dtend__isnull=False)
|
||||
& ~(
|
||||
Q(recurrence_dtstart__gt=lastdatetime)
|
||||
| Q(recurrence_dtend__lt=startdatetime)
|
||||
)
|
||||
)
|
||||
)
|
||||
| (
|
||||
Q(
|
||||
start_day__lte=self.c_lastdate
|
||||
) # start before the end of the desired period
|
||||
& (
|
||||
(
|
||||
Q(end_day__isnull=True) & Q(start_day__gte=self.c_firstdate)
|
||||
) # end after the begining of desired period
|
||||
| (Q(end_day__isnull=False) & Q(end_day__gte=self.c_firstdate))
|
||||
)
|
||||
startdatetime = datetime.combine(self.c_firstdate, time.min)
|
||||
        lastdatetime = datetime.combine(self.c_lastdate, time.max)

        self.events = qs.filter(
            (Q(recurrence_dtend__isnull=True) & Q(recurrence_dtstart__lte=lastdatetime))
            | (
                Q(recurrence_dtend__isnull=False)
                & ~(
                    Q(recurrence_dtstart__gt=lastdatetime)
                    | Q(recurrence_dtend__lt=startdatetime)
                )
            )
            .filter(
                Q(other_versions__isnull=True)
                | Q(other_versions__representative=F("pk"))
                | Q(other_versions__representative__isnull=True)
            )
            .order_by("start_time", "title__unaccent__lower")
        )

        qs = (
            qs.select_related("category")
            .select_related("other_versions")
            .select_related("other_versions__representative")
        )
        self.events = qs
        ).order_by("start_time")

        firstdate = datetime.fromordinal(self.c_firstdate.toordinal())
        if firstdate.tzinfo is None or firstdate.tzinfo.utcoffset(firstdate) is None:
@@ -363,21 +153,9 @@ class CalendarList:
        lastdate = timezone.make_aware(lastdate, timezone.get_default_timezone())

        for e in self.events:
            if e.recurrences is None:
                if e.end_day is None:
                    if e.start_day >= self.firstdate and e.start_day <= self.lastdate:
                        self.calendar_days[e.start_day.__str__()].add_event(e)
                else:
                    for d in daterange(
                        max(e.start_day, self.firstdate), min(e.end_day, self.lastdate)
                    ):
                        self.calendar_days[d.__str__()].add_event(e)
            else:
                for e_rec in e.get_recurrences_between(firstdate, lastdate):
                    end = e_rec.start_day if e_rec.end_day is None else e_rec.end_day
                    for d in daterange(
                        max(e_rec.start_day, self.firstdate), min(end, self.lastdate)
                    ):
                for e_rec in e.get_recurrences_between(firstdate, lastdate):
                    for d in daterange(e_rec.start_day, e_rec.end_day):
                        if d.__str__() in self.calendar_days:
                            self.calendar_days[d.__str__()].add_event(e_rec)

    def create_calendar_days(self):
@@ -395,37 +173,14 @@ class CalendarList:
        return hasattr(self, "month")

    def calendar_days_list(self):
        return list(self.get_calendar_days().values())
        return list(self.calendar_days.values())

    def nb_days(self):
        return (self.c_lastdate - self.c_firstdate).days + 1

    def time_intervals_list(self, onlyfirst=False):
        ds = self.calendar_days_list()
        result = []
        for d in ds:
            tis = d.get_time_intervals()
            for t in tis:
                if len(t.events) > 0:
                    result.append(t)
                    if onlyfirst:
                        break
        return result

    def time_intervals_list_first(self):
        return self.time_intervals_list(True)

    def export_to_ics(self, request):
        from .models import Event

        events = [
            event for day in self.get_calendar_days().values() for event in day.events
        ]
        return Event.export_to_ics(events, request)
    def get_events(self):
        return [event for jour in self.calendar_days_list() for event in jour.events]


class CalendarMonth(CalendarList):
    def __init__(self, year, month, filter, qs=None):
    def __init__(self, year, month, filter):
        self.year = year
        self.month = month
        r = calendar.monthrange(year, month)
@@ -433,7 +188,7 @@ class CalendarMonth(CalendarList):
        first = date(year, month, 1)
        last = date(year, month, r[1])

        super().__init__(first, last, filter, qs)
        super().__init__(first, last, filter)

    def get_month_name(self):
        return self.firstdate.strftime("%B")
@@ -446,17 +201,14 @@ class CalendarMonth(CalendarList):


class CalendarWeek(CalendarList):
    def __init__(self, year, week, filter, qs=None):
    def __init__(self, year, week, filter):
        self.year = year
        self.week = week

        try:
            first = date.fromisocalendar(self.year, self.week, 1)
            last = date.fromisocalendar(self.year, self.week, 7)
        except Exception:
            raise Http404()
        first = date.fromisocalendar(self.year, self.week, 1)
        last = date.fromisocalendar(self.year, self.week, 7)

        super().__init__(first, last, filter, qs)
        super().__init__(first, last, filter)

    def next_week(self):
        return self.firstdate + timedelta(days=7)
@@ -466,8 +218,8 @@ class CalendarWeek(CalendarList):


class CalendarDay(CalendarList):
    def __init__(self, date, filter=None, qs=None):
        super().__init__(date, date, filter=filter, qs=qs, exact=True)
    def __init__(self, date, filter=None):
        super().__init__(date, date, filter, exact=True)

    def get_events(self):
        return self.calendar_days_list()[0].events
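Throughout the block above, multi-day and recurring events are clipped to the visible window before being registered day by day. The pattern in isolation (names are placeholders; daterange is assumed to yield every date of the closed interval):

    start = max(event_start_day, firstdate)  # never begin before the window
    end = min(event_end_day, lastdate)       # never run past the window
    for d in daterange(start, end):
        calendar_days[str(d)].add_event(e)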
@@ -1,23 +1,16 @@
import os
import json

from celery import Celery, Task, chain
from celery import Celery
from celery.schedules import crontab
from celery.utils.log import get_task_logger
from celery.exceptions import MaxRetriesExceededError
import time as time_
from django.conf import settings
from celery.signals import worker_ready

from contextlib import contextmanager

from .import_tasks.extractor import Extractor
from .import_tasks.importer import URL2Events
from .import_tasks.downloader import SimpleDownloader, ChromiumHeadlessDownloader
from .import_tasks.downloader import *
from .import_tasks.extractor import *
from .import_tasks.importer import *
from .import_tasks.extractor_ical import *
from .import_tasks.custom_extractors import *
from .import_tasks.generic_extractors import *

from django.core.cache import cache

# Set the default Django settings module for the 'celery' program.
APP_ENV = os.getenv("APP_ENV", "dev")
@@ -25,7 +18,6 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", f"agenda_culturel.settings.{APP_

app = Celery("agenda_culturel")


logger = get_task_logger(__name__)


@@ -38,33 +30,6 @@ app.config_from_object("django.conf:settings", namespace="CELERY")
# Load task modules from all registered Django apps.
app.autodiscover_tasks()

LOCK_EXPIRE = 60 * 10  # Lock expires in 10 minutes


@contextmanager
def memcache_chromium_lock(oid):
    lock_id = "chromium-lock"
    timeout_at = time_.monotonic() + LOCK_EXPIRE - 3
    # cache.add fails if the key already exists
    status = cache.add(lock_id, oid, LOCK_EXPIRE)
    try:
        yield status
    finally:
        # memcache delete is very slow, but we have to use it to take
        # advantage of using add() for atomic locking
        if time_.monotonic() < timeout_at and status:
            # don't release the lock if we exceeded the timeout
            # to lessen the chance of releasing an expired lock
            # owned by someone else
            # also don't release the lock if we didn't acquire it
            cache.delete(lock_id)


@contextmanager
def free_memecache_chromium_lock():
    lock_id = "chromium-lock"
    cache.delete(lock_id)
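cache.add is atomic: it only succeeds when the key is absent, so at most one worker can hold chromium-lock at a time, and the finally block releases the lock only when it was actually acquired and has not expired. A minimal usage sketch mirroring how the tasks below use it (do_chromium_work is a hypothetical placeholder):

    # self is assumed to be a bound Celery task (bind=True)
    with memcache_chromium_lock(self.app.oid) as acquired:
        if acquired:
            do_chromium_work()  # hypothetical Chromium-bound job
        else:
            raise self.retry(countdown=120)  # another worker holds the lock: retry later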
def close_import_task(taskid, success, error_message, importer):
    from agenda_culturel.models import BatchImportation
@@ -73,15 +38,14 @@ def close_import_task(taskid, success, error_message, importer):
    task.status = (
        BatchImportation.STATUS.SUCCESS if success else BatchImportation.STATUS.FAILED
    )
    task.nb_initial = 0 if importer is None else importer.get_nb_events()
    task.nb_imported = 0 if importer is None else importer.get_nb_imported_events()
    task.nb_updated = 0 if importer is None else importer.get_nb_updated_events()
    task.nb_removed = 0 if importer is None else importer.get_nb_removed_events()
    task.nb_initial = importer.get_nb_events()
    task.nb_imported = importer.get_nb_imported_events()
    task.nb_updated = importer.get_nb_updated_events()
    task.nb_removed = importer.get_nb_removed_events()

    fields = ["status", "nb_initial", "nb_updated", "nb_imported", "nb_removed"]
    if not success:
        logger.error(error_message)
        task.error_message = str(error_message)[:512]
        task.error_message = error_message
        fields.append("error_message")
    task.save(update_fields=fields)

@@ -105,46 +69,41 @@ def import_events_from_json(self, json):

    # finally, close task
    close_import_task(self.request.id, success, error_message, importer)
    """except Exception as e:
        logger.error(e)
        close_import_task(self.request.id, False, e, importer)"""


class ChromiumTask(Task):
    _chm = None

    @property
    def chromiumDownloader(self):
        if self._chm is None:
            self._chm = ChromiumHeadlessDownloader()
        return self._chm

    def restartDownloader(self):
        logger.info("Restart selenium")
        if self._chm is not None:
            del self._chm
        self._chm = ChromiumHeadlessDownloader()


def run_recurrent_import_internal(rimport, downloader, req_id):
@app.task(bind=True)
def run_recurrent_import(self, pk):
    from agenda_culturel.models import RecurrentImport, BatchImportation
    from .db_importer import DBImporterEvents

    logger.info("Run recurrent import: {}".format(req_id))
    logger.info("Run recurrent import: {}".format(self.request.id))

    # get the recurrent import
    rimport = RecurrentImport.objects.get(pk=pk)

    # create a batch importation
    importation = BatchImportation(recurrentImport=rimport, celery_id=req_id)
    importation = BatchImportation(recurrentImport=rimport, celery_id=self.request.id)
    # save batch importation
    importation.save()

    # create an importer
    importer = DBImporterEvents(req_id)
    importer = DBImporterEvents(self.request.id)

    # prepare downloading and extracting processes
    downloader = (
        SimpleDownloader()
        if rimport.downloader == RecurrentImport.DOWNLOADER.SIMPLE
        else ChromiumHeadlessDownloader()
    )
    if rimport.processor == RecurrentImport.PROCESSOR.ICAL:
        extractor = ical.ICALExtractor()
        extractor = ICALExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.ICALNOBUSY:
        extractor = ical.ICALNoBusyExtractor()
        extractor = ICALNoBusyExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.ICALNOVC:
        extractor = ical.ICALNoVCExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.ICALNAIVETZ:
        extractor = ical.ICALNaiveTimezone()
        extractor = ICALNoVCExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.LACOOPE:
        extractor = lacoope.CExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.LACOMEDIE:
@@ -153,34 +112,12 @@ def run_recurrent_import_internal(rimport, downloader, req_id):
        extractor = lefotomat.CExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.LAPUCEALOREILLE:
        extractor = lapucealoreille.CExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.MECWORDPRESS:
        extractor = wordpress_mec.CExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.FBEVENTS:
        extractor = fbevents.CExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.BILLETTERIECF:
        extractor = billetterie_cf.CExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.ARACHNEE:
        extractor = arachnee.CExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.LERIO:
        extractor = lerio.CExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.LARAYMONDE:
        extractor = laraymonde.CExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.APIDAE:
        extractor = apidae_tourisme.CExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.IGUANA:
        extractor = iguana_agenda.CExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.MILLEFORMES:
        extractor = mille_formes.CExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.AMISCERISES:
        extractor = amisdutempsdescerises.CExtractor()
    elif rimport.processor == RecurrentImport.PROCESSOR.MOBILIZON:
        extractor = mobilizon.CExtractor()
    else:
        extractor = None

    if extractor is None:
        logger.error("Unknown extractor")
        close_import_task(req_id, False, "Unknown extractor", importer)
        close_import_task(self.request.id, False, "Unknown extractor", importer)

    # set parameters
    u2e = URL2Events(downloader, extractor)
@@ -190,29 +127,16 @@ def run_recurrent_import_internal(rimport, downloader, req_id):
    location = rimport.defaultLocation
    tags = rimport.defaultTags
    published = rimport.defaultPublished
    organisers = (
        [] if rimport.defaultOrganiser is None else [rimport.defaultOrganiser.pk]
    )

    try:
        # get events from website
        events = u2e.process(
            url,
            browsable_url,
            default_values={
                "category": category,
                "location": location,
                "tags": tags,
                "organisers": organisers,
            },
            default_values={"category": category, "location": location, "tags": tags},
            published=published,
        )

        # force location if required
        if rimport.forceLocation and location:
            for i, e in enumerate(events["events"]):
                events["events"][i]["location"] = location

        # convert it to json
        json_events = json.dumps(events, default=str)

@@ -220,346 +144,55 @@ def run_recurrent_import_internal(rimport, downloader, req_id):
        success, error_message = importer.import_events(json_events)

        # finally, close task
        close_import_task(req_id, success, error_message, importer)
        return True
        close_import_task(self.request.id, success, error_message, importer)
    except Exception as e:
        logger.error(e)
        close_import_task(req_id, False, e, importer)
        return False
@app.task(base=ChromiumTask, bind=True)
def run_recurrent_import(self, pklist):
    from agenda_culturel.models import RecurrentImport

    if isinstance(pklist, list):
        pk = pklist[0]
        is_list = True
    else:
        is_list = False
        pk = pklist

    # get the recurrent import
    rimport = RecurrentImport.objects.get(pk=pk)

    # prepare downloading and extracting processes
    if rimport.downloader == RecurrentImport.DOWNLOADER.SIMPLE:
        downloader = SimpleDownloader()
    elif rimport.downloader == RecurrentImport.DOWNLOADER.CHROMIUMHEADLESS:
        downloader = self.chromiumDownloader
        downloader.pause = False
    else:
        downloader = self.chromiumDownloader
        downloader.pause = True

    # only one thread using Chromium can run at a time,
    # to prevent from errors (including strange Facebook errors)
    if rimport.downloader in [
        RecurrentImport.DOWNLOADER.CHROMIUMHEADLESS,
        RecurrentImport.DOWNLOADER.CHROMIUMHEADLESSPAUSE,
    ]:
        with memcache_chromium_lock(self.app.oid) as acquired:
            if acquired:
                valid = run_recurrent_import_internal(
                    rimport, downloader, self.request.id
                )
                if not valid:
                    self.restartDownloader()
                return pklist[1:] if is_list else True
    else:
        valid = run_recurrent_import_internal(rimport, downloader, self.request.id)
        if not valid:
            self.restartDownloader()
        return pklist[1:] if is_list else True

    try:
        # if chromium is locked, we wait before retrying
        raise self.retry(countdown=120)
    except MaxRetriesExceededError as e:
        logger.error(e)
        close_import_task(self.request.id, False, e, None)
        return pklist[1:] if is_list else False


def run_recurrent_imports_from_list(pklist):

    tasks = chain(
        run_recurrent_import.s(pklist) if i == 0 else run_recurrent_import.s()
        for i in range(len(pklist))
    )
    tasks.delay()
    close_import_task(self.request.id, False, e, importer)
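Each task in this chain returns pklist[1:], and Celery passes a task's return value as the first argument of the next task in the chain, so the list shrinks by one import per step and only the first signature carries the initial list. The same pattern in isolation (process and handle are hypothetical):

    from celery import chain

    @app.task(bind=True)
    def process(self, pks):
        handle(pks[0])  # hypothetical per-item work
        return pks[1:]  # becomes the argument of the next chained task

    chain(process.s([3, 5, 8]) if i == 0 else process.s() for i in range(3)).delay()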
@app.task(bind=True)
def daily_imports(self):
    from agenda_culturel.models import RecurrentImport

    logger.info("Everyday imports")
    logger.info("Imports quotidiens")
    imports = RecurrentImport.objects.filter(
        recurrence=RecurrentImport.RECURRENCE.DAILY
    ).order_by("pk")

    run_recurrent_imports_from_list([imp.pk for imp in imports])


SCREENSHOT_FILE = settings.MEDIA_ROOT + "/screenshot.png"


@app.task(bind=True)
def screenshot(self):
    downloader = ChromiumHeadlessDownloader(noimage=False)
    downloader.screenshot("https://pommesdelune.fr", SCREENSHOT_FILE)


@worker_ready.connect
def at_start(sender, **k):
    logger.info("Worker is ready")
    # create screenshot
    if not os.path.isfile(SCREENSHOT_FILE):
        logger.info("Init screenshot file")
        with sender.app.connection() as conn:
            sender.app.send_task(
                "agenda_culturel.celery.screenshot", None, connection=conn
            )
    else:
        logger.info("Screenshot file already exists")

    # cancel running tasks
    from agenda_culturel.models import BatchImportation

    logger.info("Cancel running importation tasks")
    BatchImportation.objects.filter(status=BatchImportation.STATUS.RUNNING).update(
        status=BatchImportation.STATUS.CANCELED
    )

    for imp in imports:
        run_recurrent_import.delay(imp.pk)


@app.task(bind=True)
def run_all_recurrent_imports(self, only_fb=False):
def run_all_recurrent_imports(self):
    from agenda_culturel.models import RecurrentImport

    logger.info("Run all imports")
    if only_fb:
        imports = RecurrentImport.objects.filter(
            processor=RecurrentImport.PROCESSOR.FBEVENTS
        ).order_by("pk")
    else:
        imports = RecurrentImport.objects.all().order_by("pk")
    logger.info("Imports complets")
    imports = RecurrentImport.objects.all()

    run_recurrent_imports_from_list([imp.pk for imp in imports])


@app.task(bind=True)
def run_all_recurrent_imports_failed(self):
    from agenda_culturel.models import RecurrentImport, BatchImportation

    logger.info("Run only failed imports")
    imports = RecurrentImport.objects.all().order_by("pk")
    imports = [(imp.pk, imp.last_import()) for imp in imports]

    run_recurrent_imports_from_list(
        [
            imp[0]
            for imp in imports
            if (imp[1] is not None) and imp[1].status == BatchImportation.STATUS.FAILED
        ]
    )


@app.task(bind=True)
def run_all_recurrent_imports_canceled(self):
    from agenda_culturel.models import RecurrentImport, BatchImportation

    logger.info("Run only canceled imports")
    imports = RecurrentImport.objects.all().order_by("pk")
    imports = [(imp.pk, imp.last_import()) for imp in imports]

    run_recurrent_imports_from_list(
        [
            imp[0]
            for imp in imports
            if (imp[1] is not None)
            and imp[1].status == BatchImportation.STATUS.CANCELED
        ]
    )
    for imp in imports:
        run_recurrent_import.delay(imp.pk)


@app.task(bind=True)
def weekly_imports(self):
    from agenda_culturel.models import RecurrentImport

    logger.info("Weekly imports")
    logger.info("Imports hebdomadaires")
    imports = RecurrentImport.objects.filter(
        recurrence=RecurrentImport.RECURRENCE.WEEKLY
    ).order_by("pk")

    run_recurrent_imports_from_list([imp.pk for imp in imports])
@app.task(base=ChromiumTask, bind=True)
def import_events_from_url(
    self,
    urls,
    cat=None,
    tags=None,
    force=False,
    user_id=None,
    email=None,
    comments=None,
):
    from .db_importer import DBImporterEvents
    from agenda_culturel.models import BatchImportation
    from agenda_culturel.models import Event

    if isinstance(urls, list):
        url = urls[0]
        is_list = True
    else:
        is_list = False
        url = urls

    with memcache_chromium_lock(self.app.oid) as acquired:
        if acquired:

            logger.info(
                "URL import: {}".format(self.request.id) + " force " + str(force)
            )

            # clean url
            url = Extractor.clean_url(url)

            # we check if the url is known
            existing = None if force else Event.objects.filter(uuids__contains=[url])
            # if it's unknown
            if force or len(existing) == 0:

                # create an importer
                importer = DBImporterEvents(self.request.id)

                # create a batch importation
                importation = BatchImportation(
                    url_source=url, celery_id=self.request.id
                )
                # save batch importation
                importation.save()

                try:
                    ## create loader
                    u2e = URL2Events(ChromiumHeadlessDownloader(), single_event=True)
                    # set default values
                    values = {}
                    if cat is not None:
                        values["category"] = cat
                    if tags is not None:
                        values["tags"] = tags
                    if email is not None:
                        values["email"] = email
                    if comments is not None:
                        values["comments"] = comments

                    # get event
                    events = u2e.process(url, published=False, default_values=values)

                    if events:
                        # convert it to json
                        json_events = json.dumps(events, default=str)

                        # import events (from json)
                        success, error_message = importer.import_events(
                            json_events, user_id
                        )

                        # finally, close task
                        close_import_task(
                            self.request.id, success, error_message, importer
                        )
                    else:
                        close_import_task(
                            self.request.id, False, "Cannot find any event", importer
                        )
                except Exception as e:
                    logger.error(e)
                    close_import_task(self.request.id, False, e, importer)

            return urls[1:] if is_list else True

    # if chromium is locked, we wait 30 seconds before retrying
    raise self.retry(countdown=30)


@app.task(base=ChromiumTask, bind=True)
def import_events_from_urls(
    self, urls_cat_tags, user_id=None, email=None, comments=None
):
    for ucat in urls_cat_tags:
        if ucat is not None:
            url = ucat[0]
            cat = ucat[1]
            tags = ucat[2]

            import_events_from_url.delay(
                url, cat, tags, user_id=user_id, email=email, comments=comments
            )
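import_events_from_urls fans a list of (url, category, tags) triples out to import_events_from_url, one Celery task per entry; None entries are skipped. A hedged example call (all values are invented):

    import_events_from_urls.delay(
        [
            ("https://example.com/event/1", None, ["concert"]),
            ("https://example.com/event/2", None, None),
        ],
        user_id=42,
    )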
@app.task(base=ChromiumTask, bind=True)
def update_orphan_pure_import_events(self):
    from agenda_culturel.models import RecurrentImport
    from agenda_culturel.models import Event
    from django.db.models import Q, F

    # get all recurrent sources
    srcs = RecurrentImport.objects.all().values_list("source")
    today = date.today()
    # get all events in future with a source and not related to a recurrent import
    urls = (
        Event.objects.filter(Q(start_day__gte=today))
        .filter(
            (
                Q(import_sources__isnull=False)
                & (
                    Q(modified_date__isnull=True)
                    | Q(modified_date__lte=F("imported_date"))
                )
            )
            & ~Q(import_sources__overlap=srcs)
        )
        .values_list("import_sources", flat=True)
    )
    # get urls
    urls = [url_l[0] for url_l in urls if len(url_l) > 0]

    # run tasks as a chain
    tasks = chain(
        (
            import_events_from_url.s(urls, force=True)
            if i == 0
            else import_events_from_url.s(force=True)
        )
        for i in range(len(urls))
    )
    tasks.delay()
    for imp in imports:
        run_recurrent_import.delay(imp.pk)


app.conf.beat_schedule = {
    "daily_orphans_update": {
        "task": "agenda_culturel.celery.update_orphan_pure_import_events",
        # Update of orphan events at 2:22 a.m.
        "schedule": crontab(hour=2, minute=22),
    },
    "daily_imports": {
        "task": "agenda_culturel.celery.daily_imports",
        # Daily imports at 3:14 a.m.
        "schedule": crontab(hour=3, minute=14),
    },
    "daily_import_failed": {
        "task": "agenda_culturel.celery.run_all_recurrent_imports_failed",
        # As a workaround for FB restrictions
        "schedule": crontab(hour=5, minute=5),
    },
    "daily_screenshot": {
        "task": "agenda_culturel.celery.screenshot",
        "schedule": crontab(hour=3, minute=3),
    },
    "weekly_imports": {
        "task": "agenda_culturel.celery.weekly_imports",
        # Daily imports on Mondays at 2:22 a.m.
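The hunk is cut off before the weekly_imports schedule value, which is left as-is above. Given its comment (Mondays at 2:22 a.m.), the entry presumably ends like the sketch below; the day_of_week argument is an assumption, not shown in the diff:

    # Assumed completion of the truncated entry (not part of the diff):
    "schedule": crontab(hour=2, minute=22, day_of_week="mon"),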
@@ -11,7 +11,6 @@ class DBImporterEvents:
    def __init__(self, celery_id):
        self.celery_id = celery_id
        self.error_message = ""
        self.user_id = None
        self.init_result_properties()
        self.today = timezone.now().date().isoformat()

@@ -35,18 +34,15 @@ class DBImporterEvents:
    def get_nb_removed_events(self):
        return self.nb_removed

    def import_events(self, json_structure, user_id=None):
    def import_events(self, json_structure):
        print(json_structure)
        self.init_result_properties()
        self.user_id = user_id

        try:
            structure = json.loads(json_structure)
        except Exception:
        except:
            return (False, "JSON file is not correctly structured")

        if len(structure) == 0:
            return (True, "")

        if "header" not in structure:
            return (False, "JSON is not correctly structured: missing header")
        if "events" not in structure:
@@ -72,10 +68,6 @@ class DBImporterEvents:
                # conversion to Event, and return an error if it failed
                if not self.load_event(event):
                    return (False, self.error_message)
            else:
                logger.warning(
                    "Event in the past, will not be imported: {}".format(event)
                )

        # finally save the loaded events in database
        self.save_imported()
@@ -100,9 +92,7 @@ class DBImporterEvents:

    def save_imported(self):
        self.db_event_objects, self.nb_updated, self.nb_removed = Event.import_events(
            self.event_objects,
            remove_missing_from_source=self.url,
            user_id=self.user_id,
            self.event_objects, remove_missing_from_source=self.url
        )

    def is_valid_event_structure(self, event):
@@ -120,7 +110,7 @@ class DBImporterEvents:

    def load_event(self, event):
        if self.is_valid_event_structure(event):
            logger.info(
            logger.warning(
                "Valid event: {} {}".format(event["last_modified"], event["title"])
            )
            event_obj = Event.from_structure(event, self.url)
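import_events parses the JSON produced by the extractors; the guards above show it must be a dict carrying header and events keys. A minimal sketch of an accepted payload (only header/events and the last_modified/title fields are confirmed by the checks above, the rest is illustrative):

    structure = {
        "header": {"url": "https://example.com/agenda"},  # inner fields assumed
        "events": [
            {"title": "Concert", "start_day": "2025-06-12", "last_modified": "2025-06-01"},
        ],
    }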
@@ -1,633 +0,0 @@
import django_filters
from django.utils.translation import gettext_lazy as _
from django import forms
from django.contrib.postgres.search import SearchQuery, SearchHeadline
from django.db.models import Q, F
from datetime import date, timedelta
from urllib.parse import urlparse, parse_qs, urlencode

from django.http import QueryDict
from django.contrib.gis.measure import D


from .models import (
    ReferenceLocation,
    RecurrentImport,
    Tag,
    Event,
    Category,
    Message,
    DuplicatedEvents,
)


class EventFilter(django_filters.FilterSet):
    RECURRENT_CHOICES = [
        ("remove_recurrent", "Masquer les événements récurrents"),
        ("only_recurrent", "Montrer uniquement les événements récurrents"),
    ]

    DISTANCE_CHOICES = [5, 10, 15, 30]

    position = django_filters.ModelChoiceFilter(
        label="À proximité de",
        method="no_filter",
        empty_label=_("Select a location"),
        to_field_name="slug",
        queryset=ReferenceLocation.objects.filter(main__gt=0).order_by(
            "-main", "name__unaccent"
        ),
    )

    radius = django_filters.ChoiceFilter(
        label="Dans un rayon de",
        method="no_filter",
        choices=[(x, str(x) + " km") for x in DISTANCE_CHOICES],
        null_label=None,
        empty_label=None,
    )

    exclude_tags = django_filters.MultipleChoiceFilter(
        label="Exclure les étiquettes",
        choices=[],
        lookup_expr="icontains",
        field_name="tags",
        exclude=True,
        widget=forms.SelectMultiple,
    )

    tags = django_filters.MultipleChoiceFilter(
        label="Inclure les étiquettes",
        choices=[],
        lookup_expr="icontains",
        conjoined=True,
        field_name="tags",
        widget=forms.SelectMultiple,
    )

    recurrences = django_filters.ChoiceFilter(
        label="Inclure la récurrence",
        choices=RECURRENT_CHOICES,
        method="filter_recurrences",
    )

    status = django_filters.MultipleChoiceFilter(
        label="Filtrer par status",
        choices=Event.STATUS.choices,
        field_name="status",
        widget=forms.CheckboxSelectMultiple,
    )

    class Meta:
        model = Event
        fields = ["tags", "exclude_tags", "status", "recurrences"]

    def __init__(self, *args, **kwargs):
        # urls were using pk, now we moved to slug
        if len(args) > 0 and "position" in args[0] and args[0]["position"].isdigit():
            args[0]._mutable = True
            el = (
                ReferenceLocation.objects.filter(pk=int(args[0]["position"]))
                .values("slug")
                .first()
            )
            args[0]["position"] = None if el is None else el["slug"]
            args[0]._mutable = False

        super().__init__(*args, **kwargs)
        if not kwargs["request"].user.is_authenticated:
            self.form.fields.pop("status")
        self.form.fields["exclude_tags"].choices = Tag.get_tag_groups(
            exclude=True, nb_suggestions=0
        )
        self.form.fields["tags"].choices = Tag.get_tag_groups(include=True)

    def has_category_parameters(self):
        url = self.request.get_full_path()
        return "category=" in url and not url.startswith("/cat:")

    def get_new_url(self):
        url = self.request.get_full_path()
        if url.startswith("/cat:"):
            return url
        else:
            parsed_url = urlparse(url)
            params = parse_qs(parsed_url.query)
            if len(params["category"]) == 0:
                return url
            else:
                cat_id = params["category"][0]
                del params["category"]
                url = parsed_url._replace(query=urlencode(params, doseq=True)).geturl()
                if cat_id.isdigit():
                    category = Category.objects.filter(pk=cat_id).first()
                    if category is None:
                        return url
                    else:
                        return "/cat:" + category.slug + url
                else:
                    return url

    def filter_recurrences(self, queryset, name, value):
        # construct the full lookup expression
        lookup = "__".join([name, "isnull"])
        return queryset.filter(**{lookup: value == "remove_recurrent"})

    def no_filter(self, queryset, name, value):
        return queryset

    @property
    def qs(self):
        parent = super().qs
        if (
            self.get_cleaned_data("position") is None
            or self.get_cleaned_data("radius") is None
        ):
            return parent
        d = self.get_cleaned_data("radius")
        p = self.get_cleaned_data("position")
        if not isinstance(d, str) or not isinstance(p, ReferenceLocation):
            return parent
        try:
            d = float(d)
        except ValueError:
            return parent
        if d <= 0:
            return parent

        p = p.location

        return parent.exclude(exact_location=False).filter(
            exact_location__location__distance_lt=(p, D(km=d))
        )

    def has_location(self):
        d = self.get_cleaned_data("radius")
        p = self.get_cleaned_data("position")
        if not isinstance(d, str) or not isinstance(p, ReferenceLocation):
            return False
        try:
            d = float(d)
        except ValueError:
            return False
        if d <= 0:
            return False

        return True

    def get_url(self):
        if isinstance(self.form.data, QueryDict):
            return self.form.data.urlencode()
        else:
            return ""

    def get_full_url(self):
        return self.request.get_full_path()

    def get_url_without_filters(self):
        return self.request.get_full_path().split("?")[0]

    def get_cleaned_data(self, name):

        try:
            return self.form.cleaned_data[name]
        except AttributeError:
            return {}
        except KeyError:
            return {}

    def get_tags(self):
        return self.get_cleaned_data("tags")

    def get_exclude_tags(self):
        return self.get_cleaned_data("exclude_tags")

    def get_status(self):
        return self.get_cleaned_data("status")

    def get_position(self):
        return self.get_cleaned_data("position")

    def get_radius(self):
        return self.get_cleaned_data("radius")

    def to_str(self, prefix=""):
        self.form.full_clean()
        result = " ".join(
            [t for t in self.get_tags()]
            + ["~" + t for t in self.get_exclude_tags()]
            + [str(self.get_position()), str(self.get_radius())]
        )
        if len(result) > 0:
            result = prefix + result
        return result

    def get_status_names(self):
        if "status" in self.form.cleaned_data:
            return [
                dict(Event.STATUS.choices)[s] for s in self.get_cleaned_data("status")
            ]
        else:
            return []

    def get_recurrence_filtering(self):
        if "recurrences" in self.form.cleaned_data:
            d = dict(self.RECURRENT_CHOICES)
            v = self.form.cleaned_data["recurrences"]
            if v in d:
                return d[v]
            else:
                return ""
        else:
            return ""

    def is_resetable(self):
        if self.request.user.is_authenticated:
            if (
                len(self.get_cleaned_data("status")) != 1
                or self.get_cleaned_data("status")[0] != Event.STATUS.PUBLISHED
            ):
                return True
        else:
            if len(self.get_cleaned_data("status")) != 0:
                return True
        return (
            len(self.get_cleaned_data("tags")) != 0
            or len(self.get_cleaned_data("exclude_tags")) != 0
            or len(self.get_cleaned_data("recurrences")) != 0
            or (
                (self.get_cleaned_data("position") is not None)
                and (self.get_cleaned_data("radius") is not None)
            )
        )

    def is_active(self, only_categories=False):
        if only_categories:
            return len(self.get_cleaned_data("category")) != 0
        else:
            return (
                len(self.get_cleaned_data("status")) != 0
                or len(self.get_cleaned_data("tags")) != 0
                or len(self.get_cleaned_data("exclude_tags")) != 0
                or len(self.get_cleaned_data("recurrences")) != 0
                or (
                    (self.get_cleaned_data("position") is not None)
                    and (self.get_cleaned_data("radius") is not None)
                )
            )

    def is_selected_tag(self, tag):
        return (
            "tags" in self.form.cleaned_data and tag in self.form.cleaned_data["tags"]
        )

    def get_url_add_tag(self, tag):
        full_path = self.request.get_full_path()

        result = full_path + ("&" if "?" in full_path else "?") + "tags=" + str(tag)

        return result

    def tag_exists(self, tag):
        return tag in [t[0] for g in self.form.fields["tags"].choices for t in g[1]]
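The qs property above narrows events with a GeoDjango distance lookup: D(km=d) builds a Distance object and the __distance_lt lookup compares each stored point against the reference location. The same lookup in isolation (point is assumed to be a geometry such as ReferenceLocation.location):

    from django.contrib.gis.measure import D

    nearby = Event.objects.exclude(exact_location=False).filter(
        exact_location__location__distance_lt=(point, D(km=10))
    )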
    def set_default_values(request):
        if request.user.is_authenticated:
            if request.GET.get("status", None) is None:
                tempdict = request.GET.copy()
                tempdict["status"] = "published"
                request.GET = tempdict
                return request
        return request

    def get_position_radius(self):
        if (
            self.get_cleaned_data("position") is None
            or self.get_cleaned_data("radius") is None
        ):
            return ""
        else:
            return (
                str(self.get_cleaned_data("position"))
                + " ("
                + str(self.get_cleaned_data("radius"))
                + " km)"
            )

    def is_filtered_by_position_radius(self):
        return (
            self.get_cleaned_data("position") is not None
            and self.get_cleaned_data("radius") is not None
        )

    def get_url_add_suggested_position(self, location):
        result = self.request.get_full_path()
        return (
            result
            + ("&" if "?" in result else "?")
            + "position="
            + str(location.slug)
            + "&radius="
            + str(location.suggested_distance)
        )


class EventFilterAdmin(django_filters.FilterSet):
    status = django_filters.MultipleChoiceFilter(
        choices=Event.STATUS.choices, widget=forms.CheckboxSelectMultiple
    )

    representative = django_filters.MultipleChoiceFilter(
        label=_("Representative version"),
        choices=[(True, _("Yes")), (False, _("No"))],
        method="filter_by_representative",
        widget=forms.CheckboxSelectMultiple,
    )

    pure_import = django_filters.MultipleChoiceFilter(
        label=_("Pure import"),
        choices=[(True, _("Yes")), (False, _("No"))],
        method="filter_by_pure_import",
        widget=forms.CheckboxSelectMultiple,
    )

    in_recurrent_import = django_filters.MultipleChoiceFilter(
        label=_("In recurrent import"),
        choices=[(True, _("Yes")), (False, _("No"))],
        method="filter_by_in_recurrent_import",
        widget=forms.CheckboxSelectMultiple,
    )

    o = django_filters.ChoiceFilter(
        label=_("Sort by"),
        choices=[
            ("moderated_date", _("last moderated first")),
            ("modified_date", _("last modified first")),
            ("imported_date", _("last imported first")),
            ("created_date", _("last created first")),
        ],
        method="sort_on_date",
    )

    import_sources = django_filters.ModelChoiceFilter(
        label=_("Imported from"),
        method="filter_by_source",
        queryset=RecurrentImport.objects.all().order_by("name__unaccent"),
    )

    def sort_on_date(self, queryset, name, value):
        print(name, value)
        if value in [
            "created_date",
            "imported_date",
            "modified_date",
            "moderated_date",
        ]:
            notnull = value + "__isnull"
            return queryset.filter(**{notnull: False}).order_by("-" + value)
        else:
            return queryset

    def filter_by_source(self, queryset, name, value):
        src = RecurrentImport.objects.get(pk=value.pk).source
        return queryset.filter(import_sources__contains=[src])

    def filter_by_in_recurrent_import(self, queryset, name, value):
        if value is None or len(value) != 1:
            return queryset
        else:
            srcs = RecurrentImport.objects.all().values_list("source")
            q = Q(import_sources__overlap=srcs)
            if value[0] == "True":
                print(q)
                return queryset.filter(q)
            else:
                return queryset.exclude(q)

    def filter_by_pure_import(self, queryset, name, value):
        if value is None or len(value) != 1:
            return queryset
        else:
            q = Q(import_sources__isnull=False) & (
                Q(modified_date__isnull=True) | Q(modified_date__lte=F("imported_date"))
            )
            if value[0] == "True":
                print(q)
                return queryset.filter(q)
            else:
                return queryset.exclude(q)

    def filter_by_representative(self, queryset, name, value):
        if value is None or len(value) != 1:
            return queryset
        else:
            q = (
                Q(other_versions__isnull=True)
                | Q(other_versions__representative=F("pk"))
                | Q(other_versions__representative__isnull=True)
            )
            if value[0] == "True":
                return queryset.filter(q)
            else:
                return queryset.exclude(q)

    class Meta:
        model = Event
        fields = ["status"]


class MessagesFilterAdmin(django_filters.FilterSet):
    closed = django_filters.MultipleChoiceFilter(
        label=_("Status"),
        choices=((True, _("Closed")), (False, _("Open"))),
        widget=forms.CheckboxSelectMultiple,
    )
    spam = django_filters.MultipleChoiceFilter(
        label=_("Spam"),
        choices=((True, _("Spam")), (False, _("Non spam"))),
        widget=forms.CheckboxSelectMultiple,
    )

    message_type = django_filters.MultipleChoiceFilter(
        label=_("Type"),
        choices=Message.TYPE.choices,
        widget=forms.CheckboxSelectMultiple,
    )

    class Meta:
        model = Message
        fields = ["closed", "spam", "message_type"]

    def is_contact_messages(self):
        return (
            "message_type" in self.form.cleaned_data
            and "contact_form" in self.form.cleaned_data["message_type"]
        )


class SimpleSearchEventFilter(django_filters.FilterSet):
    q = django_filters.CharFilter(
        method="custom_filter",
        label=_("Search"),
        widget=forms.TextInput(attrs={"type": "search"}),
    )

    status = django_filters.MultipleChoiceFilter(
        label="Filtrer par status",
        choices=Event.STATUS.choices,
        field_name="status",
        widget=forms.CheckboxSelectMultiple,
    )

    past = django_filters.ChoiceFilter(
        label=_("In the past"),
        choices=[(False, _("No")), (True, _("Yes"))],
        null_label=None,
        empty_label=None,
        method="in_past",
        widget=forms.Select,
    )

    def in_past(self, queryset, name, value):
        if value and value == "True":
            now = date.today()
            qs = queryset.filter(start_day__lt=now).order_by(
                "-start_day", "-start_time"
            )
        else:
            start = date.today() + timedelta(days=-2)
            qs = queryset.filter(start_day__gte=start).order_by(
                "start_day", "start_time"
            )
        return qs

    def custom_filter(self, queryset, name, value):
        search_query = SearchQuery(value, config="french")
        qs = queryset.filter(
            Q(title__icontains=value)
            | Q(category__name__icontains=value)
            | Q(tags__icontains=[value])
            | Q(exact_location__name__icontains=value)
            | Q(description__icontains=value)
        )
        for f in ["title", "category__name", "exact_location__name", "description"]:
            params = {
                f
                + "_hl": SearchHeadline(
                    f,
                    search_query,
                    start_sel='<span class="highlight">',
                    stop_sel="</span>",
                    config="french",
                )
            }
            qs = qs.annotate(**params)
        return qs

    class Meta:
        model = Event
        fields = ["q"]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if not kwargs["request"].user.is_authenticated:
            self.form.fields.pop("status")


class SearchEventFilter(django_filters.FilterSet):
    tags = django_filters.CharFilter(lookup_expr="icontains")
    title = django_filters.CharFilter(method="hl_filter_contains")
    location = django_filters.CharFilter(method="hl_filter_contains")
    description = django_filters.CharFilter(method="hl_filter_contains")
    start_day = django_filters.DateFromToRangeFilter(
        widget=django_filters.widgets.RangeWidget(attrs={"type": "date"})
    )
    status = django_filters.MultipleChoiceFilter(
        label="Filtrer par status",
        choices=Event.STATUS.choices,
        field_name="status",
        widget=forms.CheckboxSelectMultiple,
    )

    o = django_filters.OrderingFilter(
        # tuple-mapping retains order
        fields=(
            ("title", "title"),
            ("description", "description"),
            ("start_day", "start_day"),
        ),
    )
    def hl_filter_contains(self, queryset, name, value):
        # first check if it contains
        filter_contains = {name + "__contains": value}
        queryset = queryset.filter(**filter_contains)

        # then highlight the result
        search_query = SearchQuery(value, config="french")
        params = {
            name
            + "_hl": SearchHeadline(
                name,
                search_query,
                start_sel='<span class="highlight">',
                stop_sel="</span>",
                config="french",
            )
        }
        return queryset.annotate(**params)
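SearchHeadline delegates highlighting to PostgreSQL: each annotated <field>_hl value is the field's text with matching terms wrapped in the configured start_sel/stop_sel markers, so templates can output it directly. Roughly, under the filter above (illustrative values only):

    # For value = "concert", an annotated row could carry something like:
    # event.title_hl == 'Grand <span class="highlight">concert</span> du printemps'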
    class Meta:
        model = Event
        fields = ["title", "location", "description", "category", "tags", "start_day"]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if not kwargs["request"].user.is_authenticated:
            self.form.fields.pop("status")


class DuplicatedEventsFilter(django_filters.FilterSet):
    fixed = django_filters.BooleanFilter(
        label="Résolu", field_name="representative", method="fixed_qs"
    )

    class Meta:
        model = DuplicatedEvents
        fields = []

    def get_cleaned_data(self, name):
        try:
            return self.form.cleaned_data[name]
        except AttributeError:
            return {}
        except KeyError:
            return {}

    def get_fixed_url(self):
        if "fixed" in self.form.cleaned_data:
            if self.form.cleaned_data["fixed"] is None:
                return "fixed=unknown"
            else:
                if self.form.cleaned_data["fixed"]:
                    return "fixed=true"
                else:
                    return "fixed=false"
        else:
            return ""

    def fixed_qs(self, queryset, name, value):
        return DuplicatedEvents.not_fixed_qs(queryset, value)


class RecurrentImportFilter(django_filters.FilterSet):

    name = django_filters.ModelMultipleChoiceFilter(
        label="Filtrer par nom",
        field_name="name",
        queryset=RecurrentImport.objects.all().order_by("name__unaccent"),
    )

    class Meta:
        model = RecurrentImport
        fields = ["name"]
@@ -13,215 +13,32 @@ from django.forms import (
    BooleanField,
    HiddenInput,
    ModelChoiceField,
    EmailField,
)
from django.forms import formset_factory

from django_better_admin_arrayfield.forms.widgets import DynamicArrayWidget

from .models import (
    Event,
    RecurrentImport,
    CategorisationRule,
    ModerationAnswer,
    ModerationQuestion,
    Place,
    Category,
    Tag,
    Message,
)
from django.conf import settings
from django.core.files import File

from django.utils.translation import gettext_lazy as _
from string import ascii_uppercase as auc
from .templatetags.utils_extra import int_to_abc
from django.utils.safestring import mark_safe
from django.utils.timezone import localtime
from django.utils.formats import localize
from .templatetags.event_extra import event_field_verbose_name, field_to_html
import os

import logging

logger = logging.getLogger(__name__)


class GroupFormMixin:

    template_name = "agenda_culturel/forms/div_group.html"

    class FieldGroup:

        def __init__(
            self, id, label, display_label=False, maskable=False, default_masked=True
        ):
            self.id = id
            self.label = label
            self.display_label = display_label
            self.maskable = maskable
            self.default_masked = default_masked

        def toggle_field_name(self):
            return "group_" + self.id

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.groups = []

    def add_group(self, *args, **kwargs):
        self.groups.append(GroupFormMixin.FieldGroup(*args, **kwargs))
        if self.groups[-1].maskable:
            self.fields[self.groups[-1].toggle_field_name()] = BooleanField(
                required=False
            )
            self.fields[self.groups[-1].toggle_field_name()].toggle_group = True

    def get_fields_in_group(self, g):
        return [
            f
            for f in self.visible_fields()
            if not hasattr(f.field, "toggle_group")
            and hasattr(f.field, "group_id")
            and f.field.group_id == g.id
        ]

    def get_no_group_fields(self):
        return [
            f
            for f in self.visible_fields()
            if not hasattr(f.field, "toggle_group")
            and (not hasattr(f.field, "group_id") or f.field.group_id is None)
        ]

    def fields_by_group(self):
        return [(g, self.get_fields_in_group(g)) for g in self.groups] + [
            (GroupFormMixin.FieldGroup("other", _("Other")), self.get_no_group_fields())
        ]

    def clean(self):
        result = super().clean()

        if result:
            data = dict(self.data)
            # for each masked group, we remove data
            for g in self.groups:
                if g.maskable and g.toggle_field_name() not in data:
                    fields = self.get_fields_in_group(g)
                    for f in fields:
                        self.cleaned_data[f.name] = None

        return result
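GroupFormMixin organizes rendering: add_group declares a group (maskable groups receive a synthetic group_<id> BooleanField used as a toggle) and each field opts in by being given a group_id. A minimal hedged sketch (DemoForm is invented, not part of the diff):

    class DemoForm(GroupFormMixin, Form):
        name = CharField()
        email = EmailField(required=False)

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.add_group("contact", _("Contact"), maskable=True)
            self.fields["email"].group_id = "contact"  # rendered inside the maskable "contact" group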
class TagForm(ModelForm):
    required_css_class = "required"

    class Meta:
        model = Tag
        fields = [
            "name",
            "description",
            "in_included_suggestions",
            "in_excluded_suggestions",
            "principal",
        ]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if "name" in kwargs["initial"]:
            self.fields["name"].widget = HiddenInput()


class TagRenameForm(Form):
    required_css_class = "required"

    name = CharField(label=_("Name of new tag"), required=True)

    force = BooleanField(
        label=_(
            "Force renaming despite the existence of events already using the chosen tag."
        ),
    )

    def __init__(self, *args, **kwargs):
        force = kwargs.pop("force", False)
        name = kwargs.pop("name", None)
        super().__init__(*args, **kwargs)
        if not (force or (not len(args) == 0 and "force" in args[0])):
            del self.fields["force"]
        if name is not None and self.fields["name"].initial is None:
            self.fields["name"].initial = name

    def is_force(self):
        return "force" in self.fields and self.cleaned_data["force"] is True


class SimpleContactForm(GroupFormMixin, Form):
    email = EmailField(
        label=_("Your email"),
        help_text=_("Your email address"),
        max_length=254,
        required=False,
    )

    comments = CharField(
        label=_("Comments"),
        help_text=_(
            "Your message for the moderation team (comments, clarifications, requests...)"
        ),
        widget=Textarea,
        max_length=2048,
        required=False,
    )

    def __init__(self, *args, **kwargs):
        is_authenticated = "is_authenticated" in kwargs and kwargs["is_authenticated"]
        super().__init__(*args, **kwargs)

        if not is_authenticated:
            self.add_group(
                "communication",
                _(
                    "Receive notification of publication or leave a message for moderation"
                ),
                maskable=True,
                default_masked=True,
            )
            self.fields["email"].group_id = "communication"
            self.fields["comments"].group_id = "communication"
        else:
            del self.fields["email"]
            del self.fields["comments"]


class URLSubmissionForm(GroupFormMixin, Form):
    required_css_class = "required"

class EventSubmissionForm(Form):
    url = URLField(max_length=512)
    category = ModelChoiceField(
        label=_("Category"),
        queryset=Category.objects.all().order_by("name"),
        initial=None,
        required=False,
    )
    tags = MultipleChoiceField(
        label=_("Tags"), initial=None, choices=[], required=False
    )

    def __init__(self, *args, **kwargs):
        kwargs.pop("is_authenticated", False)
        super().__init__(*args, **kwargs)
        self.fields["tags"].choices = Tag.get_tag_groups(all=True)

        self.add_group("event", _("Event"))
        self.fields["url"].group_id = "event"
        self.fields["category"].group_id = "event"
        self.fields["tags"].group_id = "event"


class URLSubmissionFormWithContact(SimpleContactForm, URLSubmissionForm):
    pass


URLSubmissionFormSet = formset_factory(URLSubmissionForm, extra=9, min_num=1)
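formset_factory(URLSubmissionForm, extra=9, min_num=1) produces a formset class that renders one required URL form plus nine optional blank ones. A hedged view-side sketch:

    formset = URLSubmissionFormSet(request.POST or None)
    if formset.is_valid():
        # keep only the forms the user actually filled in
        urls = [f.cleaned_data["url"] for f in formset if f.cleaned_data.get("url")]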
class DynamicArrayWidgetURLs(DynamicArrayWidget):
|
||||
@@ -233,52 +50,28 @@ class DynamicArrayWidgetTags(DynamicArrayWidget):
|
||||
|
||||
|
||||
class RecurrentImportForm(ModelForm):
|
||||
required_css_class = "required"
|
||||
|
||||
defaultTags = MultipleChoiceField(
|
||||
label=_("Tags"), initial=None, choices=[], required=False
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = RecurrentImport
|
||||
fields = "__all__"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields["defaultTags"].choices = Tag.get_tag_groups(all=True)
|
||||
widgets = {
|
||||
"defaultTags": DynamicArrayWidgetTags(),
|
||||
}
|
||||
|
||||
|
||||
class CategorisationRuleImportForm(ModelForm):
|
||||
required_css_class = "required"
|
||||
|
||||
class Meta:
|
||||
model = CategorisationRule
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class EventForm(GroupFormMixin, ModelForm):
|
||||
required_css_class = "required"
|
||||
|
||||
old_local_image = CharField(widget=HiddenInput(), required=False)
|
||||
simple_cloning = CharField(widget=HiddenInput(), required=False)
|
||||
cloning = CharField(widget=HiddenInput(), required=False)
|
||||
|
||||
tags = MultipleChoiceField(
|
||||
label=_("Tags"), initial=None, choices=[], required=False
|
||||
)
|
||||
|
||||
class EventForm(ModelForm):
|
||||
class Meta:
|
||||
model = Event
|
||||
exclude = [
|
||||
"possibly_duplicated",
|
||||
"imported_date",
|
||||
"modified_date",
|
||||
"moderated_date",
|
||||
"import_sources",
|
||||
"image",
|
||||
"moderated_by_user",
|
||||
"modified_by_user",
|
||||
"created_by_user",
|
||||
"imported_by_user",
|
||||
]
|
||||
widgets = {
|
||||
"start_day": TextInput(
|
||||
@@ -297,83 +90,17 @@ class EventForm(GroupFormMixin, ModelForm):
|
||||
),
|
||||
"end_day": TextInput(attrs={"type": "date"}),
|
||||
"end_time": TextInput(attrs={"type": "time"}),
|
||||
"other_versions": HiddenInput(),
|
||||
"uuids": MultipleHiddenInput(),
|
||||
"import_sources": MultipleHiddenInput(),
|
||||
"reference_urls": DynamicArrayWidgetURLs(),
|
||||
"tags": DynamicArrayWidgetTags(),
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
is_authenticated = kwargs.pop("is_authenticated", False)
|
||||
self.cloning = kwargs.pop("is_cloning", False)
|
||||
self.simple_cloning = kwargs.pop("is_simple_cloning", False)
|
||||
super().__init__(*args, **kwargs)
|
||||
if not is_authenticated:
|
||||
del self.fields["status"]
|
||||
del self.fields["organisers"]
|
||||
self.fields["category"].queryset = self.fields["category"].queryset.order_by(
|
||||
"name"
|
||||
)
|
||||
self.fields["category"].empty_label = None
|
||||
self.fields["category"].initial = Category.get_default_category()
|
||||
self.fields["tags"].choices = Tag.get_tag_groups(all=True)
|
||||
|
||||
# set groups
|
||||
self.add_group("main", _("Main fields"))
|
||||
self.fields["title"].group_id = "main"
|
||||
|
||||
self.add_group("start", _("Start of event"))
|
||||
self.fields["start_day"].group_id = "start"
|
||||
self.fields["start_time"].group_id = "start"
|
||||
|
||||
self.add_group("end", _("End of event"))
|
||||
self.fields["end_day"].group_id = "end"
|
||||
self.fields["end_time"].group_id = "end"
|
||||
|
||||
self.add_group(
|
||||
"recurrences",
|
||||
_("This is a recurring event"),
|
||||
maskable=True,
|
||||
default_masked=not (
|
||||
self.instance
|
||||
and self.instance.recurrences
|
||||
and self.instance.recurrences.rrules
|
||||
and len(self.instance.recurrences.rrules) > 0
|
||||
),
|
||||
)
|
||||
|
||||
self.fields["recurrences"].group_id = "recurrences"
|
||||
|
||||
self.add_group("details", _("Details"))
|
||||
self.fields["description"].group_id = "details"
|
||||
if is_authenticated:
|
||||
self.fields["organisers"].group_id = "details"
|
||||
|
||||
self.add_group("location", _("Location"))
|
||||
self.fields["location"].group_id = "location"
|
||||
self.fields["exact_location"].group_id = "location"
|
||||
|
||||
self.add_group("illustration", _("Illustration"))
|
||||
self.fields["local_image"].group_id = "illustration"
|
||||
self.fields["image_alt"].group_id = "illustration"
|
||||
|
||||
self.add_group("urls", _("URLs"))
|
||||
self.fields["reference_urls"].group_id = "urls"
|
||||
|
||||
if is_authenticated:
|
||||
self.add_group("meta-admin", _("Meta information"))
|
||||
self.fields["category"].group_id = "meta-admin"
|
||||
self.fields["tags"].group_id = "meta-admin"
|
||||
self.fields["status"].group_id = "meta-admin"
|
||||
else:
|
||||
self.add_group("meta", _("Meta information"))
|
||||
self.fields["category"].group_id = "meta"
|
||||
self.fields["tags"].group_id = "meta"
|
||||
|
||||
def is_clone_from_url(self):
|
||||
return self.cloning
|
||||
|
||||
def is_simple_clone_from_url(self):
|
||||
return self.simple_cloning
|
||||
|
||||
def clean_end_day(self):
|
||||
start_day = self.cleaned_data.get("start_day")
|
||||
@@ -401,84 +128,8 @@ class EventForm(GroupFormMixin, ModelForm):
        return end_time

    def clean(self):
        super().clean()

        # when cloning an existing event, we need to copy the local image
        if (
            (
                ("local_image" not in self.cleaned_data)
                or (self.cleaned_data["local_image"] is None)
            )
            and self.cleaned_data["old_local_image"] is not None
            and self.cleaned_data["old_local_image"] != ""
        ):
            basename = self.cleaned_data["old_local_image"]
            old = settings.MEDIA_ROOT + "/" + basename
            if os.path.isfile(old):
                self.cleaned_data["local_image"] = File(
                    name=basename, file=open(old, "rb")
                )


class EventFormWithContact(SimpleContactForm, EventForm):
    pass


class MultipleChoiceFieldAcceptAll(MultipleChoiceField):
    def validate(self, value):
        pass


class EventModerateForm(ModelForm):
    required_css_class = "required"

    tags = MultipleChoiceField(
        label=_("Tags"), help_text=_("Select tags from existing ones."), required=False
    )

    new_tags = MultipleChoiceFieldAcceptAll(
        label=_("New tags"),
        help_text=_(
            "Create new labels (sparingly). Note: by starting your tag with the characters “TW:”, you'"
            "ll create a “trigger warning” tag, and the associated events will be announced as such."
        ),
        widget=DynamicArrayWidget(),
        required=False,
    )

    class Meta:
        model = Event
        fields = ["status", "category", "organisers", "exact_location", "tags"]
        widgets = {"status": RadioSelect}

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.fields["category"].queryset = self.fields["category"].queryset.order_by(
            "name"
        )
        self.fields["category"].empty_label = None
        self.fields["category"].initial = Category.get_default_category()
        self.fields["tags"].choices = Tag.get_tag_groups(all=True)

    def clean_new_tags(self):
        return list(set(self.cleaned_data.get("new_tags")))

    def clean(self):
        super().clean()

        if self.cleaned_data["tags"] is None:
            self.cleaned_data["tags"] = []

        if self.cleaned_data.get("new_tags") is not None:
            self.cleaned_data["tags"] += self.cleaned_data.get("new_tags")

        self.cleaned_data["tags"] = list(set(self.cleaned_data["tags"]))


class BatchImportationForm(Form):
    required_css_class = "required"

    json = CharField(
        label="JSON",
        widget=Textarea(attrs={"rows": "10"}),
@@ -488,63 +139,54 @@ class BatchImportationForm(Form):
class FixDuplicates(Form):
    required_css_class = "required"

    action = ChoiceField()

    def __init__(self, *args, **kwargs):
        edup = kwargs.pop("edup", None)
        events = edup.get_duplicated()
        len(events)
        nb_events = kwargs.pop("nb_events", None)
        super().__init__(*args, **kwargs)

        choices = []
        initial = None
        for i, e in enumerate(events):
            if e.status != Event.STATUS.TRASH or e.modified():
                msg = ""
                if e.local_version():
                    msg = _(" (locally modified version)")
                if e.status != Event.STATUS.TRASH:
                    initial = "Select-" + str(e.pk)
                if e.pure_import():
                    msg = _(" (synchronized on import version)")
        if nb_events == 2:
            choices = [("NotDuplicates", "Ces événements sont différents")]
            choices += [
                (
                    "SelectA",
                    "Ces événements sont identiques, on garde A et on met B à la corbeille",
                )
            ]
            choices += [
                (
                    "SelectB",
                    "Ces événements sont identiques, on garde B et on met A à la corbeille",
                )
            ]
            choices += [
                ("Merge", "Ces événements sont identiques, on fusionne à la main")
            ]
        else:
            choices = [("NotDuplicates", "Ces événements sont tous différents")]
            for i in auc[0:nb_events]:
                choices += [
                    (
                        "Select-" + str(e.pk),
                        _("Select {} as representative version.").format(auc[i] + msg),
                        "Remove" + i,
                        "L'événement "
                        + i
                        + " n'est pas identique aux autres, on le rend indépendant",
                    )
                ]

        for i, e in enumerate(events):
            if e.status != Event.STATUS.TRASH and e.local_version():
                for i in auc[0:nb_events]:
                    choices += [
                        (
                            "Update-" + str(e.pk),
                            _(
                                "Update {} using some fields from other versions (interactive mode)."
                            ).format(auc[i]),
                            "Select" + i,
                            "Ces événements sont identiques, on garde "
                            + i
                            + " et on met les autres à la corbeille",
                        )
                    ]

        extra = ""
        if edup.has_local_version():
            extra = _(" Warning: a version is already locally modified.")

        if initial is None:
            initial = "Merge"
        choices += [
            ("Merge", _("Create a new version by merging (interactive mode).") + extra)
        ]
        for i, e in enumerate(events):
            if e.status != Event.STATUS.TRASH:
                choices += [
                    ("Remove-" + str(e.pk), _("Make {} independent.").format(auc[i]))
                ]
        choices += [("NotDuplicates", _("Make all versions independent."))]
        choices += [
            ("Merge", "Ces événements sont identiques, on fusionne à la main")
        ]

        self.fields["action"].choices = choices
        self.fields["action"].initial = initial

    def is_action_no_duplicates(self):
        return self.cleaned_data["action"] == "NotDuplicates"
@@ -552,134 +194,85 @@ class FixDuplicates(Form):
    def is_action_select(self):
        return self.cleaned_data["action"].startswith("Select")

    def is_action_update(self):
        return self.cleaned_data["action"].startswith("Update")

    def is_action_remove(self):
        return self.cleaned_data["action"].startswith("Remove")

    def get_selected_event_code(self):
        if (
            self.is_action_select()
            or self.is_action_remove()
            or self.is_action_update()
        ):
            return int(self.cleaned_data["action"].split("-")[-1])
        if self.is_action_select() or self.is_action_remove():
            return self.cleaned_data["action"][-1]
        else:
            return None

    def get_selected_event(self, edup):
    def get_selected_event_id(self):
        selected = self.get_selected_event_code()
        for e in edup.get_duplicated():
            if e.pk == selected:
                return e
        return None
        if selected is None:
            return None
        else:
            return auc.rfind(selected)

    def get_selected_event(self, edup):
        selected = self.get_selected_event_id()
        return edup.get_duplicated()[selected]


class SelectEventInList(Form):
    required_css_class = "required"

    event = ChoiceField(label=_("Event"))
    event = ChoiceField()

    def __init__(self, *args, **kwargs):
        events = kwargs.pop("events", None)
        super().__init__(*args, **kwargs)

        self.fields["event"].choices = [
            (
                e.pk,
                (e.start_time.strftime("%H:%M") + " : " if e.start_time else "")
                + e.title
                + ((", " + e.location) if e.location else ""),
            )
            for e in events
            (e.pk, str(e.start_day) + " " + e.title + ", " + e.location) for e in events
        ]


class MergeDuplicates(Form):
    required_css_class = "required"

    checkboxes_fields = ["reference_urls", "description", "tags"]
    checkboxes_fields = ["reference_urls", "description"]

    def __init__(self, *args, **kwargs):
        self.duplicates = kwargs.pop("duplicates", None)
        self.event = kwargs.pop("event", None)
        self.events = list(self.duplicates.get_duplicated())
        len(self.events)
        nb_events = self.duplicates.nb_duplicated()
        super().__init__(*args, **kwargs)

        if self.event:
            choices = [
                (
                    ("event_" + str(e.pk), _("Value of version {}").format(e.pk))
                    if e != self.event
                    else ("event_" + str(e.pk), _("Value of the selected version"))
                )
                for e in self.events
            ]
            initial = "event_" + str(self.event.pk)
        else:
            choices = [
                ("event_" + str(e.pk), _("Value of version {}").format(e.pk))
                for e in self.events
            ]
            initial = choices[0][0]
        choices = [
            ("event" + i, "Valeur de l'évenement " + i) for i in auc[0:nb_events]
        ]

        for f in self.duplicates.get_items_comparison():
            if not f["similar"]:
                if f["key"] in MergeDuplicates.checkboxes_fields:
                    self.fields[f["key"]] = MultipleChoiceField(choices=choices)
                    self.fields[f["key"]].initial = initial
                    self.fields[f["key"]].initial = choices[0][0]
                else:
                    self.fields[f["key"]] = ChoiceField(
                        widget=RadioSelect, choices=choices
                    )
                    self.fields[f["key"]].initial = initial
                    self.fields[f["key"]].initial = choices[0][0]

    def as_grid(self):
        result = '<div class="grid">'
        for i, e in enumerate(self.events):
        for i, e in enumerate(self.duplicates.get_duplicated()):
            result += '<div class="grid entete-badge">'
            result += '<div class="badge-large">' + int_to_abc(i) + "</div>"
            result += "<ul>"
            result += (
                '<li><a href="' + e.get_absolute_url() + '">' + e.title + "</a></li>"
            )
            for step in e.chronology_dates():
                if step["data"] == "created_date":
                    result += (
                        "<li><em>Création</em> le "
                        + localize(step["timestamp"])
                        + " par "
                        + str(step["user"])
                        + "</li>"
                    )
                if step["data"] == "modified_date":
                    result += "<li><em>Dernière modification</em> le " + localize(
                        step["timestamp"]
                    )
                    if e.modified_by_user:
                        result += " par " + e.modified_by_user.username
                    else:
                        result += " par import récurrent"
                    result += "</li>"

                if step["data"] == "moderated_date":
                    result += "<li><em>Dernière modération</em> le " + localize(
                        step["timestamp"]
                    )
                    if e.moderated_by_user:
                        result += " par " + e.moderated_by_user.username
                    result += "</li>"
                if step["data"] == "imported_date":
                    result += "<li><em>Dernière importation</em> le " + localize(
                        step["timestamp"]
                    )
                    if e.imported_by_user:
                        result += " par " + e.imported_by_user.username
                    else:
                        result += " par import récurrent"
                    result += "</li>"

            result += (
                "<li>Création : " + localize(localtime(e.created_date)) + "</li>"
            )
            result += (
                "<li>Dernière modification : "
                + localize(localtime(e.modified_date))
                + "</li>"
            )
            if e.imported_date:
                result += (
                    "<li>Dernière importation : "
                    + localize(localtime(e.imported_date))
                    + "</li>"
                )
            result += "</ul>"
            result += "</div>"
        result += "</div>"
@@ -695,83 +288,100 @@ class MergeDuplicates(Form):
            )
            else:
                result += "<fieldset>"
                if key in self.errors:
                    result += '<div class="message error"><ul>'
                    for err in self.errors[key]:
                        result += "<li>" + err + "</li>"
                    result += "</ul></div>"
                result += '<div class="grid comparison-item">'
                if hasattr(self, "cleaned_data"):
                    checked = self.cleaned_data.get(key)
                else:
                    checked = self.fields[key].initial

                for i, (v, radio, ev) in enumerate(
                    zip(e["values"], self.fields[e["key"]].choices, self.events)
                for i, (v, radio) in enumerate(
                    zip(e["values"], self.fields[e["key"]].choices)
                ):
                    result += self.comparison_item(key, i, v, radio, ev, checked)
                    result += '<div class="duplicated">'
                    id = "id_" + key + "_" + str(i)
                    value = "event" + auc[i]

                    result += '<input id="' + id + '" name="' + key + '"'
                    if key in MergeDuplicates.checkboxes_fields:
                        result += ' type="checkbox"'
                        if value in checked:
                            result += " checked"
                    else:
                        result += ' type="radio"'
                        if checked == value:
                            result += " checked"
                    result += ' value="' + value + '"'
                    result += ">"
                    result += (
                        '<div class="badge-small">'
                        + int_to_abc(i)
                        + "</div>"
                        + str(field_to_html(v, e["key"]))
                        + "</div>"
                    )
                result += "</div></fieldset>"

        return mark_safe(result)

    def comparison_item(self, key, i, v, radio, ev, checked):
        result = '<div class="duplicated">'
        id = "id_" + key + "_" + str(ev.pk)
        value = "event_" + str(ev.pk)

        result += '<input id="' + id + '" name="' + key + '"'
        if key in MergeDuplicates.checkboxes_fields:
            result += ' type="checkbox"'
            if checked and value in checked:
                result += " checked"
        else:
            result += ' type="radio"'
            if checked == value:
                result += " checked"
        result += ' value="' + value + '"'
        result += ">"
        result += '<div class="badge-small">' + int_to_abc(i) + "</div>"
        result += "<div>"
        if key == "image":
            result += str(field_to_html(ev.local_image, "local_image")) + "</div>"
            result += "<div>Lien d'import : "

        result += str(field_to_html(v, key)) + "</div>"
        result += "</div>"
        return result

    def get_selected_events(self, key):
    def get_selected_events_id(self, key):
        value = self.cleaned_data.get(key)
        if key not in self.fields:
            return None
        else:
            if isinstance(value, list):
                selected = [int(v.split("_")[-1]) for v in value]
                result = []
                for s in selected:
                    for e in self.duplicates.get_duplicated():
                        if e.pk == s:
                            result.append(e)
                            break
                return result
                return [auc.rfind(v[-1]) for v in value]
            else:
                selected = int(value.split("_")[-1])
                for e in self.duplicates.get_duplicated():
                    if e.pk == selected:
                        return e
                return auc.rfind(value[-1])

        return None


class ModerationQuestionForm(ModelForm):
    class Meta:
        model = ModerationQuestion
        fields = "__all__"


class ModerationAnswerForm(ModelForm):
    class Meta:
        model = ModerationAnswer
        exclude = ["question"]
        widgets = {
            "adds_tags": DynamicArrayWidgetTags(),
            "removes_tags": DynamicArrayWidgetTags(),
        }


class ModerateForm(ModelForm):
    class Meta:
        model = Event
        fields = []

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        mqs = ModerationQuestion.objects.all()
        mas = ModerationAnswer.objects.all()

        for q in mqs:
            self.fields[q.complete_id()] = ChoiceField(
                widget=RadioSelect,
                label=q.question,
                choices=[(a.pk, a.html_description()) for a in mas if a.question == q],
                required=True,
            )
            for a in mas:
                if a.question == q and a.valid_event(self.instance):
                    self.fields[q.complete_id()].initial = a.pk
                    break


class CategorisationForm(Form):
    required_css_class = "required"

    def __init__(self, *args, **kwargs):
        if "events" in kwargs:
            events = kwargs.pop("events", None)
        else:
            events = []
            for f in args[0]:
                logger.warning("fff: " + f)
                if "_" not in f:
                    if f + "_cat" in args[0]:
                        events.append(
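A note on the selection convention used throughout MergeDuplicates and FixDuplicates above: choice values encode the event primary key as "event_<pk>" (or "Select-<pk>" for actions) and are decoded with split("_")[-1] or split("-")[-1]. A standalone round trip, with a plain integer standing in for a model pk:

```python
# Round trip of the "event_<pk>" value convention (illustrative only).
pk = 42
value = "event_" + str(pk)           # what the form widget submits
decoded = int(value.split("_")[-1])  # what get_selected_events recovers
assert decoded == pk
```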
@@ -796,8 +406,6 @@ class CategorisationForm(Form):
class EventAddPlaceForm(Form):
    required_css_class = "required"

    place = ModelChoiceField(
        label=_("Place"),
        queryset=Place.objects.all().order_by("name"),
@@ -823,20 +431,15 @@ class EventAddPlaceForm(Form):
        if self.cleaned_data.get("place"):
            place = self.cleaned_data.get("place")
            self.instance.exact_location = place
            self.instance.save(update_fields=["exact_location"])
            self.instance.save()
            if self.cleaned_data.get("add_alias"):
                if place.aliases:
                    place.aliases.append(self.instance.location.strip())
                else:
                    place.aliases = [self.instance.location.strip()]
                place.aliases.append(self.instance.location)
                place.save()

        return self.instance


class PlaceForm(GroupFormMixin, ModelForm):
    required_css_class = "required"

class PlaceForm(ModelForm):
    apply_to_all = BooleanField(
        initial=True,
        label=_(
@@ -850,73 +453,13 @@ class PlaceForm(GroupFormMixin, ModelForm):
        fields = "__all__"
        widgets = {"location": TextInput()}

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.add_group("header", _("Header"))
        self.fields["name"].group_id = "header"

        self.add_group("address", _("Address"))
        self.fields["address"].group_id = "address"
        self.fields["postcode"].group_id = "address"
        self.fields["city"].group_id = "address"
        self.fields["location"].group_id = "address"

        self.add_group("meta", _("Meta"))
        self.fields["aliases"].group_id = "meta"

        self.add_group("information", _("Information"))
        self.fields["description"].group_id = "information"

    def as_grid(self):
        result = (
        return mark_safe(
            '<div class="grid"><div>'
            + super().as_p()
            + """</div><div><div class="map-widget">
            <div id="map_location" style="width: 100%; aspect-ratio: 16/9"></div>
            <p>Cliquez pour ajuster la position GPS</p></div>
            <input type="checkbox" role="switch" id="lock_position">Verrouiller la position</lock>
            <script>
            document.getElementById("lock_position").onclick = function() {
                const field = document.getElementById("id_location");
                if (this.checked)
                    field.setAttribute("readonly", true);
                else
                    field.removeAttribute("readonly");
            }
            </script>
            </div></div>"""
            + '</div><div><div class="map-widget">'
            + '<div id="map_location" style="width: 100%; aspect-ratio: 16/9"></div><p>Cliquez pour ajuster la position GPS</p></div></div></div>'
        )

        return mark_safe(result)

    def apply(self):
        return self.cleaned_data.get("apply_to_all")


class MessageForm(ModelForm):

    class Meta:
        model = Message
        fields = ["subject", "name", "email", "message", "related_event"]
        widgets = {"related_event": HiddenInput(), "user": HiddenInput()}

    def __init__(self, *args, **kwargs):
        self.event = kwargs.pop("event", False)
        self.internal = kwargs.pop("internal", False)
        super().__init__(*args, **kwargs)
        self.fields["related_event"].required = False
        if self.internal:
            self.fields.pop("name")
            self.fields.pop("email")


class MessageEventForm(ModelForm):

    class Meta:
        model = Message
        fields = ["message"]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.fields["message"].label = _("Add a comment")
@@ -1,73 +0,0 @@
from ..extractor import Extractor
import json
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import pytz
import html
from datetime import datetime


# A class dedicated to get events from les amis du temps des cerises
# Website https://amisdutempsdescerises.org/
class CExtractor(Extractor):

    def __init__(self):
        super().__init__()
        self.data = b'------toto\r\nContent-Disposition: form-data; name="p"\r\n\r\nfutur\r\n------toto--\r\n'
        self.content_type = "multipart/form-data; boundary=----toto"

    def extract(
        self, content, url, url_human=None, default_values=None, published=False
    ):
        self.set_header(url)
        self.clear_events()

        root_url = "https://" + urlparse(url).netloc + "/"
        images_basename = root_url + "images/"
        from_timezone = pytz.utc
        to_timezone = pytz.timezone("Europe/Paris")

        events = json.loads(content)
        for e in events:
            tags = []
            start_day = e["ev_date"].split(" ")[0]
            start_time = e["ev_time"]
            title = html.unescape(e["ev_titre"])
            if "ev_sstitre" in e and e["ev_sstitre"] != "":
                title = title + " - " + html.unescape(e["ev_sstitre"])

            soup = BeautifulSoup(e["ev_info"], "html.parser")
            description = soup.text
            location = html.unescape(e["li_nom"]) if "li_nom" in e else None
            if "ev_canceled" in e and e["ev_canceled"] != "0":
                tags += ["annulé"]

            image = None
            if "ev_img" in e and e["ev_img"] != "":
                image = images_basename + e["ev_img"]

            naive_dt = datetime.strptime(e["ev_date"], "%Y-%m-%d %H:%M:%S")

            from_dt = from_timezone.localize(naive_dt)
            dt = to_timezone.normalize(from_dt)
            ts = int(datetime.timestamp(dt)) * 1000

            event_url = root_url + "#" + str(ts)

            self.add_event(
                default_values,
                title,
                None,
                start_day,
                location,
                description,
                tags,
                uuids=[event_url],
                recurrences=None,
                url_human=event_url,
                start_time=start_time,
                published=published,
                image=image,
            )

        return self.get_structure()
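The deleted extractor above converts the server's naive UTC timestamps to Paris time with the classic pytz two-step. A minimal standalone sketch of that round trip (values are illustrative):

```python
import pytz
from datetime import datetime

paris = pytz.timezone("Europe/Paris")
naive = datetime(2024, 6, 1, 18, 0)     # as parsed from "ev_date"
aware_utc = pytz.utc.localize(naive)    # attach the UTC timezone
local = paris.normalize(aware_utc)      # convert to Europe/Paris
ts_ms = int(local.timestamp()) * 1000   # millisecond timestamp for the "#<ts>" URL
```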
@@ -1,141 +0,0 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from ..extractor import Extractor
from bs4 import BeautifulSoup
import re
from datetime import datetime, timedelta, date


# A class dedicated to get events from Arachnée Concert
# URL: https://www.arachnee-concerts.com/agenda-des-concerts/
class CExtractor(TwoStepsExtractorNoPause):

    def __init__(self):
        super().__init__()
        self.possible_dates = {}
        self.theater = None

    def extract(
        self,
        content,
        url,
        url_human=None,
        default_values=None,
        published=False,
        only_future=True,
        ignore_404=True,
    ):
        match = re.match(r".*\&theatres=([^&]*)&.*", url)
        if match:
            self.theater = match[1]

        return super().extract(
            content, url, url_human, default_values, published, only_future, ignore_404
        )

    def build_event_url_list(self, content, infuture_days=180):

        soup = BeautifulSoup(content, "html.parser")

        containers = soup.select("ul.event_container>li")
        if containers:
            for c in containers:
                d = Extractor.parse_french_date(c.select_one(".date").text)
                la = c.select_one(".event_auditory").text
                if (
                    self.theater is None or (la.startswith(self.theater))
                ) and d < date.today() + timedelta(days=infuture_days):
                    t = Extractor.parse_french_time(c.select_one(".time").text)
                    e_url = c.select_one(".info a")["href"]
                    if e_url not in self.possible_dates:
                        self.possible_dates[e_url] = []
                    self.possible_dates[e_url].append((str(d) + " " + str(t)))
                    self.add_event_url(e_url)

    def add_event_from_content(
        self,
        event_content,
        event_url,
        url_human=None,
        default_values=None,
        published=False,
    ):

        soup = BeautifulSoup(event_content, "html.parser")
        title = ", ".join(
            [
                x.text
                for x in [
                    soup.select_one(y) for y in [".page_title", ".artiste-subtitle"]
                ]
                if x
            ]
        )

        image = soup.select_one(".entry-image .image_wrapper img")
        if image is not None:
            image = image["src"]

        descs = soup.select(".entry-content p")
        if descs:
            description = "\n".join([d.text for d in descs])
        else:
            description = None

        category = soup.select_one(".event_category").text
        first_cat = Extractor.remove_accents(category.split(",")[0].lower())
        tags = []
        if first_cat in ["grand spectacle"]:
            category = "Spectacles"
            tags.append("💃 danse")
        elif first_cat in ["theatre", "humour / one man show"]:
            category = "Spectacles"
            tags.append("🎭 théâtre")
        elif first_cat in [
            "chanson francaise",
            "musique du monde",
            "pop / rock",
            "rap",
            "rnb",
            "raggae",
            "variete",
        ]:
            category = "Fêtes & Concerts"
            tags.append("🎵 concert")
        elif first_cat in [
            "comedie musicale",
            "humour / one man show",
            "spectacle equestre",
        ]:
            category = "Spectacles"
        elif first_cat in ["spectacle pour enfant"]:
            tags = ["🎈 jeune public"]
            category = None
        else:
            category = None

        dates = soup.select("#event_ticket_content>ul>li")
        for d in dates:
            dt = datetime.fromisoformat(d.select_one(".date")["content"])
            date = dt.date()
            time = dt.time()
            if str(date) + " " + str(time) in self.possible_dates[event_url]:
                location = d.select_one(".event_auditory").text

                self.add_event_with_props(
                    default_values,
                    event_url,
                    title,
                    category,
                    date,
                    location,
                    description,
                    tags,
                    recurrences=None,
                    uuids=[event_url + "?d=" + str(date) + "&t=" + str(time)],
                    url_human=event_url,
                    start_time=time,
                    end_day=None,
                    end_time=None,
                    published=published,
                    image=image,
                )
@@ -1,198 +0,0 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..extractor import Extractor
from bs4 import BeautifulSoup
import re
from datetime import datetime, timedelta
from urllib.parse import urlparse


# A class dedicated to get events from La Cour des 3 Coquins and Graines de spectacle
# URL: https://billetterie-c3c.clermont-ferrand.fr//
class CExtractor(TwoStepsExtractor):

    def extract(
        self,
        content,
        url,
        url_human=None,
        default_values=None,
        published=False,
        only_future=True,
        ignore_404=True,
    ):
        self.root_address = "https://" + urlparse(url).netloc + "/"
        return super().extract(
            content, url, url_human, default_values, published, only_future, ignore_404
        )

    def category_agenda(self, category):
        if not category:
            return None
        mapping = {
            "Théâtre": "Spectacles",
            "Concert": "Fêtes & Concerts",
            "Projection": "Cinéma",
        }
        mapping_tag = {
            "Théâtre": "🎭 théâtre",
            "Concert": "🎵 concert",
            "Projection": None,
        }
        if category in mapping:
            return mapping[category], mapping_tag[category]
        else:
            return None, None

    def build_event_url_list(self, content):
        soup = BeautifulSoup(content, "html.parser")

        events = soup.select("div.fiche-info")

        for e in events:
            e_url = e.select_one("a.btn.lien_savoir_plus")["href"]
            if e_url != "":
                e_url = self.url + "/" + e_url
                self.add_event_url(e_url)

    def add_event_from_content(
        self,
        event_content,
        event_url,
        url_human=None,
        default_values=None,
        published=False,
    ):
        soup = BeautifulSoup(event_content, "html.parser")

        title = soup.select_one("h1")
        if title:
            title = title.text

        image = soup.select_one("#media .swiper-slide img")
        if image:
            image = image["src"]
        else:
            image = None

        description = soup.select_one(".presentation").get_text()
        duration = soup.select_one("#criteres .DUREE-V .valeur-critere li")
        if duration is not None:
            duration = Extractor.parse_french_time(duration.text)

        location = soup.select_one("#criteres .LIEU-V .valeur-critere li")
        if location is not None:
            location = location.text

        categories = []
        tags = []
        for t in soup.select(".sous-titre span"):
            classes = t.get("class")
            if classes and len(classes) > 0:
                if classes[0].startswith("LIEU-"):
                    location = t.text
                elif classes[0].startswith("THEMATIQUE-"):
                    cat, tag = self.category_agenda(t.text)
                    if cat:
                        categories.append(cat)
                    if tag:
                        tags.append(tag)

        # TODO: parse the dates, retrieve the times
        dates = [o.get("value") for o in soup.select("select.datedleb_resa option")]

        patternCodeSite = re.compile(
            r'.*gsw_vars\["CODEPRESTATAIRE"\] = "(.*?)";.*', flags=re.DOTALL
        )
        patternCodeObject = re.compile(
            r'.*gsw_vars\["CODEPRESTATION"\] = "(.*?)";.*', flags=re.DOTALL
        )
        patternCodeMoteur = re.compile(
            r".*Resa.init_moteur_resa\(\'([0-9]+)\'\);.*", flags=re.DOTALL
        )
        scripts = soup.find_all("script")
        codeSite = ""
        idObject = ""
        moteur = ""
        for script in scripts:
            if patternCodeSite.match(str(script.string)):
                data = patternCodeSite.match(script.string)
                codeSite = data.groups()[0]
            if patternCodeObject.match(str(script.string)):
                data = patternCodeObject.match(script.string)
                idObject = data.groups()[0]
            if patternCodeMoteur.match(str(script.string)):
                data = patternCodeMoteur.match(script.string)
                moteur = data.groups()[0]

        pause = self.downloader.pause
        self.downloader.pause = False

        # getting the exact schedule requires two supplementary requests
        datetimes = []
        if codeSite != "" and idObject != "" and moteur != "":
            for date in dates:
                # the first page is required such that the server knows the selected date
                self.downloader.get_content(
                    self.root_address
                    + "/booking?action=searchAjax&cid="
                    + moteur
                    + "&afficheDirectDispo="
                    + date
                    + "&type_prestataire=V&cle_fiche=PRESTATION-V-"
                    + codeSite
                    + "-"
                    + idObject
                    + "&datedeb="
                    + date
                )
                # then we get the form with hours
                page2 = self.downloader.get_content(
                    self.root_address
                    + "/booking?action=detailTarifsPrestationAjax&prestation=V-"
                    + codeSite
                    + "-"
                    + idObject
                )
                soup2 = BeautifulSoup(page2, "html.parser")
                times = [o.text for o in soup2.select("#quart_en_cours_spec option")]
                for t in times:
                    startdate = Extractor.parse_french_date(date)
                    starttime = Extractor.parse_french_time(t)
                    start = datetime.combine(startdate, starttime)
                    enddate = None
                    endtime = None
                    if duration is not None:
                        end = start + timedelta(
                            hours=duration.hour,
                            minutes=duration.minute,
                            seconds=duration.second,
                        )
                        enddate = end.date()
                        endtime = end.time()
                    datetimes.append((startdate, starttime, enddate, endtime))
        self.downloader.pause = pause

        category = None
        if len(categories) > 0:
            category = categories[0]

        for dt in datetimes:

            self.add_event_with_props(
                default_values,
                event_url,
                title,
                category,
                dt[0],
                location,
                description,
                tags,
                recurrences=None,
                uuids=[event_url],
                url_human=url_human,
                start_time=dt[1],
                end_day=dt[2],
                end_time=dt[3],
                published=published,
                image=image,
            )
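The booking identifiers in the extractor above are scraped from inline JavaScript with re.DOTALL patterns, so the leading and trailing .* can span the whole multi-line script body. A compact illustration with a made-up script body:

```python
import re

script = 'var x = 1;\ngsw_vars["CODEPRESTATAIRE"] = "1234";\nvar y = 2;'
pattern = re.compile(r'.*gsw_vars\["CODEPRESTATAIRE"\] = "(.*?)";.*', flags=re.DOTALL)
m = pattern.match(script)
assert m is not None and m.groups()[0] == "1234"
```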
@@ -1,62 +1,34 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..generic_extractors import *
import json5
from bs4 import BeautifulSoup
from datetime import datetime, date


# A class dedicated to get events from La Comédie de Clermont:
# URL: https://lacomediedeclermont.com/saison23-24/wp-admin/admin-ajax.php?action=load_dates_existantes
# URL for humans: https://lacomediedeclermont.com/saison24-25/
# URL for humans: https://lacomediedeclermont.com/saison23-24/
class CExtractor(TwoStepsExtractor):
    nom_lieu = "La Comédie de Clermont"
    url_referer = "https://lacomediedeclermont.com/saison24-25/"

    def is_to_import_from_url(self, url):
        if any(
            keyword in url
            for keyword in [
                "podcast",
                "on-debriefe",
                "popcorn",
                "rencontreautour",
                "rencontre-autour",
                "les-cles-du-spectacle",
            ]
        ):
            return False
        else:
            return True

    def category_comedie2agenda(self, category):
        mapping = {
            "Théâtre": "Spectacles",
            "Danse": "Spectacles",
            "Rencontre": "Rencontres & Débats",
            "Sortie de résidence": "Sans catégorie",
            "PopCorn Live": "Sans catégorie",
        }
        mapping_tag = {
            "Théâtre": "🎭 théâtre",
            "Danse": "💃 danse",
            "Rencontre": None,
            "Sortie de résidence": "sortie de résidence",
            "PopCorn Live": None,
            "Théâtre": "Théâtre",
            "Danse": "Danse",
            "Rencontre": "Autre",
            "Sortie de résidence": "Autre",
            "PopCorn Live": "Autre",
        }
        if category in mapping:
            return mapping[category], mapping_tag[category]
            return mapping[category]
        else:
            return None, None
            return None

    def build_event_url_list(self, content):
        dates = json5.loads(content)["data"][0]

        url = self.url.split("?")[0]
        for d in list(set(dates)):
            if not self.only_future or self.now <= date.fromisoformat(d):
            if not self.only_future or self.now <= datetime.date.fromisoformat(d):
                events = self.downloader.get_content(
                    url,
                    post={"action": "load_evenements_jour", "jour": d},
                    referer="https://lacomediedeclermont.com/saison24-25/",
                    url, post={"action": "load_evenements_jour", "jour": d}
                )
                if events:
                    events = json5.loads(events)
@@ -68,35 +40,29 @@ class CExtractor(TwoStepsExtractor):
                        e_url = (
                            e.select("a")[0]["href"] + "#" + d
                        )  # a "fake" url specific for each day of this show

                        if self.is_to_import_from_url(e_url):
                            self.add_event_url(e_url)
                            self.add_event_start_day(e_url, d)
                            t = (
                                str(e.select("div#datecal")[0])
                                .split(" ")[-1]
                                .split("<")[0]
                        self.add_event_url(e_url)
                        self.add_event_start_day(e_url, d)
                        t = (
                            str(e.select("div#datecal")[0])
                            .split(" ")[-1]
                            .split("<")[0]
                        )
                        self.add_event_start_time(e_url, t)
                        title = e.select("a")[0].contents[0]
                        self.add_event_title(e_url, title)
                        category = e.select("div#lieuevtcal span")
                        if len(category) > 0:
                            category = self.category_comedie2agenda(
                                category[-1].contents[0]
                            )
                            self.add_event_start_time(e_url, t)
                            title = e.select("a")[0].contents[0]
                            self.add_event_title(e_url, title)
                            category = e.select("div#lieuevtcal span")
                            if len(category) > 0:
                                category, tag = self.category_comedie2agenda(
                                    category[-1].contents[0]
                                )
                                if category:
                                    self.add_event_category(e_url, category)
                                if tag:
                                    self.add_event_tag(e_url, tag)
                            location = (
                                e.select("div#lieuevtcal")[0]
                                .contents[-1]
                                .split("•")[-1]
                            )
                            if location.replace(" ", "") == "":
                                location = self.nom_lieu
                            self.add_event_location(e_url, location)
                        if category is not None:
                            self.add_event_category(e_url, category)
                        location = (
                            e.select("div#lieuevtcal")[0]
                            .contents[-1]
                            .split("•")[-1]
                        )
                        self.add_event_location(e_url, location)

    def add_event_from_content(
        self,
@@ -109,31 +75,16 @@ class CExtractor(TwoStepsExtractor):
        soup = BeautifulSoup(event_content, "html.parser")

        image = soup.select("#imgspec img")
        if image and len(image) > 0:
        if image:
            image = image[0]["src"]
        else:
            image = None

        description = soup.select("#descspec")
        if description and len(description) > 0:
            description = description[0].get_text().replace("Lire plus...", "")
            # optionally append the complementary information

            d_suite = ""
            for d in ["#typespec", "#dureespec", "#lieuspec", ".lkuncontdroitespec"]:
                comp_desc = soup.select(d)
                if comp_desc and len(comp_desc) > 0:
                    for desc in comp_desc:
                        d_suite += "\n\n" + desc.get_text()
            if d_suite != "":
                description += "\n\n> Informations complémentaires:" + d_suite
        else:
            description = None
        description = soup.select("#descspec")[0].get_text().replace("Lire plus...", "")

        url_human = event_url

        self.add_event_with_props(
            default_values,
            event_url,
            None,
            None,
@@ -1,8 +1,6 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..generic_extractors.ggcal_link import GGCalendar
from ..generic_extractors import *
import re
import json5
from bs4 import BeautifulSoup


# A class dedicated to get events from La Coopérative de Mai:
@@ -24,7 +22,7 @@ class CExtractor(TwoStepsExtractor):
            for e in data["events"]:
                self.add_event_url(e["url"])
                if e["tag"] == "Gratuit":
                    self.add_event_tag(e["url"], "💶 gratuit")
                    self.add_event_tag(e["url"], "gratuit")

        else:
            raise Exception("Cannot extract events from javascript")
@@ -40,7 +38,7 @@ class CExtractor(TwoStepsExtractor):
        soup = BeautifulSoup(event_content, "html.parser")

        title = soup.find("h1").contents[0]
        category = "Fêtes & Concerts"
        category = "Concert"
        image = soup.find("meta", property="og:image")
        if image:
            image = image["content"]
@@ -55,7 +53,7 @@ class CExtractor(TwoStepsExtractor):
        if description is None:
            description = ""

        tags = ["🎵 concert"]
        tags = []

        link_calendar = soup.select('a[href^="https://calendar.google.com/calendar/"]')
        if len(link_calendar) == 0:
@@ -70,7 +68,6 @@ class CExtractor(TwoStepsExtractor):
        url_human = event_url

        self.add_event_with_props(
            default_values,
            event_url,
            title,
            category,
@@ -1,6 +1,5 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..extractor import Extractor
from bs4 import BeautifulSoup
from ..generic_extractors import *
import re


# A class dedicated to get events from La puce à l'oreille
@@ -15,7 +14,12 @@ class CExtractor(TwoStepsExtractor):
        for e in events:
            e_url = e.find("a")
            if e_url:
                self.add_event_url(e_url["href"])
                if self.add_event_url(e_url["href"]):
                    title = e.select("div[data-testid=richTextElement] h1.font_0 span")
                    if title:
                        title = title[0].contents[0].get_text().replace("\n", " ")
                        title = re.sub(" +", " ", title)
                        self.add_event_title(e_url["href"], title)

    def add_event_from_content(
        self,
@@ -27,12 +31,9 @@ class CExtractor(TwoStepsExtractor):
    ):
        soup = BeautifulSoup(event_content, "html.parser")

        title = soup.select("h2")[0].get_text()

        start_day = Extractor.parse_french_date(
            soup.select("h2")[1].get_text()
        start_day = self.parse_french_date(
            soup.find("h2").get_text()
        )  # not perfect, but this site really is badly structured
        print(soup.select("h2")[1].get_text())

        spans = soup.select("div[data-testid=richTextElement] span")
        start_time = None
@@ -42,13 +43,13 @@ class CExtractor(TwoStepsExtractor):
        for span in spans:
            txt = span.get_text()
            if txt.lstrip().startswith("DÉBUT"):
                start_time = Extractor.parse_french_time(txt.split(":")[-1])
                start_time = self.parse_french_time(txt.split(":")[-1])
                end_time = None
            elif txt.lstrip().startswith("HORAIRES :"):
                hs = txt.split(":")[-1].split("-")
                start_time = Extractor.parse_french_time(hs[0])
                start_time = self.parse_french_time(hs[0])
                if len(hs) > 1:
                    end_time = Extractor.parse_french_time(hs[1])
                    end_time = self.parse_french_time(hs[1])
                else:
                    end_time = None
            elif txt.lstrip().startswith("LIEU :") and not location:
@@ -56,10 +57,10 @@ class CExtractor(TwoStepsExtractor):

        if not location:
            location = self.nom_lieu
        end_day = Extractor.guess_end_day(start_day, start_time, end_time)
        end_day = self.guess_end_day(start_day, start_time, end_time)

        url_human = event_url
        tags = ["🎵 concert"]
        tags = []

        image = soup.select("wow-image img[fetchpriority=high]")
        if image:
@@ -77,10 +78,9 @@ class CExtractor(TwoStepsExtractor):
            description = None

        self.add_event_with_props(
            default_values,
            event_url,
            title,
            "Fêtes & Concerts",
            None,
            "Concert",
            start_day,
            location,
            description,
@@ -1,67 +0,0 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from bs4 import BeautifulSoup


# A class dedicated to get events from Raymond Bar
# URL: https://www.raymondbar.net/
class CExtractor(TwoStepsExtractorNoPause):

    def __init__(self):
        super().__init__()

    def build_event_url_list(self, content, infuture_days=180):

        soup = BeautifulSoup(content, "html.parser")

        links = soup.select(".showsList .showMore")
        if links:
            for lk in links:
                self.add_event_url(lk["href"])

    def add_event_from_content(
        self,
        event_content,
        event_url,
        url_human=None,
        default_values=None,
        published=False,
    ):
        soup = BeautifulSoup(event_content, "html.parser")

        title = soup.select_one(".showDesc h4 a.summary").text
        start_day = soup.select_one(".showDate .value-title")
        start_time = None

        if start_day is not None:
            start_day = start_day["title"]
            if start_day is not None:
                start_day = start_day.split("T")[0]

        description = soup.select_one(".showDetails.description").text
        image = soup.select(".showDetails.description img")
        if image is not None:
            image_alt = image[-1]["alt"]
            image = image[-1]["src"]

        if start_time is None:
            title += " - Attention: l'heure n'a pu être extraite"

        self.add_event_with_props(
            default_values,
            event_url,
            title,
            None,
            start_day,
            None,
            description,
            [],
            recurrences=None,
            uuids=[event_url],
            url_human=event_url,
            start_time=start_time,
            end_day=None,
            end_time=None,
            published=published,
            image=image,
            image_alt=image_alt,
        )
@@ -1,6 +1,4 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..extractor import Extractor
from bs4 import BeautifulSoup
from ..generic_extractors import *


# A class dedicated to get events from Le Fotomat'
@@ -11,12 +9,11 @@ class CExtractor(TwoStepsExtractor):
    def category_fotomat2agenda(self, category):
        if not category:
            return None
        mapping = {"Concerts": "Fêtes & Concerts"}
        mapping_tag = {"Concerts": "🎵 concert"}
        mapping = {"Concerts": "Concert"}
        if category in mapping:
            return mapping[category], mapping_tag[category]
            return mapping[category]
        else:
            return None, None
            return None

    def build_event_url_list(self, content):
        soup = BeautifulSoup(content, "xml")
@@ -29,11 +26,9 @@ class CExtractor(TwoStepsExtractor):
            title = e.find("title").contents[0]
            self.add_event_title(e_url, title)

            category, tag = self.category_fotomat2agenda(e.find("category").contents[0])
            category = self.category_fotomat2agenda(e.find("category").contents[0])
            if category:
                self.add_event_category(e_url, category)
            if tag:
                self.add_event_tag(e_url, tag)

    def add_event_from_content(
        self,
@@ -50,10 +45,10 @@ class CExtractor(TwoStepsExtractor):
        else:
            image = None
        desc = soup.select("head meta[name=description]")[0]["content"]
        start_day = Extractor.parse_french_date(desc.split("-")[0])
        start_time = Extractor.parse_french_time(desc.split("-")[1])
        end_time = Extractor.parse_french_time(desc.split("-")[2])
        end_day = Extractor.guess_end_day(start_day, start_time, end_time)
        start_day = self.parse_french_date(desc.split("-")[0])
        start_time = self.parse_french_time(desc.split("-")[1])
        end_time = self.parse_french_time(desc.split("-")[2])
        end_day = self.guess_end_day(start_day, start_time, end_time)

        location = self.nom_lieu
        descriptions = soup.select("div.vce-col-content")
@@ -74,7 +69,6 @@ class CExtractor(TwoStepsExtractor):
        url_human = event_url

        self.add_event_with_props(
            default_values,
            event_url,
            None,
            None,
@@ -1,100 +0,0 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from ..extractor import Extractor
from bs4 import BeautifulSoup
from datetime import datetime


# A class dedicated to get events from Cinéma Le Rio (Clermont-Ferrand)
# URL: https://www.cinemalerio.com/evenements/
class CExtractor(TwoStepsExtractorNoPause):

    def __init__(self):
        super().__init__()
        self.possible_dates = {}
        self.theater = None

    def build_event_url_list(self, content, infuture_days=180):

        soup = BeautifulSoup(content, "html.parser")

        links = soup.select("td.seance_link a")
        if links:
            for lk in links:
                self.add_event_url(lk["href"])

    def to_text_select_one(soup, filter):
        e = soup.select_one(filter)
        if e is None:
            return None
        else:
            return e.text

    def add_event_from_content(
        self,
        event_content,
        event_url,
        url_human=None,
        default_values=None,
        published=False,
    ):

        soup = BeautifulSoup(event_content, "html.parser")

        title = soup.select_one("h1").text

        alerte_date = CExtractor.to_text_select_one(soup, ".alerte_date")
        if alerte_date is None:
            return
        dh = alerte_date.split("à")
        # if date is not found, we skip
        if len(dh) != 2:
            return

        date = Extractor.parse_french_date(dh[0], default_year=datetime.now().year)
        time = Extractor.parse_french_time(dh[1])

        synopsis = CExtractor.to_text_select_one(soup, ".synopsis_bloc")
        special_titre = CExtractor.to_text_select_one(soup, ".alerte_titre")
        special = CExtractor.to_text_select_one(soup, ".alerte_text")

        # it's not a specific event: we skip it
        special_lines = None if special is None else special.split("\n")
        if (
            special is None
            or len(special_lines) == 0
            or (
                len(special_lines) == 1
                and special_lines[0].strip().startswith("En partenariat")
            )
        ):
            return

        description = "\n\n".join(
            [x for x in [synopsis, special_titre, special] if x is not None]
        )

        image = soup.select_one(".col1 img")
        image_alt = None
        if image is not None:
            image_alt = image["alt"]
            image = image["src"]

        self.add_event_with_props(
            default_values,
            event_url,
            title,
            None,
            date,
            None,
            description,
            [],
            recurrences=None,
            uuids=[event_url],
            url_human=event_url,
            start_time=time,
            end_day=None,
            end_time=None,
            published=published,
            image=image,
            image_alt=image_alt,
        )
@@ -1,215 +0,0 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from ..extractor import Extractor
from bs4 import BeautifulSoup
from datetime import date
from urllib.parse import urlparse


# A class dedicated to get events from Mille formes
# URL: https://www.milleformes.fr/programme
class CExtractor(TwoStepsExtractorNoPause):

    def extract(
        self,
        content,
        url,
        url_human=None,
        default_values=None,
        published=False,
        only_future=True,
        ignore_404=True,
    ):
        self.root_address = "https://" + urlparse(url).netloc + "/"
        self.today = date.today()
        return super().extract(
            content, url, url_human, default_values, published, only_future, ignore_404
        )

    def parse_category(self, cat):
        cat = cat.replace("\n", "").strip()
        if "exposition" in cat or "dispositif artistique interactif" in cat:
            result = "Visites & Expositions"
        elif "atelier" in cat:
            result = "Animations & Ateliers"
        elif cat in ["buffet"]:
            result = "Rendez-vous locaux"
        elif "ciné" in cat:
            result = "Cinéma"
        elif "concert" in cat:
            result = "Fêtes & Concerts"
        elif "rencontre" in cat:
            result = "Rencontres & Débats"
        elif "spectacle" in cat:
            result = "Spectacles"
        else:
            result = "Sans catégorie"

        return result

    # this method is not perfect, but dates and hours are not structured
    def parse_dates(self, date):
        dl = date.replace(" à ", "\n").split("\n")
        result = []

        for d in dl:
            # only lines with a digit
            if sum(c.isdigit() for c in d) != 0:
                # split subparts
                for d2 in d.replace(" et ", ", ").split(", "):
                    d2 = d2.strip()
                    dd = Extractor.parse_french_date(
                        d2, default_year_by_proximity=self.today
                    )
                    if dd is None:
                        hh = Extractor.parse_french_time(d2)
                        for i, r in enumerate(result):
                            result[i][1].append(hh)
                    else:
                        result.append([dd, []])

        if "De" in date and " à " in date:
            for i, r in enumerate(result):
                result[i].append(True)

        return result

    def build_event_url_list(self, content, infuture_days=180):

        soup = BeautifulSoup(content, "html.parser")
        links = soup.select(".cell a.evenement")
        for lk in links:
            self.add_event_url(self.root_address + lk["href"])

    def add_event_from_content(
        self,
        event_content,
        event_url,
        url_human=None,
        default_values=None,
        published=False,
    ):
        soup = BeautifulSoup(event_content, "html.parser")
        title = soup.select_one("h1").text.replace("\n", "").strip().title()

        image = soup.select_one(".slide img")
        if image is None:
            image_alt = ""
        else:
            image_alt = image["alt"]
            image = self.root_address + image["src"]

        soustitre = soup.select_one(".sous-titre")
        if soustitre is not None:
            soustitre = soustitre.text.strip()

        description = soup.select_one(".texte-full").text.strip()
        infos = soup.select_one(".champ .infos")
        if infos is not None:
            infos = infos.text

        location = soup.select_one(".champ .taxo.espace").text.strip()

        soup.select_one(".champ.taxo-age").text
        category = self.parse_category(soup.select_one(".champ.categorie").text)

        date = soup.select_one(".champ.date-libre").text

        description = "\n\n".join(
            [x for x in [soustitre, description, date, infos] if x is not None]
        )

        if (
            " au " in date
            or date.startswith("Du")
            or date.lower().strip() == "en continu"
            or date.startswith("Les")
        ):
            return

        dates = self.parse_dates(date)

        for d in dates:
            if len(d) >= 2:
                start_day = d[0]

                if len(d) == 3 and len(d[1]) == 2:
                    start_time = d[1][0]
                    end_time = d[1][1]
                    uuid = (
                        event_url
                        + "?date="
                        + str(start_day)
                        + "&hour="
                        + str(start_time)
                    )
                    self.add_event_with_props(
                        default_values,
                        event_url,
                        title,
                        category,
                        start_day,
                        location,
                        description,
                        [],
                        recurrences=None,
                        uuids=[uuid],
                        url_human=event_url,
                        start_time=start_time,
                        end_day=start_day,
                        end_time=end_time,
                        published=published,
                        image=image,
                        image_alt=image_alt,
                    )
                else:
                    end_time = None
                    if len(d[1]) == 0:
                        start_time = None
                        uuid = event_url + "?date=" + str(start_day)
                        self.add_event_with_props(
                            default_values,
                            event_url,
                            title,
                            category,
                            start_day,
                            location,
                            description,
                            [],
                            recurrences=None,
                            uuids=[uuid],
                            url_human=event_url,
                            start_time=start_time,
                            end_day=start_day,
                            end_time=end_time,
                            published=published,
                            image=image,
                            image_alt=image_alt,
                        )
                    for t in d[1]:
                        start_time = t
                        uuid = (
                            event_url
                            + "?date="
                            + str(start_day)
                            + "&hour="
                            + str(start_time)
                        )
                        self.add_event_with_props(
                            default_values,
                            event_url,
                            title,
                            category,
                            start_day,
                            location,
                            description,
                            [],
                            recurrences=None,
                            uuids=[uuid],
                            url_human=event_url,
                            start_time=start_time,
                            end_day=start_day,
                            end_time=end_time,
                            published=published,
                            image=image,
                            image_alt=image_alt,
                        )
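For orientation, the dispatch in add_event_from_content above consumes parse_dates() entries shaped like [day, [times...]], with an optional trailing True marking a "De ... à ..." range. A sketch of the three branches with stand-in values (the exact objects returned by the French date and time parsers are assumptions here):

```python
# Illustrative dispatch on a parse_dates() entry `d`; values are stand-ins.
d = ["2025-03-15", ["14:00", "16:00"], True]
if len(d) == 3 and len(d[1]) == 2:
    start_time, end_time = d[1]   # "De 14h à 16h": one event with an end time
elif len(d[1]) == 0:
    start_time = None             # a bare date: one event without a time
else:
    for t in d[1]:                # several times: one event per time
        start_time = t
```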
@@ -1,40 +1,27 @@
from urllib.parse import urlencode
import urllib.request
from urllib.request import Request
import os
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from selenium.common.exceptions import (
    StaleElementReferenceException,
    NoSuchElementException,
    TimeoutException,
    WebDriverException,
    SessionNotCreatedException,
)
from abc import ABC, abstractmethod
import time


class Downloader(ABC):
    def __init__(self):
        self.support_2nd_extract = False
        pass

    @abstractmethod
    def download(self, url, post=None):
        pass

    def get_content(
        self, url, cache=None, referer=None, post=None, content_type=None, data=None
    ):
    def get_content(self, url, cache=None, post=None):
        if cache and os.path.exists(cache):
            print("Loading cache ({})".format(cache))
            with open(cache) as f:
                content = "\n".join(f.readlines())
        else:
            content = self.download(
                url, referer=referer, post=post, content_type=content_type, data=data
            )
            content = self.download(url, post)

        if cache:
            print("Saving cache ({})".format(cache))
@@ -50,121 +37,38 @@ class SimpleDownloader(Downloader):
    def __init__(self):
        super().__init__()

    def download(self, url, referer=None, post=None, content_type=None, data=None):
        print("Downloading {} referer: {} post: {}".format(url, referer, post))
        try:
            headers = {
                "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:126.0) Gecko/20100101 Firefox/126.0",
            }
            if referer is not None:
                headers["Referer"] = referer
            if content_type is not None:
                headers["Content-Type"] = content_type
            req = Request(url, headers=headers, data=data)
            if post:
                post_args = urlencode(post).encode("utf-8")
                resource = urllib.request.urlopen(req, post_args)
            else:
                resource = urllib.request.urlopen(req)
            charset = resource.headers.get_content_charset()
            if charset:
                data = resource.read().decode(charset)
            else:
                data = resource.read().decode()
            return data
    def download(self, url, post=None):
        print("Downloading {}".format(url))

        try:
            if post:
                post_args = urlencode(post).encode()
                resource = urllib.request.urlopen(url, post_args)
            else:
                resource = urllib.request.urlopen(url)
            data = resource.read().decode(resource.headers.get_content_charset())
            return data
        except Exception as e:
            print(e)
            raise Exception("Error during download: " + str(e)[:64] + "...")
            return None


class ChromiumHeadlessDownloader(Downloader):
    def __init__(self, pause=True, noimage=True, proxy=False):
    def __init__(self):
        super().__init__()
        self.support_2nd_extract = True

        self.pause = pause
        self.proxy = proxy
        self.options = Options()
        self.options.add_argument("--headless=new")
        self.options.add_argument("--disable-dev-shm-usage")
        self.options.add_argument("--no-sandbox")
        self.options.add_argument("start-maximized")
        self.options.add_argument("enable-automation")
        self.options.add_argument("--disable-dev-shm-usage")
        self.options.add_argument("--disable-browser-side-navigation")
        self.options.add_argument("--disable-gpu")
        if self.proxy:
            self.options.add_argument("--proxy-server=socks5://127.0.0.1:12345")

        if noimage:
            self.options.add_experimental_option(
                "prefs",
                {
                    # block image loading
                    "profile.managed_default_content_settings.images": 2,
                },
            )

        self.service = Service("/usr/bin/chromedriver")
        self.driver = webdriver.Chrome(service=self.service, options=self.options)

    def __del__(self):
        self.driver.quit()

    def screenshot(self, url, path_image):
        print("Screenshot {}".format(url))
        try:
            self.driver.get(url)
            if self.pause:
                time.sleep(2)
            self.driver.save_screenshot(path_image)
        except Exception:
            print(f">> Exception: {url}")
            return False

        return True

    def download(self, url, referer=None, post=None, content_type=None, data=None):
    def download(self, url, post=None):
        if post:
            raise Exception("POST method with Chromium headless not yet implemented")
        if referer:
            raise Exception(
                "Referer parameter with Chromium headless not yet implemented"
            )
        if data:
            raise Exception("Data content with Chromium headless not yet implemented")
        if content_type:
            raise Exception(
                "Content-type parameter with Chromium headless not yet implemented"
            )
        print("Download {}".format(url))
        self.driver = webdriver.Chrome(service=self.service, options=self.options)

        try:
            self.driver.get(url)
            if self.pause:
                time.sleep(2)
            doc = self.driver.page_source

        except StaleElementReferenceException as e:
            print(f">> {type(e).__name__}: {e.args}")
            raise Exception("Error during download: " + str(e)[:64] + "...")
        except NoSuchElementException as e:
            print(f">> {type(e).__name__}: {e.args}")
            raise Exception("Error during download: " + str(e)[:64] + "...")
        except TimeoutException as e:
            print(f">> {type(e).__name__}: {e.args}")
            raise Exception("Error during download: " + str(e)[:64] + "...")
        except WebDriverException as e:
            print(f">> {type(e).__name__}: {e.args}")
            raise Exception("Error during download: " + str(e)[:64] + "...")
        except SessionNotCreatedException as e:
            print(f">> {type(e).__name__}: {e.args}")
            raise Exception("Error during download: " + str(e)[:64] + "...")
        except Exception as e:
            print(
                f">> {type(e).__name__} line {e.__traceback__.tb_lineno} of {__file__}: {e.args}"
            )
            raise Exception("Error during download: " + str(e)[:64] + "...")

        self.driver.get(url)
        doc = self.driver.page_source
        self.driver.quit()
        return doc

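For orientation, a minimal usage sketch of the two downloaders above (illustrative only: the URL and cache path are invented; the class and method names come from the hunk above):

    # Illustrative sketch, not part of the diff: fetch a static page with
    # file caching, then a JS-rendered page through the headless browser.
    dl = SimpleDownloader()
    html = dl.get_content("https://example.com/agenda", cache="cache/agenda.html")

    cdl = ChromiumHeadlessDownloader()
    rendered = cdl.get_content("https://example.com/js-agenda")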
@@ -1,44 +1,21 @@
from abc import ABC, abstractmethod
from enum import IntEnum
from datetime import datetime, time, date, timedelta
import re
import unicodedata
from django.utils import timezone
from django.utils.translation import gettext_lazy as _


def remove_accents(input_str):
    nfkd_form = unicodedata.normalize("NFKD", input_str)
    return "".join([c for c in nfkd_form if not unicodedata.combining(c)])


class Extractor(ABC):
    class Warning(IntEnum):
        NO_TITLE = 1
        NO_START_DATE = 2
        NOT_FOUND = 3

    url_referer = None

    def __init__(self):
        self.header = {}
        self.events = []
        self.downloader = None
        self.has_2nd_method = False

        # setting this to True in derived classes will
        # prevent the importer from using the downloader on the url
        # (used for extractors that are self-sufficient)
        self.no_downloader = False
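        # (Illustrative note, not in the original file: an extractor that can
        # build its events from the URL alone would set
        # self.no_downloader = True in its own __init__.)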

        # parameters used by the downloader to get the content
        self.referer = ""
        self.data = None
        self.content_type = None

    def prepare_2nd_extract(self):
        pass

    def remove_accents(input_str):
        nfkd_form = unicodedata.normalize("NFKD", input_str)
        return "".join([c for c in nfkd_form if not unicodedata.combining(c)])

    def guess_end_day(start_day, start_time, end_time):
    def guess_end_day(self, start_day, start_time, end_time):
        if end_time:
            if end_time > start_time:
                return start_day
@@ -47,7 +24,7 @@ class Extractor(ABC):
        else:
            return start_day

    def guess_month(text):
    def guess_month(self, text):
        mths = [
            "jan",
            "fe",
@@ -62,74 +39,46 @@
            "nov",
            "dec",
        ]
        t = Extractor.remove_accents(text).lower()
        t = remove_accents(text).lower()
        for i, m in enumerate(mths):
            if t.startswith(m):
                return i + 1
        return None

    def parse_french_date(text, default_year=None, default_year_by_proximity=None):
    def parse_french_date(self, text):
        # format: DayName Number Month Year
        m = re.search(
            "[a-zA-ZéÉûÛ:.]+[ ]*([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)[ ]*([0-9]+)", text
        )
        if m:
            day = m.group(1)
            month = Extractor.guess_month(m.group(2))
            month = self.guess_month(m.group(2))
            year = m.group(3)
        else:
            # format: Number Month Year
            m = re.search("([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)[ ]*([0-9]+)", text)
            if m:
                day = m.group(1)
                month = Extractor.guess_month(m.group(2))
                month = self.guess_month(m.group(2))
                year = m.group(3)
            else:
                # format: DD/MM/YYYY
                m = re.search("([0-9]+)/([0-9]+)/([0-9]+)", text)
                if m:
                    day = m.group(1)
                    month = int(m.group(2))
                    year = m.group(3)
                else:
                    # format: Number Month
                    m = re.search("([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)", text)
                    if m:
                        day = m.group(1)
                        month = Extractor.guess_month(m.group(2))
                        year = default_year
                    else:
                        # TODO: consolidate the unhandled cases
                        return None
                    # TODO: consolidate the unhandled cases
                    return None

        if month is None:
            return None
        try:
            day = int(day)
            if year is not None:
                year = int(year)
        except Exception:
            year = int(year)
        except:
            return None
        if day >= 32:
            return None

        # by proximity
        if year is None and default_year_by_proximity is not None:
            dates = [
                date(default_year_by_proximity.year + x, month, day) for x in [-1, 0, 1]
            ]
            dates = [(abs((d - default_year_by_proximity).days), d) for d in dates]
            d = min(dates, key=lambda x: x[0])
            return d[1]

        if year is None:
            return None

        if year < 100:
            year = 2000 + year
        if day >= 32:
            return None
        return date(year, month, day)

    def parse_french_time(text):
    def parse_french_time(self, text):
        # format: hours minutes seconds
        m = re.search("([0-9]+)[ a-zA-Z:.]+([0-9]+)[ a-zA-Z:.]+([0-9]+)", text)
        if m:
@@ -145,26 +94,19 @@ class Extractor(ABC):
            s = "0"
        else:
            # format: hours
            m = re.search("([0-9]+)[ ]*[Hh:.]", text)
            m = re.search("([0-9]+)[ Hh:.]", text)
            if m:
                h = m.group(1)
                m = "0"
                s = "0"
            else:
                # format: minutes
                m = re.search("([0-9]+)[ ]*(?:mn|min|Min|Mn)", text)
                if m:
                    h = "0"
                    m = m.group(1)
                    s = "0"
                else:
                    return None
                return None

        try:
            h = int(h)
            m = int(m)
            s = int(s)
        except Exception:
        except:
            return None
        if h >= 24 or m >= 60 or s >= 60:
            return None
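A quick sketch of what these two parsers are expected to return (illustrative calls: they assume an instance of a hypothetical concrete Extractor subclass, that the month list elided by the hunk above contains "mai", and that the elided tail of parse_french_time builds a datetime.time):

    # Illustrative, not part of the diff.
    ex = SomeExtractor()  # hypothetical concrete subclass
    ex.parse_french_date("mardi 14 mai 2024")  # -> datetime.date(2024, 5, 14)
    ex.parse_french_date("14/05/2024")         # -> datetime.date(2024, 5, 14)
    ex.parse_french_time("18h")                # -> roughly datetime.time(18, 0)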
@@ -179,8 +121,9 @@
    def set_downloader(self, downloader):
        self.downloader = downloader

    def is_known_url(url):
        return False
    @abstractmethod
    def clean_url(url):
        pass

    def set_header(self, url):
        self.header["url"] = url
@@ -191,7 +134,6 @@ class Extractor(ABC):

    def add_event(
        self,
        default_values,
        title,
        category,
        start_day,
@@ -208,56 +150,26 @@ class Extractor(ABC):
        published=False,
        image=None,
        image_alt=None,
        not_found=False,
    ):
        comments = ""
        warnings = []
        if title is None:
            print("WARNING: cannot publish an event without name")
            published = False
            title = _("Unknown title")
            warnings.append(Extractor.Warning.NO_TITLE)
            print("ERROR: cannot import an event without name")
            return
        if start_day is None:
            print("WARNING: cannot publish an event without start day")
            published = False
            start_day = datetime.now().date().strftime("%Y-%m-%d")
            warnings.append(Extractor.Warning.NO_START_DATE)
        if not_found:
            warnings.append(Extractor.Warning.NOT_FOUND)

        tags_default = self.default_value_if_exists(default_values, "tags")
        if not tags_default:
            tags_default = []
            print("ERROR: cannot import an event without start day")
            return

        event = {
            "title": title,
            "category": (
                category
                if category
                else self.default_value_if_exists(default_values, "category")
            ),
            "category": category,
            "start_day": start_day,
            "uuids": uuids,
            "location": (
                location
                if location
                else self.default_value_if_exists(default_values, "location")
            ),
            "organisers": self.default_value_if_exists(default_values, "organisers"),
            "location": location,
            "description": description,
            "tags": tags + tags_default,
            "tags": tags,
            "published": published,
            "image": image,
            "image_alt": image_alt,
            "email": self.default_value_if_exists(default_values, "email"),
            "comments": self.default_value_if_exists(default_values, "comments"),
            "warnings": warnings,
        }
        if event["comments"] is None:
            event["comments"] = comments
        else:
            event["comments"] += "\n" + comments

        # TODO: why url_human rather than reference_url?
        if url_human is not None:
            event["url_human"] = url_human
@@ -284,14 +196,11 @@
        )

    def get_structure(self):
        if len(self.events) == 0:
            return {}
        else:
            return {"header": self.header, "events": self.events}
        return {"header": self.header, "events": self.events}

    def clean_url(url):
        from .generic_extractors.ical import ICALExtractor
        from .generic_extractors.fbevent import CExtractor as FacebookEventExtractor
        from .extractor_ical import ICALExtractor
        from .extractor_facebook import FacebookEventExtractor

        result = url
        for e in [ICALExtractor, FacebookEventExtractor]:
@@ -299,52 +208,10 @@ class Extractor(ABC):
            return result

    def get_default_extractors(single_event=False):
        from .generic_extractors.ical import ICALExtractor
        from .generic_extractors.fbevent import CExtractor as FacebookEventExtractor
        from .generic_extractors.ggcal_link import (
            CExtractor as GoogleCalendarLinkEventExtractor,
        )
        from .extractor_ical import ICALExtractor
        from .extractor_facebook import FacebookEventExtractor

        if single_event:
            return [
                FacebookEventExtractor(),
                GoogleCalendarLinkEventExtractor(),
                EventNotFoundExtractor(),
            ]
            return [FacebookEventExtractor(single_event=True)]
        else:
            return [
                ICALExtractor(),
                FacebookEventExtractor(),
                GoogleCalendarLinkEventExtractor(),
                EventNotFoundExtractor(),
            ]


# A class that only produces a not-found event
class EventNotFoundExtractor(Extractor):

    def extract(
        self, content, url, url_human=None, default_values=None, published=False
    ):
        self.set_header(url)
        self.clear_events()

        self.add_event(
            default_values,
            "événement sans titre depuis " + url,
            None,
            timezone.now().date(),
            None,
            "l'import a échoué, la saisie doit se faire manuellement à partir de l'url source "
            + url,
            [],
            [url],
            published=False,
            url_human=url,
            not_found=True,
        )

        return self.get_structure()

    def clean_url(url):
        return url
        return [ICALExtractor(), FacebookEventExtractor(single_event=False)]

238 src/agenda_culturel/import_tasks/extractor_facebook.py Normal file
@@ -0,0 +1,238 @@
from datetime import datetime
from bs4 import BeautifulSoup
from urllib.parse import urlparse

from .extractor import *
import json

import logging

logger = logging.getLogger(__name__)


class FacebookEventExtractor(Extractor):
    class SimpleFacebookEvent:
        def __init__(self, data):
            self.elements = {}

            for key in ["id", "start_timestamp", "end_timestamp"]:
                self.elements[key] = data[key] if key in data else None

            if "parent_event" in data:
                self.parent = FacebookEventExtractor.SimpleFacebookEvent(
                    data["parent_event"]
                )

    class FacebookEvent:
        name = "event"
        keys = [
            [
                "start_time_formatted",
                "start_timestamp",
                "is_past",
                "name",
                "price_info",
                "cover_media_renderer",
                "event_creator",
                "id",
                "day_time_sentence",
                "event_place",
                "comet_neighboring_siblings",
            ],
            ["event_description"],
            ["start_timestamp", "end_timestamp"],
        ]
        rules = {
            "event_description": {"description": ["text"]},
            "cover_media_renderer": {
                "image_alt": ["cover_photo", "photo", "accessibility_caption"],
                "image": ["cover_photo", "photo", "full_image", "uri"],
            },
            "event_creator": {
                "event_creator_name": ["name"],
                "event_creator_url": ["url"],
            },
            "event_place": {"event_place_name": ["name"]},
        }

        def __init__(self, i, event):
            self.fragments = {}
            self.elements = {}
            self.neighbor_events = None
            self.possible_end_timestamp = []
            self.add_fragment(i, event)

        def get_element(self, key):
            return self.elements[key] if key in self.elements else None

        def get_element_date(self, key):
            v = self.get_element(key)
            return (
                datetime.fromtimestamp(v).date() if v is not None and v != 0 else None
            )

        def get_element_time(self, key):
            v = self.get_element(key)
            return (
                datetime.fromtimestamp(v).strftime("%H:%M")
                if v is not None and v != 0
                else None
            )

        def add_fragment(self, i, event):
            self.fragments[i] = event

            if FacebookEventExtractor.FacebookEvent.keys[i] == [
                "start_timestamp",
                "end_timestamp",
            ]:
                self.get_possible_end_timestamp(i, event)
            else:
                for k in FacebookEventExtractor.FacebookEvent.keys[i]:
                    if k == "comet_neighboring_siblings":
                        self.get_neighbor_events(event[k])
                    elif k in FacebookEventExtractor.FacebookEvent.rules:
                        for nk, rule in FacebookEventExtractor.FacebookEvent.rules[
                            k
                        ].items():
                            error = False
                            c = event[k]
                            for ki in rule:
                                if c is not None:
                                    c = c[ki]
                                else:
                                    error = True
                            if not error:
                                self.elements[nk] = c
                    else:
                        self.elements[k] = event[k]

        def get_possible_end_timestamp(self, i, data):
            self.possible_end_timestamp.append(
                dict((k, data[k]) for k in FacebookEventExtractor.FacebookEvent.keys[i])
            )

        def get_neighbor_events(self, data):
            self.neighbor_events = [
                FacebookEventExtractor.SimpleFacebookEvent(d) for d in data
            ]

        def __str__(self):
            return (
                str(self.elements)
                + "\n Neighbors: "
                + ", ".join([ne.elements["id"] for ne in self.neighbor_events])
            )

        def consolidate_current_event(self):
            if (
                self.neighbor_events is not None
                and "id" in self.elements
                and "end_timestamp" not in self.elements
            ):
                if self.neighbor_events is not None and "id" in self.elements:
                    id = self.elements["id"]
                    for ne in self.neighbor_events:
                        if ne.elements["id"] == id:
                            self.elements["end_timestamp"] = ne.elements[
                                "end_timestamp"
                            ]

            if (
                "end_timestamp" not in self.elements
                and len(self.possible_end_timestamp) != 0
            ):
                for s in self.possible_end_timestamp:
                    if (
                        "start_timestamp" in s
                        and "start_timestamp" in self.elements
                        and s["start_timestamp"] == self.elements["start_timestamp"]
                    ):
                        self.elements["end_timestamp"] = s["end_timestamp"]
                        break

        def find_event_fragment_in_array(array, event, first=True):
            if isinstance(array, dict):
                seen = False
                for i, ks in enumerate(FacebookEventExtractor.FacebookEvent.keys):
                    if len(ks) == len([k for k in ks if k in array]):
                        seen = True
                        if event is None:
                            event = FacebookEventExtractor.FacebookEvent(i, array)
                        else:
                            event.add_fragment(i, array)
                        # only consider the first of FacebookEvent.keys
                        break
                if not seen:
                    for k in array:
                        event = FacebookEventExtractor.FacebookEvent.find_event_fragment_in_array(
                            array[k], event, False
                        )
            elif isinstance(array, list):
                for e in array:
                    event = FacebookEventExtractor.FacebookEvent.find_event_fragment_in_array(
                        e, event, False
                    )

            if event is not None and first:
                event.consolidate_current_event()
            return event

        def build_event(self, url):
            self.get_element("image")

            return {
                "title": self.get_element("name"),
                "category": None,
                "start_day": self.get_element_date("start_timestamp"),
                "location": self.get_element("event_place_name"),
                "description": self.get_element("description"),
                "tags": [],
                "uuids": [url],
                "url_human": url,
                "start_time": self.get_element_time("start_timestamp"),
                "end_day": self.get_element_date("end_timestamp"),
                "end_time": self.get_element_time("end_timestamp"),
                "image": self.get_element("image"),
                "image_alt": self.get_element("image"),
            }

    def __init__(self, single_event=False):
        self.single_event = single_event
        super().__init__()

    def clean_url(url):
        if FacebookEventExtractor.is_known_url(url):
            u = urlparse(url)
            return "https://www.facebook.com" + u.path
        else:
            return url

    def is_known_url(url):
        u = urlparse(url)
        return u.netloc in ["facebook.com", "www.facebook.com", "m.facebook.com"]

    def extract(
        self, content, url, url_human=None, default_values=None, published=False
    ):
        # NOTE: this method does not use url_human = None and default_values = None

        # get step by step all information from the content
        fevent = None
        soup = BeautifulSoup(content, "html.parser")
        for json_script in soup.find_all("script", type="application/json"):
            json_txt = json_script.get_text()
            json_struct = json.loads(json_txt)
            fevent = FacebookEventExtractor.FacebookEvent.find_event_fragment_in_array(
                json_struct, fevent
            )

        if fevent is not None:
            self.set_header(url)
            event = fevent.build_event(url)
            logger.warning("published: " + str(published))
            event["published"] = published
            self.add_event(**event)
            return self.get_structure()

        return None
@@ -5,10 +5,8 @@ import bbcode

from datetime import datetime, date, timedelta
from bs4 import BeautifulSoup, MarkupResemblesLocatorWarning
import pytz


from ..extractor import Extractor
from .extractor import *

from celery.utils.log import get_task_logger

@@ -18,8 +16,6 @@ logger = get_task_logger(__name__)
class ICALExtractor(Extractor):
    def __init__(self):
        super().__init__()
        self.naive_timezone = False
        self.to_timezone = pytz.timezone("Europe/Paris")

    def get_item_from_vevent(self, event, name, raw=False):
        try:
@@ -28,24 +24,9 @@ class ICALExtractor(Extractor):
                return r
            else:
                return r.decode()
        except Exception:
        except:
            return None

    def guess_image_from_vevent(self, event):
        item = self.get_item_from_vevent(event, "ATTACH", raw=True)
        if item is None:
            return None

        # it seems that FMTTYPE is not available through python-icalendar
        if isinstance(item, list):
            for i in item:
                if ".jpg" in str(i).lower():
                    return str(i)
        else:
            if ".jpg" in str(item).lower():
                return str(item)
        return None

    def get_dt_item_from_vevent(self, event, name):
        item = self.get_item_from_vevent(event, name, raw=True)

@@ -54,8 +35,6 @@ class ICALExtractor(Extractor):

        if item is not None:
            if isinstance(item, datetime):
                if not self.naive_timezone:
                    item = self.to_timezone.normalize(item)
                day = item.date()
                time = item.time()
            elif isinstance(item, date):
@@ -84,7 +63,7 @@ class ICALExtractor(Extractor):

        for event in calendar.walk("VEVENT"):
            title = self.get_item_from_vevent(event, "SUMMARY")
            category = None
            category = self.default_value_if_exists(default_values, "category")

            start_day, start_time = self.get_dt_item_from_vevent(event, "DTSTART")

@@ -97,8 +76,8 @@ class ICALExtractor(Extractor):
                end_day = end_day + timedelta(days=-1)

            location = self.get_item_from_vevent(event, "LOCATION")
            if (location is not None) and location.replace(" ", "") == "":
                location = None
            if location is None:
                location = self.default_value_if_exists(default_values, "location")

            description = self.get_item_from_vevent(event, "DESCRIPTION")
            if description is not None:
@@ -133,16 +112,10 @@ class ICALExtractor(Extractor):
            )
            # possible limitation: if the ordering is not original then related

            tags = []
            tags = self.default_value_if_exists(default_values, "tags")

            last_modified = self.get_item_from_vevent(event, "LAST-MODIFIED", raw=True)

            image = self.guess_image_from_vevent(event)

            url_event = self.get_item_from_vevent(event, "URL", True)
            if url_event:
                url_human = url_event

            recurrence_entries = {}
            for e in ["RRULE", "EXRULE", "EXDATE", "RDATE"]:
                i = self.get_item_from_vevent(event, e, raw=True)
@@ -168,7 +141,6 @@ class ICALExtractor(Extractor):
            if uuidrel is not None:
                luuids += [uuidrel]
            self.add_event(
                default_values,
                title,
                category,
                start_day,
@@ -183,7 +155,6 @@ class ICALExtractor(Extractor):
                end_time=end_time,
                last_modified=last_modified,
                published=published,
                image=image,
            )

        return self.get_structure()
@@ -193,7 +164,6 @@ class ICALExtractor(Extractor):
class ICALNoBusyExtractor(ICALExtractor):
    def add_event(
        self,
        default_values,
        title,
        category,
        start_day,
@@ -211,9 +181,8 @@ class ICALNoBusyExtractor(ICALExtractor):
        image=None,
        image_alt=None,
    ):
        if title != "Busy" and title != "Accueils bénévoles" and title != "Occupé":
        if title != "Busy":
            super().add_event(
                default_values,
                title,
                category,
                start_day,
@@ -254,7 +223,6 @@ class ICALNoVCExtractor(ICALExtractor):

    def add_event(
        self,
        default_values,
        title,
        category,
        start_day,
@@ -273,7 +241,6 @@ class ICALNoVCExtractor(ICALExtractor):
        image_alt=None,
    ):
        super().add_event(
            default_values,
            title,
            category,
            start_day,
@@ -291,9 +258,3 @@ class ICALNoVCExtractor(ICALExtractor):
            image,
            image_alt,
        )


class ICALNaiveTimezone(ICALExtractor):
    def __init__(self):
        super().__init__()
        self.naive_timezone = True
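For context, a minimal sketch of how such an ICAL extractor is typically driven (illustrative only: it assumes the icalendar package implied by the calendar.walk("VEVENT") calls above, the SimpleDownloader from the downloader module, and an invented feed URL):

    # Illustrative, not part of the diff.
    from icalendar import Calendar

    content = SimpleDownloader().get_content("https://example.com/feed.ics")
    calendar = Calendar.from_ical(content)
    for event in calendar.walk("VEVENT"):
        print(event.get("SUMMARY"))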
@@ -1,10 +1,45 @@
from abc import abstractmethod
from urllib.parse import urlparse
from urllib.parse import parse_qs

import logging
from .extractor import Extractor

from .extractor import *
from django.utils.translation import gettext_lazy as _
from dateutil import parser
import datetime

logger = logging.getLogger(__name__)


class GGCalendar:
    def __init__(self, url):
        self.url = url
        self.extract_info()

    def extract_info(self):
        parsed_url = urlparse(self.url.replace("#", "%23"))
        params = parse_qs(parsed_url.query)

        self.location = params["location"][0] if "location" in params else None
        self.title = params["text"][0] if "text" in params else None
        if "dates" in params:
            dates = [x.replace(" ", "+") for x in params["dates"][0].split("/")]
            if len(dates) > 0:
                date = parser.parse(dates[0])
                self.start_day = date.date()
                self.start_time = date.time()
                if len(dates) == 2:
                    date = parser.parse(dates[1])
                    self.end_day = date.date()
                    self.end_time = date.time()
                else:
                    self.end_day = None
                    self.end_time = None

        else:
            raise Exception("Unable to find a date in google calendar URL")
            self.start_day = None
            self.start_time = None
            self.end_day = None
            self.end_time = None


# A class to extract events from URL with two steps:
@@ -12,10 +47,8 @@ logger = logging.getLogger(__name__)
# - then for each document downloaded from these urls, build the events
# This class is an abstract class
class TwoStepsExtractor(Extractor):

    def __init__(self):
        super().__init__()
        self.has_2nd_method_in_list = False
        self.event_urls = None
        self.event_properties = {}

@@ -29,22 +62,20 @@ class TwoStepsExtractor(Extractor):
        self.event_urls.append(url)
        return True

    def add_event_property(self, url, key, value):
    def add_event_start_day(self, url, start_day):
        if url not in self.event_properties:
            self.event_properties[url] = {}
        self.event_properties[url][key] = value

    def add_event_url_human(self, url, url_human):
        self.add_event_property(url, "url_human", url_human)

    def add_event_start_day(self, url, start_day):
        self.add_event_property(url, "start_day", start_day)
        self.event_properties[url]["start_day"] = start_day

    def add_event_start_time(self, url, start_time):
        self.add_event_property(url, "start_time", start_time)
        if url not in self.event_properties:
            self.event_properties[url] = {}
        self.event_properties[url]["start_time"] = start_time

    def add_event_title(self, url, title):
        self.add_event_property(url, "title", title)
        if url not in self.event_properties:
            self.event_properties[url] = {}
        self.event_properties[url]["title"] = title

    def add_event_tag(self, url, tag):
        if url not in self.event_properties:
@@ -54,14 +85,17 @@ class TwoStepsExtractor(Extractor):
            self.event_properties[url]["tags"].append(tag)

    def add_event_category(self, url, cat):
        self.add_event_property(url, "category", cat)
        if url not in self.event_properties:
            self.event_properties[url] = {}
        self.event_properties[url]["category"] = cat

    def add_event_location(self, url, loc):
        self.add_event_property(url, "location", loc)
        if url not in self.event_properties:
            self.event_properties[url] = {}
        self.event_properties[url]["location"] = loc

    def add_event_with_props(
        self,
        default_values,
        event_url,
        title,
        category,
@@ -93,11 +127,8 @@ class TwoStepsExtractor(Extractor):
            category = self.event_properties[event_url]["category"]
        if "location" in self.event_properties[event_url]:
            location = self.event_properties[event_url]["location"]
        if "url_human" in self.event_properties[event_url]:
            url_human = self.event_properties[event_url]["url_human"]

        self.add_event(
            default_values,
            title,
            category,
            start_day,
@@ -131,9 +162,6 @@ class TwoStepsExtractor(Extractor):
    ):
        pass

    def prepare_2nd_extract_in_list(self):
        pass

    def extract(
        self,
        content,
@@ -142,18 +170,13 @@ class TwoStepsExtractor(Extractor):
        default_values=None,
        published=False,
        only_future=True,
        ignore_404=True,
        first=True,
    ):

        first = True
        self.only_future = only_future
        self.now = datetime.datetime.now().date()
        self.set_header(url)
        self.clear_events()

        self.url = url
        self.url_human = url_human
        self.event_urls = []
        self.event_properties.clear()

@@ -171,70 +194,10 @@ class TwoStepsExtractor(Extractor):
            # first download the content associated with this link
            content_event = self.downloader.get_content(event_url)
            if content_event is None:
                msg = "Cannot extract event from url {}".format(event_url)
                if ignore_404:
                    logger.error(msg)
                else:
                    raise Exception(msg)
            else:
                # then extract event information from this html document
                try:
                    self.add_event_from_content(
                        content_event, event_url, url_human, default_values, published
                    )
                except Exception as e:
                    # some websites (FB) sometimes need a second step
                    if (
                        first
                        and self.has_2nd_method_in_list
                        and self.downloader.support_2nd_extract
                    ):
                        logger.info("Using cookie trick on a facebook event")
                        first = False
                        # TMP: trace what is happening
                        from agenda_culturel.import_tasks.generic_extractors import (
                            fbevents,
                        )

                        fbevents.CExtractor.dump_content_for_debug(
                            content_event, event_url
                        )
                        self.prepare_2nd_extract_in_list()
                        content_event = self.downloader.get_content(event_url)
                        if content_event is not None:
                            self.add_event_from_content(
                                content_event,
                                event_url,
                                url_human,
                                default_values,
                                published,
                            )
                    else:
                        raise e
                raise Exception(_("Cannot extract event from url {}").format(event_url))
            # then extract event information from this html document
            self.add_event_from_content(
                content_event, event_url, url_human, default_values, published
            )

        return self.get_structure()


class TwoStepsExtractorNoPause(TwoStepsExtractor):

    def extract(
        self,
        content,
        url,
        url_human=None,
        default_values=None,
        published=False,
        only_future=True,
        ignore_404=True,
    ):
        if hasattr(self.downloader, "pause"):
            pause = self.downloader.pause
        else:
            pause = False
        self.downloader.pause = False
        result = super().extract(
            content, url, url_human, default_values, published, only_future, ignore_404
        )
        self.downloader.pause = pause

        return result
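To make the two-step pattern concrete, here is a minimal hypothetical subclass (illustrative only: the CSS selectors, fields, and placeholder date are invented; the two hooks are the ones the abstract class above expects):

    # Illustrative, not part of the diff.
    import datetime
    from bs4 import BeautifulSoup

    class MinimalExtractor(TwoStepsExtractor):
        def build_event_url_list(self, content):
            # step 1: collect one URL per event
            soup = BeautifulSoup(content, "html.parser")
            for link in soup.select("a.event"):
                self.add_event_url(link["href"])

        def add_event_from_content(
            self, event_content, event_url,
            url_human=None, default_values=None, published=False,
        ):
            # step 2: build an event from each downloaded page
            soup = BeautifulSoup(event_content, "html.parser")
            self.add_event_with_props(
                default_values, event_url,
                soup.select_one("h1").text,   # title
                None,                         # category
                datetime.date.today(),        # start_day (placeholder)
                None,                         # location
                soup.get_text(),              # description
                [],                           # tags
                uuids=[event_url], published=published,
            )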
@@ -1,7 +0,0 @@
from os.path import dirname, basename, isfile, join
import glob

modules = glob.glob(join(dirname(__file__), "*.py"))
__all__ = [
    basename(f)[:-3] for f in modules if isfile(f) and not f.endswith("__init__.py")
]
@@ -1,114 +0,0 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from ..extractor import Extractor
from bs4 import BeautifulSoup


# A class dedicated to getting events from apidae-tourisme widgets
class CExtractor(TwoStepsExtractorNoPause):

    def build_event_url_list(self, content, infuture_days=180):

        # Get the line starting with wrapper.querySelector(".results_agenda").innerHTML = "
        # split it on "=" and keep the end,
        # strip it, and remove the first character (") and the two last ones (";),
        # remove the escapes and parse the contained html
        for line in content.split("\n"):
            if line.startswith(
                'wrapper.querySelector(".results_agenda").innerHTML = "'
            ):
                html = (
                    ('"'.join(line.split('"')[3:]))
                    .replace('\\"', '"')
                    .replace("\\n", "\n")
                    .replace("\\/", "/")
                )
                soup = BeautifulSoup(html, "html.parser")
                links = soup.select("a.widgit_result")
                for lk in links:
                    self.add_event_url(lk["data-w-href"])
                break

    def add_event_from_content(
        self,
        event_content,
        event_url,
        url_human=None,
        default_values=None,
        published=False,
    ):
        # check for the hash tag
        for line in event_content.split("\n"):
            if line.strip().startswith("window.location.hash"):
                ref = line.split('"')[1]
                break

        # check for content
        for line in event_content.split("\n"):
            if line.startswith("detailsWrapper.innerHTML ="):
                html = (
                    ('"'.join(line.split('"')[1:]))
                    .replace('\\"', '"')
                    .replace("\\n", "\n")
                    .replace("\\/", "/")
                )

                soup = BeautifulSoup(html, "html.parser")
                title = soup.select_one("h2.widgit_title").text.strip()
                image = soup.select_one("img")
                image_alt = image["alt"]
                image = image["src"]
                description = soup.select("div.desc")
                description = "\n".join([d.text for d in description])
                openings = soup.select_one(".openings .mts").text.strip().split("\n")[0]
                start_time = None
                end_time = None
                if "tous les" in openings:
                    start_day = None
                else:
                    start_day = Extractor.parse_french_date(openings)
                    details = openings.split("de")
                    if len(details) > 1:
                        hours = details[1].split("à")
                        start_time = Extractor.parse_french_time(hours[0])
                        if len(hours) > 1:
                            end_time = Extractor.parse_french_time(hours[1])

                contact = soup.select_one(".contact")
                sa = False
                location = []
                for c in contact.children:
                    if c.name == "h2" and c.text.strip() == "Adresse":
                        sa = True
                    else:
                        if c.name == "h2" and sa:
                            break
                        if c.name == "p" and sa:
                            e = c.text.strip()
                            if e != "":
                                location.append(e)

                location = ", ".join(location)

                soup.select("a.website")
                event_url = url_human + "#" + ref

                self.add_event_with_props(
                    default_values,
                    event_url,
                    title,
                    None,
                    start_day,
                    location,
                    description,
                    [],
                    recurrences=None,
                    uuids=[event_url],
                    url_human=event_url,
                    start_time=start_time,
                    end_day=start_day,
                    end_time=end_time,
                    published=published,
                    image=image,
                    image_alt=image_alt,
                )
                return
@@ -1,336 +0,0 @@
from datetime import datetime
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import time as t
from django.utils.translation import gettext_lazy as _
import re

from ..extractor import Extractor
import json

import logging

logger = logging.getLogger(__name__)


class SimpleFacebookEvent:
    def __init__(self, data):
        self.elements = {}

        for key in ["id", "start_timestamp", "end_timestamp"]:
            self.elements[key] = data[key] if key in data else None

        if "parent_event" in data:
            self.parent = SimpleFacebookEvent(data["parent_event"])


class FacebookEvent:
    name = "event"
    # keys correspond to the JavaScript elements that contain interesting data
    keys = [
        [
            "start_time_formatted",
            "start_timestamp",
            "is_past",
            "name",
            "price_info",
            "cover_media_renderer",
            "id",
            "parent_if_exists_or_self",
            "day_time_sentence",
            "event_place",
            "comet_neighboring_siblings",
        ],
        ["event_description"],
        ["start_timestamp", "end_timestamp"],
    ]
    # rules are indexed by a sub-key of the interesting data under which elements will be found;
    # each pair in the associated list is a key of our model and a path within FB data to
    # get the corresponding field
    rules = {
        "event_description": [("description", ["text"])],
        "cover_media_renderer": [
            ("image_alt", ["cover_photo", "photo", "accessibility_caption"]),
            ("image", ["cover_photo", "photo", "full_image", "uri"]),
            ("image", ["cover_media", 0, "full_image", "uri"]),
            ("image_alt", ["cover_media", 0, "accessibility_caption"]),
        ],
        "event_creator": [
            ("event_creator_name", ["name"]),
            ("event_creator_url", ["url"]),
        ],
        "event_place": [("event_place_name", ["name"])],
    }

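    # (Illustrative note, not in the original file: with the rule
    # ("image", ["cover_photo", "photo", "full_image", "uri"]),
    # add_fragment below walks
    # event["cover_media_renderer"]["cover_photo"]["photo"]["full_image"]["uri"]
    # and stores the result under elements["image"].)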
    def __init__(self, *args):
        if len(args) == 1:
            other = args[0]
            self.fragments = other.fragments
            self.elements = other.elements
            self.neighbor_events = None
        else:
            i = args[0]
            event = args[1]
            self.fragments = {}
            self.elements = {}
            self.neighbor_events = None
            self.possible_end_timestamp = []
            self.add_fragment(i, event)

    def get_element(self, key):
        return self.elements[key] if key in self.elements else None

    def get_element_date(self, key):
        v = self.get_element(key)
        return datetime.fromtimestamp(v).date() if v is not None and v != 0 else None

    def get_element_time(self, key):
        v = self.get_element(key)
        return (
            datetime.fromtimestamp(v).strftime("%H:%M")
            if v is not None and v != 0
            else None
        )

    def add_fragment(self, i, event):
        self.fragments[i] = event

        if FacebookEvent.keys[i] == [
            "start_timestamp",
            "end_timestamp",
        ]:
            self.get_possible_end_timestamp(i, event)
        else:
            for k in FacebookEvent.keys[i]:
                if k == "comet_neighboring_siblings":
                    self.get_neighbor_events(event[k])
                elif k in FacebookEvent.rules:
                    for nk, rule in FacebookEvent.rules[k]:
                        error = False
                        c = event[k]
                        for ki in rule:
                            if (
                                c is not None
                                and ki in c
                                or (isinstance(c, list) and ki < len(c))
                            ):
                                c = c[ki]
                            else:
                                error = True
                                break
                        if not error:
                            self.elements[nk] = c
                else:
                    self.elements[k] = event[k]

    def get_possible_end_timestamp(self, i, data):
        self.possible_end_timestamp.append(
            dict((k, data[k]) for k in FacebookEvent.keys[i])
        )

    def get_neighbor_events(self, data):
        self.neighbor_events = [SimpleFacebookEvent(d) for d in data]

    def __str__(self):
        return (
            str(self.elements)
            + "\n Neighbors: "
            + ", ".join([ne.elements["id"] for ne in self.neighbor_events])
        )

    def consolidate_current_event(self):
        if (
            self.neighbor_events is not None
            and "id" in self.elements
            and "end_timestamp" not in self.elements
        ):
            if self.neighbor_events is not None and "id" in self.elements:
                id = self.elements["id"]
                for ne in self.neighbor_events:
                    if ne.elements["id"] == id:
                        self.elements["end_timestamp"] = ne.elements["end_timestamp"]

        if (
            "end_timestamp" not in self.elements
            and len(self.possible_end_timestamp) != 0
        ):
            for s in self.possible_end_timestamp:
                if (
                    "start_timestamp" in s
                    and "start_timestamp" in self.elements
                    and s["start_timestamp"] == self.elements["start_timestamp"]
                ):
                    self.elements["end_timestamp"] = s["end_timestamp"]
                    break

    def find_event_fragment_in_array(array, event, first=True):
        if isinstance(array, dict):
            seen = False
            for i, ks in enumerate(FacebookEvent.keys):
                # DEBUG: print([k for k in ks if k in array], "missing:", [k for k in ks if k not in array])
                if len(ks) == len([k for k in ks if k in array]):
                    seen = True
                    if event is None:
                        event = FacebookEvent(i, array)
                    else:
                        event.add_fragment(i, array)
                    # only consider the first of FacebookEvent.keys
                    break
            if not seen:
                for k in array:
                    event = FacebookEvent.find_event_fragment_in_array(
                        array[k], event, False
                    )
        elif isinstance(array, list):
            for e in array:
                event = FacebookEvent.find_event_fragment_in_array(e, event, False)

        if event is not None and first:
            event.consolidate_current_event()
        return event

    def build_event(self, url):
        return {
            "title": self.get_element("name"),
            "category": None,
            "start_day": self.get_element_date("start_timestamp"),
            "location": self.get_element("event_place_name"),
            "description": self.get_element("description"),
            "tags": [],
            "uuids": [url],
            "url_human": url,
            "start_time": self.get_element_time("start_timestamp"),
            "end_day": self.get_element_date("end_timestamp"),
            "end_time": self.get_element_time("end_timestamp"),
            "image": self.get_element("image"),
            "image_alt": self.get_element("image_alt"),
        }

    def get_parent_id(self):
        return self.get_element("parent_if_exists_or_self")["id"]

    def build_events(self, url):

        if self.neighbor_events is None or len(self.neighbor_events) == 0:
            return [self.build_event(url)]
        else:
            url_base = "https://www.facebook.com/events/" + self.get_parent_id() + "/"
            result = []
            for nb_e in self.neighbor_events:
                # we create a copy of the event
                clone = FacebookEvent(self)
                # we set start and end timestamps according to the neighbor
                clone.elements["start_timestamp"] = nb_e.elements["start_timestamp"]
                clone.elements["end_timestamp"] = nb_e.elements["end_timestamp"]
                # we generate the event
                result.append(clone.build_event(url_base + nb_e.elements["id"] + "/"))
            return result


class CExtractor(Extractor):

    def __init__(self):
        super().__init__()
        self.has_2nd_method = True

    def prepare_2nd_extract_dler(downloader):
        if downloader.support_2nd_extract:
            from selenium.webdriver.common.by import By
            from selenium.webdriver.support.ui import WebDriverWait
            from selenium.webdriver.support import expected_conditions as EC

            path = './/div[not(@aria-hidden)]/div[@aria-label="Allow all cookies"]'
            try:
                WebDriverWait(downloader.driver, 15).until(
                    EC.visibility_of_element_located((By.XPATH, path))
                )
            except Exception as e:
                raise Exception(
                    _(
                        "Error while waiting for the cookie button to be visible: "
                        + e.__class__.__name__
                        + " "
                        + str(e)
                    )
                )
            try:
                button = downloader.driver.find_element(By.XPATH, path)
            except Exception as e:
                raise Exception(
                    _(
                        "Error while getting the cookie button to be visible: "
                        + e.__class__.__name__
                        + " "
                        + str(e)
                    )
                )
            try:
                button.click()
            except Exception as e:
                raise Exception(
                    _(
                        "Error while clicking on the cookie button to be visible: "
                        + e.__class__.__name__
                        + " "
                        + str(e)
                    )
                )
            t.sleep(5)

    def prepare_2nd_extract(self):
        CExtractor.prepare_2nd_extract_dler(self.downloader)

    def clean_url(url):
        if CExtractor.is_known_url(url, False):
            u = urlparse(url)
            result = "https://www.facebook.com" + u.path

            # remove the event name from the url
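            # (Illustrative example, not in the original file: a share link like
            # "https://www.facebook.com/events/s/my-event-name/1234567890/"
            # becomes "https://www.facebook.com/events/1234567890/".)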
            match = re.match(
                r"(.*/events)/s/([a-zA-Z-][a-zA-Z-0-9-]+)/([0-9/]*)", result
            )
            if match:
                result = match[1] + "/" + match[3]

            if result[-1] == "/":
                return result
            else:
                return result + "/"
        else:
            return url

    def is_known_url(url, include_links=True):
        u = urlparse(url)
        url_list = ["facebook.com", "www.facebook.com", "m.facebook.com"]
        if include_links:
            url_list.append("fb.me")
        return u.netloc in url_list

    def extract(
        self, content, url, url_human=None, default_values=None, published=False
    ):
        # NOTE: this method does not use url_human = None and default_values = None

        # get step by step all information from the content
        fevent = None
        soup = BeautifulSoup(content, "html.parser")
        for json_script in soup.find_all("script", type="application/json"):
            json_txt = json_script.get_text()
            json_struct = json.loads(json_txt)
            fevent = FacebookEvent.find_event_fragment_in_array(json_struct, fevent)

        if fevent is not None:
            self.set_header(url)
            for event in fevent.build_events(url):
                logger.info("published: " + str(published))
                event["published"] = published

                if default_values and "category" in default_values:
                    event["category"] = default_values["category"]
                self.add_event(default_values, **event)
            return self.get_structure()
        else:
            logger.warning("cannot find any event in page")
            raise Exception(_("Cannot get Facebook event from {}").format(url))

        return None
@@ -1,123 +0,0 @@
from ..twosteps_extractor import TwoStepsExtractor
from .fbevent import FacebookEvent, CExtractor as FacebookEventExtractor
from bs4 import BeautifulSoup
import json
import os
from datetime import datetime
from django.utils.translation import gettext_lazy as _


import logging

logger = logging.getLogger(__name__)


# A class dedicated to getting events from a facebook events page
# such as https://www.facebook.com/laJeteeClermont/events
class CExtractor(TwoStepsExtractor):

    def __init__(self):
        super().__init__()
        self.has_2nd_method_in_list = True

    def find_event_id_fragment_in_array(self, array):
        if isinstance(array, dict):
            if (
                "__typename" in array
                and array["__typename"] == "Event"
                and "id" in array
            ):
                self.add_event_url(
                    "https://www.facebook.com/events/" + array["id"] + "/"
                )
                self.found = True
            else:
                for k in array:
                    if k == "pageItems":
                        self.has_page_items = True
                    self.find_event_id_fragment_in_array(array[k])
        elif isinstance(array, list):
            for e in array:
                self.find_event_id_fragment_in_array(e)

    def find_in_js(self, soup):

        for json_script in soup.find_all("script", type="application/json"):
            json_txt = json_script.get_text()
            json_struct = json.loads(json_txt)
            self.find_event_id_fragment_in_array(json_struct)

    def prepare_2nd_extract_in_list(self):
        FacebookEventExtractor.prepare_2nd_extract_dler(self.downloader)

    def build_event_url_list(self, content):
        soup = BeautifulSoup(content, "html.parser")

        debug = True

        self.found = False
        links = soup.find_all("a")
        for link in links:
            href = link.get("href")
            if href is not None and href.startswith("https://www.facebook.com/events/"):
                self.add_event_url(href.split("?")[0])
                self.found = True

        self.has_page_items = False
        self.find_in_js(soup)

        if not self.found:
            logger.warning(
                "cannot find any event link in the events page; saving page content"
            )
            if debug:
                CExtractor.dump_content_for_debug(content, self.url)

        if not self.has_page_items:
            raise Exception(
                _(
                    "the page was not yet populated with events, so the loading time was probably too short"
                )
            )

    def dump_content_for_debug(content, url):
        directory = "errors/"
        if not os.path.exists(directory):
            os.makedirs(directory)
        now = datetime.now()
        filename = directory + now.strftime("%Y%m%d_%H%M%S") + ".html"
        with open(filename, "w") as text_file:
            text_file.write("<!-- " + url + " -->\n\n")
            text_file.write(content)

    def add_event_from_content(
        self,
        event_content,
        event_url,
        url_human=None,
        default_values=None,
        published=False,
    ):
        debug = True

        fevent = None
        soup = BeautifulSoup(event_content, "html.parser")
        for json_script in soup.find_all("script", type="application/json"):
            json_txt = json_script.get_text()
            json_struct = json.loads(json_txt)
            fevent = FacebookEvent.find_event_fragment_in_array(json_struct, fevent)

        if fevent is not None:
            for event in fevent.build_events(event_url):
                event["published"] = published

                # only add the event if it's unknown
                if (
                    len([e for e in self.events if event["uuids"][0] in e["uuids"]])
                    == 0
                ):
                    self.add_event(default_values, **event)
        else:
            if debug:
                CExtractor.dump_content_for_debug(event_content, event_url)
            raise Exception(_("Cannot get Facebook event from {}").format(event_url))
@@ -1,165 +0,0 @@
from datetime import datetime
from bs4 import BeautifulSoup
from urllib.parse import urlparse, parse_qs
import dateutil.parser
from ..extractor import Extractor
import bbcode

import logging

logger = logging.getLogger(__name__)


class GGCalendar:
    def __init__(self, url):
        self.url = url
        self.extract_info()

    def filter_keys(params):
        result = {}

        for k, v in params.items():
            if k.startswith("e[0]"):
                result[k.replace("e[0][", "")[:-1]] = v
            else:
                result[k] = v

        return result

    def is_valid_event(self):
        return self.start_day is not None and self.title is not None

    def extract_info(self):
        parsed_url = urlparse(self.url.replace("#", "%23"))
        params = parse_qs(parsed_url.query)

        params = GGCalendar.filter_keys(params)

        self.location = params["location"][0] if "location" in params else ""
        self.title = (
            params["text"][0]
            if "text" in params
            else params["title"][0] if "title" in params else ""
        )
        self.description = (
            params["description"][0]
            if "description" in params
            else params["details"][0] if "details" in params else ""
        )
        if self.description != "":
            self.description = BeautifulSoup(self.description, "html.parser").text
        if "dates" in params:
            dates = [x.replace(" ", "+") for x in params["dates"][0].split("/")]
            if len(dates) > 0:
                date = dateutil.parser.parse(dates[0])
                self.start_day = date.date()
                self.start_time = date.time()
                if len(dates) == 2:
                    date = dateutil.parser.parse(dates[1])
                    self.end_day = date.date()
                    self.end_time = date.time()
                else:
                    self.end_day = None
                    self.end_time = None
        elif "date_start" in params:
            date = dateutil.parser.parse(params["date_start"][0])
            self.start_day = date.date()
            self.start_time = date.time()
            if "date_end" in params:
                dateend = dateutil.parser.parse(params["date_end"][0])
                if dateend != date:
                    self.end_day = dateend.date()
                    self.end_time = dateend.time()
                else:
                    self.end_day = None
                    self.end_time = None
                if self.start_time == datetime.time(0):
                    self.start_time = None

            else:
                self.end_day = None
                self.end_time = None
        else:
            raise Exception("Unable to find a date in google calendar URL")
            self.start_day = None
            self.start_time = None
            self.end_day = None
            self.end_time = None

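A hedged example of the kind of "add to Google Calendar" link this parser targets (the URL and values are invented for illustration):

    # Illustrative, not part of the diff.
    gg = GGCalendar(
        "https://calendar.google.com/calendar/render?action=TEMPLATE"
        "&text=Concert&location=Salle+X&dates=20240501T190000/20240501T210000"
    )
    # gg.title == "Concert", gg.location == "Salle X",
    # gg.start_day == date(2024, 5, 1), gg.start_time == time(19, 0),
    # gg.end_day == date(2024, 5, 1), gg.end_time == time(21, 0)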
class CExtractor(Extractor):
    def __init__(self):
        super().__init__()
        self.possible_urls = [
            "https://calendar.google.com/calendar/",
            "https://addtocalendar.com/",
            "https://www.google.com/calendar/event",
        ]

    def guess_image(self, soup, url):
        image = soup.find("meta", property="og:image")

        if image is None:
            for img in soup.select("img"):
                if img.find_parent(name="nav"):
                    continue
                image = img["src"]
                break
        else:
            image = image["content"]

        if image.startswith("/"):
            root_url = "https://" + urlparse(url).netloc + "/"
            image = root_url + image

        return image

    def extract(
        self, content, url, url_human=None, default_values=None, published=False
    ):
        soup = BeautifulSoup(content, "html.parser")

        for ggu in self.possible_urls:

            link_calendar = soup.select('a[href^="' + ggu + '"]')
            if len(link_calendar) != 0:

                gg_cal = GGCalendar(link_calendar[0]["href"])

                if gg_cal.is_valid_event():
                    start_day = gg_cal.start_day
                    start_time = gg_cal.start_time
                    description = gg_cal.description.replace(" ", "")
                    end_day = gg_cal.end_day
                    end_time = gg_cal.end_time
                    location = gg_cal.location
                    title = gg_cal.title
                    url_human = url

                    self.set_header(url)

                    image = self.guess_image(soup, url)

                    category = None

                    self.add_event(
                        default_values,
                        title=title,
                        category=category,
                        start_day=start_day,
                        location=location,
                        description=description,
                        tags=[],
                        uuids=[url],
                        recurrences=None,
                        url_human=url_human,
                        start_time=start_time,
                        end_day=end_day,
                        end_time=end_time,
                        published=published,
                        image=image,
                    )

                    break

        return self.get_structure()
@@ -1,126 +0,0 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from ..extractor import Extractor
from bs4 import BeautifulSoup
from urllib.parse import urlparse


# A class dedicated to getting events from Raymond Bar
# URL: https://www.raymondbar.net/
class CExtractor(TwoStepsExtractorNoPause):

    def __init__(self):
        super().__init__()

    def guess_category(self, category):
        if "Cinéma" in category:
            return "Cinéma"
        if "Conférence" in category or "Rencontres" in category:
            return "Rencontres & débats"
        if "Lecture" in category or "Conte" in category:
            return "Spectacles"
        if "Atelier" in category or "Jeux" in category:
            return "Animations & Ateliers"
        if "Numérique" in category:
            return "Rendez-vous locaux"

        return "Sans catégorie"

    def guess_tags_from_category(self, category):
        tags = []
        if "Lecture" in category:
            tags.append("📖 lecture")
        if "Jeux" in category:
            tags.append("🎲 jeux")

        return tags
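
    # Example (hypothetical) source labels: "Lecture / Conte" maps to the
    # "Spectacles" category with the "📖 lecture" tag, and "Atelier Jeux"
    # maps to "Animations & Ateliers" with the "🎲 jeux" tag.
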
    def build_event_url_list(self, content, infuture_days=180):

        soup = BeautifulSoup(content, "html.parser")

        root_address_human = self.url_human.split("?")[0]
        root_address = self.url.split("Service")[0]

        items = soup.select("li.listItem")
        if items:
            for item in items:
                elems = item["onclick"].split('"')
                v = elems[3].split("^")[1]
                contentItem = elems[1]
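                # The onclick attribute above is assumed to look like
                # call("<contentItem>", "...^<v>"): splitting on double quotes
                # leaves the item uuid in elems[1] and the view identifier,
                # after the "^", in elems[3].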
                multidate = item.select_one(".until.maindate").text != ""
                if not multidate:
                    url_human = (
                        root_address_human
                        + "?p=*&v="
                        + v
                        + "#contentitem="
                        + contentItem
                    )
                    url = (
                        root_address
                        + "Service.PubItem.cls?action=get&instance=*&uuid="
                        + contentItem
                    )
                    self.add_event_url(url)
                    self.add_event_url_human(url, url_human)

    def add_event_from_content(
        self,
        event_content,
        event_url,
        url_human=None,
        default_values=None,
        published=False,
    ):

        root_address_human = "https://" + urlparse(self.url_human).netloc + "/"

        soup = BeautifulSoup(event_content, "xml")

        title = soup.select_one("Title").text
        content = soup.select_one("Content").text

        soup = BeautifulSoup(content, "html.parser")

        image = root_address_human + soup.select_one(".image img")["src"]
        description = soup.select_one(".rightcolumn .content").text
        location = soup.select_one(".infos .location").text
        public = soup.select_one(".infos .public").text
        start_day = Extractor.parse_french_date(
            soup.select_one(".infos .date .from").text
        )
        start_time = Extractor.parse_french_time(
            soup.select_one(".infos .date .time").text
        )
        acces = soup.select_one(".infos .acces").text
        category = soup.select_one(".rightcolumn .category").text
        infos = soup.select_one(".infos").text

        description = description + "\n" + infos

        tags = self.guess_tags_from_category(category)
        category = self.guess_category(category)
        if "Tout-petits" in public or "Jeunesse" in public:
            tags.append("🎈 jeune public")
        if "Accès libre" in acces:
            tags.append("💶 gratuit")

        self.add_event_with_props(
            default_values,
            event_url,
            title,
            category,
            start_day,
            location,
            description,
            tags,
            recurrences=None,
            uuids=[event_url],
            url_human=event_url,
            start_time=start_time,
            end_day=None,
            end_time=None,
            published=published,
            image=image,
            image_alt="",
        )
@@ -1,197 +0,0 @@
from ..extractor import Extractor
import dateutil.parser
from datetime import datetime, timezone
import requests
from urllib.parse import urlparse
from bs4 import BeautifulSoup

import logging

logger = logging.getLogger(__name__)


# A class dedicated to getting events from les amis du temps des cerises
# Website https://amisdutempsdescerises.org/
class CExtractor(Extractor):

    def __init__(self):
        super().__init__()
        self.no_downloader = True

    # Source code adapted from https://framagit.org/Marc-AntoineA/mobilizon-client-python
    def _request(self, body, data):

        headers = {}

        response = requests.post(
            url=self._api_end_point,
            json={"query": body, "variables": data},
            headers=headers,
        )

        if response.status_code == 200:
            response_json = response.json()
            if "errors" in response_json:
                raise Exception(
                    f'Errors while requesting { body }. { str(response_json["errors"]) }'
                )

            return response_json["data"]
        else:
            raise Exception(
                f"Error while requesting. Status code: { response.status_code }"
            )

    def _oncoming_events_number(self):

        query = """
        query($preferredUsername: String!, $afterDatetime: DateTime) {
            group(preferredUsername: $preferredUsername) {
                organizedEvents(afterDatetime: $afterDatetime) {
                    total,
                }
            }
        }
        """
        today = datetime.now(timezone.utc).isoformat()
        data = {"preferredUsername": self._group_id, "afterDatetime": today}
        r = self._request(query, data)
        return r["group"]["organizedEvents"]["total"]

    def _oncoming_events(self):
        def _oncoming_events_page(page):
            query = """
            query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) {
                group(preferredUsername: $preferredUsername) {
                    organizedEvents(afterDatetime: $afterDatetime, page: $page) {
                        elements {
                            id,
                            title,
                            url,
                            beginsOn,
                            endsOn,
                            options {
                                showStartTime,
                                showEndTime,
                                timezone
                            },
                            attributedTo {
                                avatar {
                                    url,
                                }
                                name,
                                preferredUsername,
                            },
                            description,
                            onlineAddress,
                            physicalAddress {
                                locality,
                                description,
                                region
                            },
                            tags {
                                title,
                                id,
                                slug
                            },
                            picture {
                                url
                            },
                            status
                        }
                    }
                }
            }
            """

            today = datetime.now(timezone.utc).isoformat()
            data = {
                "preferredUsername": self._group_id,
                "afterDatetime": today,
                "page": page,
            }
            r = self._request(query, data)
            return r["group"]["organizedEvents"]["elements"]

        number_events = self._oncoming_events_number()

        events = []
        page = 1
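        # Pages are numbered from 1; the loop below keeps fetching until the
        # number of collected events reaches the total announced by the server
        # (this assumes the total does not change while pages are fetched).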
        while len(events) < number_events:
            events.extend(_oncoming_events_page(page))
            page += 1
        return events

    def extract(
        self, content, url, url_human=None, default_values=None, published=False
    ):
        self.set_header(url)
        self.clear_events()

        if "@" in url:

            # TODO: when we have
            # https://mobilizon.fr/@xr_clermont_ferrand@mobilizon.extinctionrebellion.fr/events
            # we must return:
            # https://mobilizon.extinctionrebellion.fr/@xr_clermont_ferrand/events

            # split url to identify server url and actor id
            elems = [x for x in url.split("/") if len(x) > 0 and x[0] == "@"]
            if len(elems) == 1:
                params = elems[0].split("@")
                if len(params) == 2:
                    self._api_end_point = "https://" + urlparse(url).netloc + "/api"
                    self._group_id = params[1]
                else:
                    self._api_end_point = "https://" + params[2] + "/api"
                    self._group_id = params[1]

            events = self._oncoming_events()

            for e in events:
                title = e["title"]
                event_url = e["url"]
                image = e["picture"]["url"]
                location = (
                    e["physicalAddress"]["description"]
                    + ", "
                    + e["physicalAddress"]["locality"]
                )
                soup = BeautifulSoup(e["description"], "html.parser")

                description = soup.text
                start = (
                    dateutil.parser.isoparse(e["beginsOn"])
                    .replace(tzinfo=timezone.utc)
                    .astimezone(tz=None)
                )
                end = (
                    dateutil.parser.isoparse(e["endsOn"])
                    .replace(tzinfo=timezone.utc)
                    .astimezone(tz=None)
                )

                start_day = start.date()
                start_time = start.time() if e["options"]["showStartTime"] else None
                end_day = end.date()
                end_time = end.time() if e["options"]["showEndTime"] else None

                self.add_event(
                    default_values,
                    title,
                    None,
                    start_day,
                    location,
                    description,
                    [],
                    uuids=[event_url],
                    recurrences=None,
                    url_human=event_url,
                    start_time=start_time,
                    published=published,
                    image=image,
                    end_day=end_day,
                    end_time=end_time,
                )

        return self.get_structure()
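
A short sketch of the endpoint derivation performed in extract() above; the group URL is a placeholder:

from urllib.parse import urlparse

url = "https://mobilizon.fr/@xr_clermont_ferrand/events"
elems = [x for x in url.split("/") if len(x) > 0 and x[0] == "@"]
params = elems[0].split("@")  # ["", "xr_clermont_ferrand"]
api_end_point = "https://" + urlparse(url).netloc + "/api"
group_id = params[1]
# A federated form such as "@group@other.instance" yields a three-element
# params list, and the endpoint is then built from params[2] instead.
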
@@ -1,114 +0,0 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..extractor import Extractor
from bs4 import BeautifulSoup


# A class dedicated to getting events from the MEC Wordpress plugin
# URL: https://webnus.net/modern-events-calendar/
class CExtractor(TwoStepsExtractor):

    def local2agendaCategory(self, category):
        mapping = {
            "Musique": "Fêtes & Concerts",
            "CONCERT": "Fêtes & Concerts",
            "VISITE": "Visites & Expositions",
            "Spectacle": "Spectacles",
            "Rencontre": "Rencontres & Débats",
            "Atelier": "Animations & Ateliers",
            "Projection": "Cinéma",
        }
        mapping_tag = {
            "Musique": "concert",
            "CONCERT": "concert",
            "VISITE": None,
            "Spectacle": "théâtre",
            "Rencontre": None,
            "Atelier": "atelier",
            "Projection": None,
        }

        if category in mapping:
            return mapping[category], mapping_tag[category]
        else:
            return None, None
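
    # For instance, local2agendaCategory("Projection") yields ("Cinéma", None),
    # and any label missing from the mapping falls through to (None, None).
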
    def build_event_url_list(self, content):
        soup = BeautifulSoup(content, "xml")

        events = soup.select("div.mec-tile-event-content")
        for e in events:
            link = e.select("h4.mec-event-title a")
            if len(link) == 1:
                url = link[0]["href"]
                title = link[0].get_text()

                if self.add_event_url(url):
                    print(url, title)
                    self.add_event_title(url, title)

                categories = e.select(".mec-label-normal")
                if len(categories) == 0:
                    categories = e.select(".mec-category")
                if len(categories) > 0:
                    category, tag = self.local2agendaCategory(categories[0].get_text())
                    if category:
                        self.add_event_category(url, category)
                    if tag:
                        self.add_event_category(url, tag)

    def add_event_from_content(
        self,
        event_content,
        event_url,
        url_human=None,
        default_values=None,
        published=False,
    ):
        soup = BeautifulSoup(event_content, "xml")

        start_day = soup.select(".mec-start-date-label")
        if start_day and len(start_day) > 0:
            start_day = Extractor.parse_french_date(start_day[0].get_text())
        else:
            start_day = None
        t = soup.select(".mec-single-event-time .mec-events-abbr")
        if t:
            t = t[0].get_text().split("-")
            start_time = Extractor.parse_french_time(t[0])
            if len(t) > 1:
                end_time = Extractor.parse_french_time(t[1])
            else:
                end_time = None
        else:
            start_time = None
            end_time = None

        image = soup.select(".mec-events-event-image img")
        if image:
            image = image[0]["src"]
        else:
            image = None
        description = soup.select(".mec-event-content .mec-single-event-description")[
            0
        ].get_text(separator=" ")

        url_human = event_url

        self.add_event_with_props(
            default_values,
            event_url,
            None,
            None,
            start_day,
            None,
            description,
            [],
            recurrences=None,
            uuids=[event_url],
            url_human=url_human,
            start_time=start_time,
            end_day=None,
            end_time=end_time,
            published=published,
            image=image,
        )
@@ -1,10 +1,5 @@
from .downloader import SimpleDownloader
from .extractor import Extractor
from .generic_extractors.fbevent import CExtractor as FacebookEventExtractor

import logging

logger = logging.getLogger(__name__)
from .downloader import *
from .extractor import *


class URL2Events:
@@ -16,29 +11,9 @@ class URL2Events:
        self.single_event = single_event

    def process(
        self,
        url,
        url_human=None,
        cache=None,
        default_values=None,
        published=False,
        first=True,
        self, url, url_human=None, cache=None, default_values=None, published=False
    ):
        referer = ""
        data = None
        content_type = None
        content = None
        if self.extractor:
            referer = self.extractor.url_referer
            data = self.extractor.data
            content_type = self.extractor.content_type
            if self.extractor.no_downloader:
                content = ""

        if content is None:
            content = self.downloader.get_content(
                url, cache, referer=referer, content_type=content_type, data=data
            )
        content = self.downloader.get_content(url, cache)

        if content is None:
            return None
@@ -51,25 +26,8 @@ class URL2Events:
        else:
            # if the extractor is not defined, use a list of default extractors
            for e in Extractor.get_default_extractors(self.single_event):
                logger.info("Extractor::" + type(e).__name__)
                e.set_downloader(self.downloader)
                try:
                    events = e.extract(
                        content, url, url_human, default_values, published
                    )
                    if events is not None:
                        if len(events) > 0:
                            return events
                except Exception:
                    if (
                        first
                        and FacebookEventExtractor.is_known_url(url)
                        and self.downloader.support_2nd_extract
                        and e.has_2nd_method
                    ):
                        logger.info("Using cookie trick on a facebook event")
                        e.prepare_2nd_extract()
                        return self.process(
                            url, url_human, cache, default_values, published, False
                        )
                events = e.extract(content, url, url_human, default_values, published)
                if events is not None:
                    return events
        return None
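
A minimal usage sketch; the URL is a placeholder, and the constructor call is an assumption based on the attributes used above (a downloader, an optional dedicated extractor, and the single_event flag set in the __init__ elided by this hunk):

u2e = URL2Events(SimpleDownloader(), extractor=None, single_event=True)
events = u2e.process("https://example.com/some-event", published=False)
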
File diff suppressed because it is too large
79
src/agenda_culturel/migrations/0001_initial.py
Normal file
79
src/agenda_culturel/migrations/0001_initial.py
Normal file
@@ -0,0 +1,79 @@
# Generated by Django 4.2.7 on 2023-11-15 14:57

import ckeditor.fields
import colorfield.fields
from django.db import migrations, models
import django.db.models.deletion
import django_better_admin_arrayfield.models.fields


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(help_text='Category name', max_length=512, verbose_name='Name')),
                ('alt_name', models.CharField(help_text='Alternative name used with a time period', max_length=512, verbose_name='Alternative Name')),
                ('codename', models.CharField(help_text='Short name of the category', max_length=3, verbose_name='Short name')),
                ('color', colorfield.fields.ColorField(blank=True, default=None, help_text='Color used as background for the category', image_field=None, max_length=25, null=True, samples=None, verbose_name='Color')),
            ],
            options={
                'verbose_name': 'Category',
                'verbose_name_plural': 'Categories',
            },
        ),
        migrations.CreateModel(
            name='EventSubmissionForm',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.URLField(help_text='URL where this event can be found.', max_length=512, verbose_name='URL')),
            ],
            options={
                'verbose_name': 'Event submission form',
                'verbose_name_plural': 'Event submissions forms',
                'db_table': 'eventsubmissionform',
            },
        ),
        migrations.CreateModel(
            name='StaticContent',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(help_text='Category name', max_length=512, unique=True, verbose_name='Name')),
                ('text', ckeditor.fields.RichTextField(help_text='Text as shown to the visitors', verbose_name='Content')),
                ('url_path', models.CharField(help_text='URL path where the content is included.', verbose_name='URL path')),
            ],
        ),
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_date', models.DateTimeField(auto_now_add=True)),
                ('modified_date', models.DateTimeField(auto_now=True)),
                ('title', models.CharField(help_text='Short title', max_length=512, verbose_name='Title')),
                ('status', models.CharField(choices=[('published', 'Published'), ('trash', 'Trash'), ('draft', 'Draft')], default='published', max_length=20, verbose_name='Status')),
                ('start_day', models.DateField(help_text='Day of the event', verbose_name='Day of the event')),
                ('start_time', models.TimeField(blank=True, help_text='Starting time', null=True, verbose_name='Starting time')),
                ('end_day', models.DateField(blank=True, help_text='End day of the event, only required if different from the start day.', null=True, verbose_name='End day of the event')),
                ('end_time', models.TimeField(blank=True, help_text='Final time', null=True, verbose_name='Final time')),
                ('location', models.CharField(help_text='Address of the event', max_length=512, verbose_name='Location')),
                ('description', models.TextField(blank=True, help_text='General description of the event', null=True, verbose_name='Description')),
                ('local_image', models.ImageField(blank=True, help_text='Illustration image stored in the agenda server', max_length=1024, null=True, upload_to='', verbose_name='Illustration (local image)')),
                ('image', models.URLField(blank=True, help_text='URL of the illustration image', max_length=1024, null=True, verbose_name='Illustration')),
                ('image_alt', models.CharField(blank=True, help_text='Alternative text used by screen readers for the image', max_length=1024, null=True, verbose_name='Illustration description')),
                ('reference_urls', django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.URLField(max_length=512), blank=True, help_text='List of all the urls where this event can be found.', null=True, size=None, verbose_name='URLs')),
                ('tags', django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.CharField(max_length=64), blank=True, help_text='A list of tags that describe the event.', null=True, size=None, verbose_name='Tags')),
                ('category', models.ForeignKey(default=None, help_text='Category of the event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category')),
            ],
            options={
                'verbose_name': 'Event',
                'verbose_name_plural': 'Events',
            },
        ),
    ]
@@ -1,974 +0,0 @@
# Generated by Django 4.2.19 on 2025-03-02 11:46

import autoslug.fields
import ckeditor.fields
import colorfield.fields
from django.conf import settings
import django.contrib.gis.geos.point
import django.contrib.postgres.operations
from django.db import migrations, models
import django.db.models.deletion
import django.db.models.functions.text
import django_better_admin_arrayfield.models.fields
import django_ckeditor_5.fields
import django_resized.forms
import location_field.models.plain
import location_field.models.spatial
import recurrence.fields


def set_representative_from_fixed_masked(apps, cats):
    # get all duplicated events
    DuplicatedEvents = apps.get_model("agenda_culturel", "DuplicatedEvents")
    duplicated = DuplicatedEvents.objects.all().prefetch_related('event_set')

    to_update = []
    for d in duplicated:
        # there is no representative
        d.representative = None
        # except if d is fixed
        if d.fixed:
            # and if there is at least one non masked (should be the case)
            e_not_masked = [e for e in d.event_set.all() if not e.masked]
            # keep the first one
            if len(e_not_masked) >= 1:
                d.representative = e_not_masked[0]
        to_update.append(d)

    DuplicatedEvents.objects.bulk_update(to_update, fields=["representative"])


def set_fixed_masked_from_representative(apps, cats):
    Event = apps.get_model("agenda_culturel", "Event")
    events = Event.objects.all().prefetch_related("possibly_duplicated")

    to_update = []
    for e in events:
        if not e.possibly_duplicated:
            e.masked = False
        else:
            e.masked = (
                e.possibly_duplicated.representative is not None
                and e.possibly_duplicated.representative != e
            )
        to_update.append(e)

    Event.objects.bulk_update(to_update, fields=["masked"])

    # get all duplicated events
    DuplicatedEvents = apps.get_model("agenda_culturel", "DuplicatedEvents")
    duplicated = DuplicatedEvents.objects.all().prefetch_related('event_set')

    # for each group of duplicated events
    to_update = []
    for d in duplicated:
        d.fixed = d.representative is not None
        to_update.append(d)

    DuplicatedEvents.objects.bulk_update(to_update, fields=["fixed"])
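

# Together, the two functions above make the representative RunPython step
# declared later in this migration reversible: the forward pass collapses the
# (fixed, masked) flags into a single "representative" foreign key, and the
# backward pass reconstructs those flags from that key.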
||||
def strip_place_aliases(apps, schema_editor):
|
||||
Place = apps.get_model("agenda_culturel", "Place")
|
||||
|
||||
places = Place.objects.all()
|
||||
|
||||
for p in places:
|
||||
if p.aliases is not None:
|
||||
p.aliases = [a.strip() for a in p.aliases]
|
||||
|
||||
Place.objects.bulk_update(places, fields=["aliases"])
|
||||
|
||||
def do_nothing(apps, schema_editor):
|
||||
pass
|
||||
|
||||
def migrate_data_slug_forward(apps, schema_editor):
|
||||
Category = apps.get_model("agenda_culturel", "Category")
|
||||
|
||||
for instance in Category.objects.all():
|
||||
print("Generating slug for %s"%instance)
|
||||
instance.save() # Will trigger slug update
|
||||
|
||||
def migrate_data_slug_backward(apps, schema_editor):
|
||||
pass
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Category',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(help_text='Category name', max_length=512, verbose_name='Name')),
|
||||
('color', colorfield.fields.ColorField(blank=True, default=None, help_text='Color used as background for the category', image_field=None, max_length=25, null=True, samples=None, verbose_name='Color')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Category',
|
||||
'verbose_name_plural': 'Categories',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='StaticContent',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(help_text='Category name', max_length=512, unique=True, verbose_name='Name')),
|
||||
('text', ckeditor.fields.RichTextField(help_text='Text as shown to the visitors', verbose_name='Content')),
|
||||
('url_path', models.CharField(help_text='URL path where the content is included.', verbose_name='URL path')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Message',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('subject', models.CharField(help_text='The subject of your message', max_length=512, verbose_name='Subject')),
|
||||
('name', models.CharField(blank=True, help_text='Your name', max_length=512, null=True, verbose_name='Name')),
|
||||
('date', models.DateTimeField(auto_now_add=True)),
|
||||
('closed', models.BooleanField(default=False, help_text='this message has been processed and no longer needs to be handled', verbose_name='Closed')),
|
||||
('message', ckeditor.fields.RichTextField(help_text='Your message', verbose_name='Message')),
|
||||
('email', models.EmailField(blank=True, help_text='Your email address', max_length=254, null=True, verbose_name='Email address')),
|
||||
('comments', ckeditor.fields.RichTextField(blank=True, default='', help_text='Comments on the message from the moderation team', null=True, verbose_name='Comments')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='BatchImportation',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created_date', models.DateTimeField(auto_now_add=True)),
|
||||
('source', models.URLField(blank=True, help_text='URL of the source document', max_length=1024, null=True, verbose_name='Source')),
|
||||
('browsable_url', models.URLField(blank=True, help_text='URL of the corresponding document that will be shown to visitors.', max_length=1024, null=True, verbose_name='Browsable url')),
|
||||
('status', models.CharField(choices=[('running', 'Running'), ('canceled', 'Canceled'), ('success', 'Success'), ('failed', 'Failed')], default='running', max_length=20, verbose_name='Status')),
|
||||
('error_message', models.CharField(blank=True, max_length=512, null=True, verbose_name='Error message')),
|
||||
('nb_imported', models.PositiveIntegerField(default=0, verbose_name='Number of imported events')),
|
||||
('nb_initial', models.PositiveIntegerField(default=0, verbose_name='Number of collected events')),
|
||||
('celery_id', models.CharField(default='', max_length=128)),
|
||||
('nb_removed', models.PositiveIntegerField(default=0, verbose_name='Number of removed events')),
|
||||
('nb_updated', models.PositiveIntegerField(default=0, verbose_name='Number of updated events')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='DuplicatedEvents',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Event',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created_date', models.DateTimeField(editable=False)),
|
||||
('modified_date', models.DateTimeField(blank=True, null=True)),
|
||||
('title', models.CharField(help_text='Short title', max_length=512, verbose_name='Title')),
|
||||
('status', models.CharField(choices=[('published', 'Published'), ('draft', 'Draft'), ('trash', 'Trash')], default='published', max_length=20, verbose_name='Status')),
|
||||
('start_day', models.DateField(help_text='Day of the event', verbose_name='Day of the event')),
|
||||
('start_time', models.TimeField(blank=True, help_text='Starting time', null=True, verbose_name='Starting time')),
|
||||
('end_day', models.DateField(blank=True, help_text='End day of the event, only required if different from the start day.', null=True, verbose_name='End day of the event')),
|
||||
('end_time', models.TimeField(blank=True, help_text='Final time', null=True, verbose_name='Final time')),
|
||||
('location', models.CharField(help_text='Address of the event', max_length=512, verbose_name='Location')),
|
||||
('description', models.TextField(blank=True, help_text='General description of the event', null=True, verbose_name='Description')),
|
||||
('local_image', models.ImageField(blank=True, help_text='Illustration image stored in the agenda server', max_length=1024, null=True, upload_to='', verbose_name='Illustration (local image)')),
|
||||
('image', models.URLField(blank=True, help_text='URL of the illustration image', max_length=1024, null=True, verbose_name='Illustration')),
|
||||
('image_alt', models.CharField(blank=True, help_text='Alternative text used by screen readers for the image', max_length=1024, null=True, verbose_name='Illustration description')),
|
||||
('reference_urls', django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.URLField(max_length=512), blank=True, help_text='List of all the urls where this event can be found.', null=True, size=None, verbose_name='URLs')),
|
||||
('tags', django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.CharField(max_length=64), blank=True, help_text='A list of tags that describe the event.', null=True, size=None, verbose_name='Tags')),
|
||||
('category', models.ForeignKey(default=None, help_text='Category of the event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category')),
|
||||
('uuids', django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.CharField(max_length=512), blank=True, help_text='UUIDs from import to detect duplicated entries.', null=True, size=None, verbose_name='UUIDs')),
|
||||
('imported_date', models.DateTimeField(blank=True, null=True)),
|
||||
('possibly_duplicated', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.duplicatedevents')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Event',
|
||||
'verbose_name_plural': 'Events',
|
||||
},
|
||||
),
|
||||
django.contrib.postgres.operations.TrigramExtension(
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='event',
|
||||
name='possibly_duplicated',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.duplicatedevents', verbose_name='Possibly duplicated'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='event',
|
||||
name='import_sources',
|
||||
field=django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.CharField(max_length=512), blank=True, help_text='Importation source used to detect removed entries.', null=True, size=None, verbose_name='Importation source'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='event',
|
||||
name='status',
|
||||
field=models.CharField(choices=[('published', 'Published'), ('draft', 'Draft'), ('trash', 'Trash')], default='draft', max_length=20, verbose_name='Status'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='event',
|
||||
name='recurrence_dtend',
|
||||
field=models.DateTimeField(blank=True, editable=False, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='event',
|
||||
name='recurrence_dtstart',
|
||||
field=models.DateTimeField(blank=True, editable=False, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='event',
|
||||
name='recurrences',
|
||||
field=recurrence.fields.RecurrenceField(blank=True, null=True, verbose_name='Recurrence'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='RecurrentImport',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('processor', models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC')], default='ical', max_length=20, verbose_name='Processor')),
|
||||
('recurrence', models.CharField(choices=[('daily', 'daily'), ('weekly', 'weekly')], default='daily', max_length=10, verbose_name='Import recurrence')),
|
||||
('source', models.URLField(help_text='URL of the source document', max_length=1024, verbose_name='Source')),
|
||||
('browsable_url', models.URLField(blank=True, help_text='URL of the corresponding document that will be shown to visitors.', max_length=1024, null=True, verbose_name='Browsable url')),
|
||||
('defaultLocation', models.CharField(blank=True, help_text='Address for each imported event', max_length=512, null=True, verbose_name='Location')),
|
||||
('defaultTags', django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.CharField(max_length=64), blank=True, help_text='A list of tags that describe each imported event.', null=True, size=None, verbose_name='Tags for each imported event')),
|
||||
('defaultCategory', models.ForeignKey(blank=True, default=None, help_text='Category of each imported event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category')),
|
||||
('downloader', models.CharField(choices=[('simple', 'simple'), ('chromium headless', 'Headless Chromium')], default='simple', max_length=20, verbose_name='Downloader')),
|
||||
('defaultPublished', models.BooleanField(default=True, help_text='Status of each imported event (published or draft)', verbose_name='Published')),
|
||||
('name', models.CharField(default='', help_text='Recurrent import name', max_length=512, verbose_name='Name')),
|
||||
],
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='batchimportation',
|
||||
name='recurrentImport',
|
||||
field=models.ForeignKey(blank=True, editable=False, help_text='Reference to the recurrent import processing', null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.recurrentimport', verbose_name='Recurrent import'),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='batchimportation',
|
||||
name='browsable_url',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='batchimportation',
|
||||
name='source',
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='CategorisationRule',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('weight', models.IntegerField(default=0, help_text='The lower is the weight, the earlier the filter is applied', verbose_name='Weight')),
|
||||
('description_contains', models.CharField(blank=True, help_text='Text contained in the description', max_length=512, null=True, verbose_name='Contained in the description')),
|
||||
('desc_exact', models.BooleanField(default=False, help_text='If checked, the extract will be searched for in the description using the exact form (capitals, accents).', verbose_name='Exact description extract')),
|
||||
('title_contains', models.CharField(blank=True, help_text='Text contained in the event title', max_length=512, null=True, verbose_name='Contained in the title')),
|
||||
('title_exact', models.BooleanField(default=False, help_text='If checked, the extract will be searched for in the title using the exact form (capitals, accents).', verbose_name='Exact title extract')),
|
||||
('category', models.ForeignKey(help_text='Category applied to the event', on_delete=django.db.models.deletion.CASCADE, to='agenda_culturel.category', verbose_name='Category')),
|
||||
],
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='event',
|
||||
name='location',
|
||||
field=models.CharField(default='', help_text='Address of the event', max_length=512, verbose_name='Location'),
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='batchimportation',
|
||||
options={'permissions': [('run_batchimportation', 'Can run a batch importation')]},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='categorisationrule',
|
||||
options={'permissions': [('apply_categorisationrules', 'Apply a categorisation rule')]},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='event',
|
||||
options={'permissions': [('set_duplicated_event', 'Can set an event as duplicated')], 'verbose_name': 'Event', 'verbose_name_plural': 'Events'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='recurrentimport',
|
||||
options={'permissions': [('run_recurrentimport', 'Can run a recurrent import')]},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ModerationQuestion',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('question', models.CharField(help_text='Text that will be shown to moderators', max_length=512, unique=True, verbose_name='Question')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ModerationAnswer',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('answer', models.CharField(help_text='Text that will be shown to moderators', max_length=512, unique=True, verbose_name='Answer')),
|
||||
('adds_tags', django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.CharField(max_length=64), blank=True, help_text='A list of tags that will be added if you choose this answer.', null=True, size=None, verbose_name='Adds tags')),
|
||||
('removes_tags', django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.CharField(max_length=64), blank=True, help_text='A list of tags that will be removed if you choose this answer.', null=True, size=None, verbose_name='Removes tags')),
|
||||
('description', models.ForeignKey(help_text='Associated question from moderation', on_delete=django.db.models.deletion.CASCADE, to='agenda_culturel.moderationquestion', verbose_name='Question')),
|
||||
],
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name='moderationanswer',
|
||||
old_name='description',
|
||||
new_name='question',
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='moderationanswer',
|
||||
name='answer',
|
||||
field=models.CharField(help_text='Text that will be shown to moderators', max_length=512, verbose_name='Answer'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='moderationanswer',
|
||||
name='question',
|
||||
field=models.ForeignKey(help_text='Associated question from moderation', on_delete=django.db.models.deletion.CASCADE, related_name='answers', to='agenda_culturel.moderationquestion', verbose_name='Question'),
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='batchimportation',
|
||||
options={'permissions': [('run_batchimportation', 'Can run a batch importation')], 'verbose_name': 'Batch importation', 'verbose_name_plural': 'Batch importations'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='categorisationrule',
|
||||
options={'permissions': [('apply_categorisationrules', 'Apply a categorisation rule')], 'verbose_name': 'Categorisation rule', 'verbose_name_plural': 'Categorisation rules'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='message',
|
||||
options={'verbose_name': 'Contact message', 'verbose_name_plural': 'Contact messages'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='moderationquestion',
|
||||
options={'permissions': [('use_moderation_question', 'Can use a moderation question to tag an event')], 'verbose_name': 'Moderation question', 'verbose_name_plural': 'Moderation questions'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='recurrentimport',
|
||||
options={'permissions': [('run_recurrentimport', 'Can run a recurrent import')], 'verbose_name': 'Recurrent import', 'verbose_name_plural': 'Recurrent imports'},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='event',
|
||||
name='moderated_date',
|
||||
field=models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='categorisationrule',
|
||||
name='loc_exact',
|
||||
field=models.BooleanField(default=False, help_text='If checked, the extract will be searched for in the location using the exact form (capitals, accents).', verbose_name='Exact location extract'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='categorisationrule',
|
||||
name='location_contains',
|
||||
field=models.CharField(blank=True, help_text='Text contained in the event location', max_length=512, null=True, verbose_name='Contained in the location'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='processor',
|
||||
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org')], default='ical', max_length=20, verbose_name='Processor'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='processor',
|
||||
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie')], default='ical', max_length=20, verbose_name='Processor'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='processor',
|
||||
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lephotomat', 'le photomat')], default='ical', max_length=20, verbose_name='Processor'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='processor',
|
||||
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat')], default='ical', max_length=20, verbose_name='Processor'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='processor',
|
||||
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', 'la puce à loreille')], default='ical', max_length=20, verbose_name='Processor'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='name',
|
||||
field=models.CharField(default='', help_text='Recurrent import name. Be careful to choose a name that is easy to understand, as it will be public and displayed on the sites About page.', max_length=512, verbose_name='Name'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Place',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(help_text='Name of the place', verbose_name='Name')),
|
||||
('address', models.CharField(help_text='Address of this place (without city name)', verbose_name='Address')),
|
||||
('location', location_field.models.plain.PlainLocationField(max_length=63)),
|
||||
('aliases', django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.CharField(max_length=512), blank=True, help_text='Alternative names or addresses used to match a place with the free-form location of an event.', null=True, size=None, verbose_name='Alternative names')),
|
||||
('city', models.CharField(default='', help_text='City name', verbose_name='City')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Place',
|
||||
'verbose_name_plural': 'Places',
|
||||
},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='event',
|
||||
name='location',
|
||||
field=models.CharField(default='', help_text='Address of the event in case its not available in the already known places (free form)', max_length=512, verbose_name='Location (free form)'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='event',
|
||||
name='exact_location',
|
||||
field=models.ForeignKey(help_text='Address of the event', null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.place', verbose_name='Location'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='place',
|
||||
name='address',
|
||||
field=models.CharField(blank=True, help_text='Address of this place (without city name)', null=True, verbose_name='Address'),
|
||||
),
|
||||
django.contrib.postgres.operations.UnaccentExtension(
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='duplicatedevents',
|
||||
options={'verbose_name': 'Duplicated events', 'verbose_name_plural': 'Duplicated events'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='staticcontent',
|
||||
options={'verbose_name': 'Static content', 'verbose_name_plural': 'Static contents'},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='event',
|
||||
name='exact_location',
|
||||
field=models.ForeignKey(blank=True, help_text='Address of the event', null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.place', verbose_name='Location'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='processor',
|
||||
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', 'la puce à loreille'), ('Plugin wordpress MEC', 'Plugin wordpress MEC')], default='ical', max_length=20, verbose_name='Processor'),
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='place',
|
||||
options={'ordering': ['name'], 'verbose_name': 'Place', 'verbose_name_plural': 'Places'},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='batchimportation',
|
||||
name='url_source',
|
||||
field=models.URLField(blank=True, editable=False, help_text='Source URL if no RecurrentImport is associated.', max_length=1024, null=True, verbose_name='URL (if not recurrent import)'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='categorisationrule',
|
||||
name='place',
|
||||
field=models.ForeignKey(blank=True, help_text='Location from place', null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.place', verbose_name='Place'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='processor',
|
||||
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page")], default='ical', max_length=20, verbose_name='Processor'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='downloader',
|
||||
field=models.CharField(choices=[('simple', 'simple'), ('chromium headless', 'Headless Chromium'), ('chromium (pause)', 'Headless Chromium (pause)')], default='simple', max_length=20, verbose_name='Downloader'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='message',
|
||||
name='comments',
|
||||
field=django_ckeditor_5.fields.CKEditor5Field(blank=True, default='', help_text='Comments on the message from the moderation team', null=True, verbose_name='Comments'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='message',
|
||||
name='message',
|
||||
field=django_ckeditor_5.fields.CKEditor5Field(help_text='Your message', verbose_name='Message'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='staticcontent',
|
||||
name='text',
|
||||
field=django_ckeditor_5.fields.CKEditor5Field(help_text='Text as shown to the visitors', verbose_name='Content'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='message',
|
||||
name='message',
|
||||
field=django_ckeditor_5.fields.CKEditor5Field(blank=True, help_text='Your message', verbose_name='Message'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='staticcontent',
|
||||
name='text',
|
||||
field=django_ckeditor_5.fields.CKEditor5Field(blank=True, help_text='Text as shown to the visitors', verbose_name='Content'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='processor',
|
||||
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page"), ('cour3coquins', 'la cour des 3 coquins')], default='ical', max_length=20, verbose_name='Processor'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='event',
|
||||
name='location',
|
||||
field=models.CharField(blank=True, default='', help_text='Address of the event in case its not available in the already known places (free form)', max_length=512, null=True, verbose_name='Location (free form)'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='category',
|
||||
name='pictogram',
|
||||
field=models.FileField(blank=True, help_text='Pictogram of the category (svg format)', max_length=1024, null=True, upload_to='', verbose_name='Pictogram'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='category',
|
||||
name='position',
|
||||
field=models.IntegerField(default=0, verbose_name='Position for ordering categories'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='message',
|
||||
name='spam',
|
||||
field=models.BooleanField(default=False, help_text='This message is a spam.', verbose_name='Spam'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='place',
|
||||
name='location_pt',
|
||||
field=location_field.models.spatial.LocationField(default=django.contrib.gis.geos.point.Point(45.783329, 3.08333), srid=4326),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='place',
|
||||
name='location',
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name='place',
|
||||
old_name='location_pt',
|
||||
new_name='location',
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='place',
|
||||
name='location',
|
||||
field=location_field.models.spatial.LocationField(default=django.contrib.gis.geos.point.Point(3.08333, 45.783329), srid=4326),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='processor',
|
||||
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page FB"), ('cour3coquins', 'la cour des 3 coquins')], default='ical', max_length=20, verbose_name='Processor'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ReferenceLocation',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(help_text='Name of the location', unique=True, verbose_name='Name')),
|
||||
('location', location_field.models.spatial.LocationField(default=django.contrib.gis.geos.point.Point(3.08333, 45.783329), srid=4326)),
|
||||
('main', models.BooleanField(default=False, help_text='This location is one of the main locations (shown first).', verbose_name='Main')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Reference location',
|
||||
'verbose_name_plural': 'Reference locations',
|
||||
},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='recurrentimport',
|
||||
name='processor',
|
||||
field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page FB"), ('cour3coquins', 'la cour des 3 coquins'), ('arachnee', 'Arachnée concert')], default='ical', max_length=20, verbose_name='Processor'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='duplicatedevents',
|
||||
name='fixed',
|
||||
field=models.BooleanField(blank=True, default=False, help_text='This duplicated events is fixed, ie exactly one of the listed events is not masked.', null=True, verbose_name='Fixed'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='event',
|
||||
name='masked',
|
||||
field=models.BooleanField(blank=True, default=False, help_text='This event is masked by a duplicated version.', null=True, verbose_name='Masked'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='categorisationrule',
|
||||
name='weight',
|
||||
field=models.IntegerField(default=1, help_text='The lower is the weight, the earlier the filter is applied', verbose_name='Weight'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Tag',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', models.CharField(help_text='Tag name', max_length=512, verbose_name='Name')),
|
||||
                ('description', models.TextField(blank=True, help_text='Description of the tag', null=True, verbose_name='Description')),
                ('principal', models.BooleanField(default=True, help_text='This tag is highlighted as a main tag for visitors, particularly in the filter.', verbose_name='Principal')),
            ],
        ),
        migrations.AlterField(
            model_name='tag',
            name='description',
            field=django_ckeditor_5.fields.CKEditor5Field(blank=True, help_text='Description of the tag', null=True, verbose_name='Description'),
        ),
        migrations.AlterField(
            model_name='tag',
            name='name',
            field=models.CharField(help_text='Tag name', max_length=512, unique=True, verbose_name='Name'),
        ),
        migrations.AddField(
            model_name='tag',
            name='category',
            field=models.ForeignKey(blank=True, default=None, help_text='This tags corresponds to a sub-category of the given category', null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.category', verbose_name='Category'),
        ),
        migrations.AddField(
            model_name='duplicatedevents',
            name='representative',
            field=models.ForeignKey(default=None, help_text='This event is the representative event of the duplicated events group', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.event', verbose_name='Representative event'),
        ),
        migrations.RunPython(
            code=set_representative_from_fixed_masked,
            reverse_code=set_fixed_masked_from_representative,
        ),
        migrations.RemoveField(
            model_name='duplicatedevents',
            name='fixed',
        ),
        migrations.RemoveField(
            model_name='event',
            name='masked',
        ),
        migrations.RenameField(
            model_name='event',
            old_name='possibly_duplicated',
            new_name='other_versions',
        ),
        migrations.AlterField(
            model_name='event',
            name='other_versions',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.duplicatedevents', verbose_name='Other versions'),
        ),
        migrations.RunPython(
            code=strip_place_aliases,
            reverse_code=do_nothing,
        ),
        migrations.DeleteModel(
            name='ModerationAnswer',
        ),
        migrations.DeleteModel(
            name='ModerationQuestion',
        ),
        migrations.AddField(
            model_name='tag',
            name='in_excluded_suggestions',
            field=models.BooleanField(default=False, help_text='This tag will be part of the excluded suggestions.', verbose_name='In excluded suggestions'),
        ),
        migrations.AddField(
            model_name='tag',
            name='in_included_suggestions',
            field=models.BooleanField(default=False, help_text='This tag will be part of the included suggestions.', verbose_name='In included suggestions'),
        ),
        migrations.AlterField(
            model_name='referencelocation',
            name='main',
            field=models.IntegerField(default=0, help_text='This location is one of the main locations (shown first higher values).', verbose_name='Main'),
        ),
        migrations.AddField(
            model_name='place',
            name='description',
            field=django_ckeditor_5.fields.CKEditor5Field(blank=True, help_text='Description of the place, including accessibility.', null=True, verbose_name='Description'),
        ),
        migrations.RemoveField(
            model_name='tag',
            name='category',
        ),
        migrations.CreateModel(
            name='Organisation',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(help_text='Organisation name', max_length=512, unique=True, verbose_name='Name')),
                ('website', models.URLField(blank=True, help_text='Website of the organisation', max_length=1024, null=True, verbose_name='Website')),
                ('description', django_ckeditor_5.fields.CKEditor5Field(blank=True, help_text='Description of the organisation.', null=True, verbose_name='Description')),
                ('principal_place', models.ForeignKey(blank=True, help_text='Place mainly associated with this organizer. Mainly used if there is a similarity in the name, to avoid redundant displays.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.place', verbose_name='Principal place')),
            ],
            options={
                'verbose_name': 'Organisation',
                'verbose_name_plural': 'Organisations',
            },
        ),
        migrations.AddField(
            model_name='recurrentimport',
            name='defaultOrganiser',
            field=models.ForeignKey(blank=True, default=None, help_text='Organiser of each imported event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.organisation', verbose_name='Organiser'),
        ),
        migrations.AddField(
            model_name='event',
            name='organisers',
            field=models.ManyToManyField(blank=True, help_text='list of event organisers. Organizers will only be displayed if one of them does not normally use the venue.', related_name='organised_events', to='agenda_culturel.organisation', verbose_name='Organisers'),
        ),
        migrations.AddIndex(
            model_name='event',
            index=models.Index(fields=['start_day', 'start_time'], name='agenda_cult_start_d_68ab5f_idx'),
        ),
        migrations.AddIndex(
            model_name='event',
            index=models.Index(models.F('start_time'), django.db.models.functions.text.Lower('title'), name='start_time title'),
        ),
        migrations.AlterModelOptions(
            name='tag',
            options={'verbose_name': 'Étiquette', 'verbose_name_plural': 'Étiquettes'},
        ),
        migrations.AddIndex(
            model_name='category',
            index=models.Index(fields=['name'], name='agenda_cult_name_28aa03_idx'),
        ),
        migrations.AddIndex(
            model_name='referencelocation',
            index=models.Index(fields=['name'], name='agenda_cult_name_76f079_idx'),
        ),
        migrations.AddIndex(
            model_name='staticcontent',
            index=models.Index(fields=['name'], name='agenda_cult_name_fe4995_idx'),
        ),
        migrations.AddIndex(
            model_name='tag',
            index=models.Index(fields=['name'], name='agenda_cult_name_9c9c74_idx'),
        ),
        migrations.AlterModelOptions(
            name='tag',
            options={'verbose_name': 'Tag', 'verbose_name_plural': 'Tags'},
        ),
        migrations.AlterField(
            model_name='event',
            name='category',
            field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
        ),
        migrations.AlterField(
            model_name='event',
            name='description',
            field=models.TextField(blank=True, null=True, verbose_name='Description'),
        ),
        migrations.AlterField(
            model_name='event',
            name='end_day',
            field=models.DateField(blank=True, null=True, verbose_name='End day'),
        ),
        migrations.AlterField(
            model_name='event',
            name='end_time',
            field=models.TimeField(blank=True, null=True, verbose_name='End time'),
        ),
        migrations.AlterField(
            model_name='event',
            name='exact_location',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.place', verbose_name='Location'),
        ),
        migrations.AlterField(
            model_name='event',
            name='image',
            field=models.URLField(blank=True, help_text='External URL of the illustration image', max_length=1024, null=True, verbose_name='Illustration (URL)'),
        ),
        migrations.AlterField(
            model_name='event',
            name='local_image',
            field=models.ImageField(blank=True, max_length=1024, null=True, upload_to='', verbose_name='Illustration'),
        ),
        migrations.AlterField(
            model_name='event',
            name='start_day',
            field=models.DateField(verbose_name='Start day'),
        ),
        migrations.AlterField(
            model_name='event',
            name='start_time',
            field=models.TimeField(blank=True, null=True, verbose_name='Start time'),
        ),
        migrations.AlterField(
            model_name='event',
            name='tags',
            field=django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.CharField(max_length=64), blank=True, null=True, size=None, verbose_name='Tags'),
        ),
        migrations.AlterField(
            model_name='event',
            name='title',
            field=models.CharField(max_length=512, verbose_name='Title'),
        ),
        migrations.AddField(
            model_name='referencelocation',
            name='suggested_distance',
            field=models.IntegerField(default=None, help_text='If this distance is given, this location is part of the suggested filters.', null=True, verbose_name='Suggested distance (km)'),
        ),
        migrations.AddField(
            model_name='message',
            name='related_event',
            field=models.ForeignKey(default=None, help_text='The message is associated with this event.', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.event', verbose_name='Related event'),
        ),
        migrations.AlterField(
            model_name='recurrentimport',
            name='processor',
            field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page FB"), ('cour3coquins', 'la cour des 3 coquins'), ('arachnee', 'Arachnée concert'), ('rio', 'Le Rio')], default='ical', max_length=20, verbose_name='Processor'),
        ),
        migrations.AddField(
            model_name='event',
            name='created_by_user',
            field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_DEFAULT, related_name='created_events', to=settings.AUTH_USER_MODEL, verbose_name='Author of the event creation'),
        ),
        migrations.AddField(
            model_name='event',
            name='imported_by_user',
            field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_DEFAULT, related_name='imported_events', to=settings.AUTH_USER_MODEL, verbose_name='Author of the last importation'),
        ),
        migrations.AddField(
            model_name='event',
            name='moderated_by_user',
            field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_DEFAULT, related_name='moderated_events', to=settings.AUTH_USER_MODEL, verbose_name='Author of the last moderation'),
        ),
        migrations.AddField(
            model_name='event',
            name='modified_by_user',
            field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_DEFAULT, related_name='modified_events', to=settings.AUTH_USER_MODEL, verbose_name='Author of the last modification'),
        ),
        migrations.AddField(
            model_name='place',
            name='postcode',
            field=models.CharField(blank=True, help_text='The post code is not displayed, but makes it easier to find an address when you enter it.', null=True, verbose_name='Postcode'),
        ),
        migrations.AlterModelOptions(
            name='message',
            options={'verbose_name': 'Message', 'verbose_name_plural': 'Messages'},
        ),
        migrations.AddField(
            model_name='message',
            name='user',
            field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to=settings.AUTH_USER_MODEL, verbose_name='Author of the message'),
        ),
        migrations.AddIndex(
            model_name='event',
            index=models.Index(fields=['end_day', 'end_time'], name='agenda_cult_end_day_4660a5_idx'),
        ),
        migrations.AddIndex(
            model_name='event',
            index=models.Index(fields=['status'], name='agenda_cult_status_893243_idx'),
        ),
        migrations.AddIndex(
            model_name='event',
            index=models.Index(fields=['recurrence_dtstart', 'recurrence_dtend'], name='agenda_cult_recurre_a8911c_idx'),
        ),
        migrations.AddIndex(
            model_name='event',
            index=models.Index(models.F('start_time'), models.F('start_day'), models.F('end_day'), models.F('end_time'), django.db.models.functions.text.Lower('title'), name='datetimes title'),
        ),
        migrations.AddIndex(
            model_name='batchimportation',
            index=models.Index(fields=['created_date'], name='agenda_cult_created_a23990_idx'),
        ),
        migrations.AddIndex(
            model_name='batchimportation',
            index=models.Index(fields=['status'], name='agenda_cult_status_54b205_idx'),
        ),
        migrations.AddIndex(
            model_name='batchimportation',
            index=models.Index(fields=['created_date', 'recurrentImport'], name='agenda_cult_created_0296e4_idx'),
        ),
        migrations.AddIndex(
            model_name='duplicatedevents',
            index=models.Index(fields=['representative'], name='agenda_cult_represe_9a4fa2_idx'),
        ),
        migrations.AddIndex(
            model_name='message',
            index=models.Index(fields=['related_event'], name='agenda_cult_related_79de3c_idx'),
        ),
        migrations.AddIndex(
            model_name='message',
            index=models.Index(fields=['user'], name='agenda_cult_user_id_42dc88_idx'),
        ),
        migrations.AddIndex(
            model_name='message',
            index=models.Index(fields=['date'], name='agenda_cult_date_049c71_idx'),
        ),
        migrations.AddIndex(
            model_name='message',
            index=models.Index(fields=['spam', 'closed'], name='agenda_cult_spam_22f9b3_idx'),
        ),
        migrations.AddIndex(
            model_name='place',
            index=models.Index(fields=['name'], name='agenda_cult_name_222846_idx'),
        ),
        migrations.AddIndex(
            model_name='place',
            index=models.Index(fields=['city'], name='agenda_cult_city_156dc7_idx'),
        ),
        migrations.AddIndex(
            model_name='place',
            index=models.Index(fields=['location'], name='agenda_cult_locatio_6f3c05_idx'),
        ),
        migrations.AddField(
            model_name='recurrentimport',
            name='forceLocation',
            field=models.BooleanField(default=False, help_text='force location even if another is detected.', verbose_name='Force location'),
        ),
        migrations.AddField(
            model_name='message',
            name='message_type',
            field=models.CharField(choices=[('from_contributor', 'From contributor'), ('import_process', 'Import process'), ('contact_form', 'Contact form'), ('event_report', 'Event report'), ('from_contrib_no_msg', 'From contributor (without message)')], default=None, max_length=20, null=True, verbose_name='Type'),
        ),
        migrations.AlterField(
            model_name='event',
            name='reference_urls',
            field=django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.URLField(max_length=512), blank=True, null=True, size=None, verbose_name='Online sources or ticketing'),
        ),
        migrations.AlterField(
            model_name='tag',
            name='principal',
            field=models.BooleanField(default=False, help_text='This tag is highlighted as a main tag for visitors, particularly in the filter.', verbose_name='Principal'),
        ),
        migrations.AlterField(
            model_name='recurrentimport',
            name='processor',
            field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page FB"), ('cour3coquins', 'la cour des 3 coquins'), ('arachnee', 'Arachnée concert'), ('rio', 'Le Rio'), ('raymonde', 'La Raymonde'), ('apidae', 'Agenda apidae tourisme')], default='ical', max_length=20, verbose_name='Processor'),
        ),
        migrations.AlterField(
            model_name='recurrentimport',
            name='processor',
            field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page FB"), ('cour3coquins', 'la cour des 3 coquins'), ('arachnee', 'Arachnée concert'), ('rio', 'Le Rio'), ('raymonde', 'La Raymonde'), ('apidae', 'Agenda apidae tourisme'), ('iguana', 'Agenda iguana (médiathèques)')], default='ical', max_length=20, verbose_name='Processor'),
        ),
        migrations.AddField(
            model_name='category',
            name='slug',
            field=autoslug.fields.AutoSlugField(default=None, editable=False, null=True, populate_from='name', unique=True),
        ),
        migrations.RunPython(
            code=migrate_data_slug_forward,
            reverse_code=migrate_data_slug_backward,
        ),
        migrations.AlterField(
            model_name='message',
            name='message_type',
            field=models.CharField(choices=[('from_contributor', 'From contributor'), ('import_process', 'Import process'), ('update_process', 'Update process'), ('contact_form', 'Contact form'), ('event_report', 'Event report'), ('from_contrib_no_msg', 'From contributor (without message)')], default=None, max_length=20, null=True, verbose_name='Type'),
        ),
        migrations.AlterField(
            model_name='message',
            name='message_type',
            field=models.CharField(choices=[('from_contributor', 'From contributor'), ('import_process', 'Import process'), ('update_process', 'Update process'), ('contact_form', 'Contact form'), ('event_report', 'Event report'), ('from_contrib_no_msg', 'From contributor (without message)'), ('warning', 'Warning')], default=None, max_length=20, null=True, verbose_name='Type'),
        ),
        migrations.AlterField(
            model_name='event',
            name='created_by_user',
            field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_DEFAULT, related_name='created_events', to=settings.AUTH_USER_MODEL, verbose_name='Author of the event creation'),
        ),
        migrations.AlterField(
            model_name='event',
            name='imported_by_user',
            field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_DEFAULT, related_name='imported_events', to=settings.AUTH_USER_MODEL, verbose_name='Author of the last importation'),
        ),
        migrations.AlterField(
            model_name='event',
            name='moderated_by_user',
            field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_DEFAULT, related_name='moderated_events', to=settings.AUTH_USER_MODEL, verbose_name='Author of the last moderation'),
        ),
        migrations.AlterField(
            model_name='event',
            name='modified_by_user',
            field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_DEFAULT, related_name='modified_events', to=settings.AUTH_USER_MODEL, verbose_name='Author of the last modification'),
        ),
        migrations.AlterField(
            model_name='recurrentimport',
            name='processor',
            field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page FB"), ('Billetterie CF', 'Billetterie Clermont-Ferrand'), ('arachnee', 'Arachnée concert'), ('rio', 'Le Rio'), ('raymonde', 'La Raymonde'), ('apidae', 'Agenda apidae tourisme'), ('iguana', 'Agenda iguana (médiathèques)')], default='ical', max_length=20, verbose_name='Processor'),
        ),
        migrations.AlterField(
            model_name='recurrentimport',
            name='processor',
            field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page FB"), ('Billetterie CF', 'Billetterie Clermont-Ferrand'), ('arachnee', 'Arachnée concert'), ('rio', 'Le Rio'), ('raymonde', 'La Raymonde'), ('apidae', 'Agenda apidae tourisme'), ('iguana', 'Agenda iguana (médiathèques)'), ('Mille formes', 'Mille formes')], default='ical', max_length=20, verbose_name='Processor'),
        ),
        migrations.AddIndex(
            model_name='event',
            index=models.Index(fields=['recurrences'], name='agenda_cult_recurre_5c5094_idx'),
        ),
        migrations.AlterField(
            model_name='recurrentimport',
            name='processor',
            field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page FB"), ('Billetterie CF', 'Billetterie Clermont-Ferrand'), ('arachnee', 'Arachnée concert'), ('rio', 'Le Rio'), ('raymonde', 'La Raymonde'), ('apidae', 'Agenda apidae tourisme'), ('iguana', 'Agenda iguana (médiathèques)'), ('Mille formes', 'Mille formes'), ('Amis cerises', 'Les Amis du Temps des Cerises')], default='ical', max_length=20, verbose_name='Processor'),
        ),
        migrations.AlterField(
            model_name='recurrentimport',
            name='processor',
            field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page FB"), ('Billetterie CF', 'Billetterie Clermont-Ferrand'), ('arachnee', 'Arachnée concert'), ('rio', 'Le Rio'), ('raymonde', 'La Raymonde'), ('apidae', 'Agenda apidae tourisme'), ('iguana', 'Agenda iguana (médiathèques)'), ('Mille formes', 'Mille formes'), ('Amis cerises', 'Les Amis du Temps des Cerises'), ('Mobilizon', 'Mobilizon')], default='ical', max_length=20, verbose_name='Processor'),
        ),
        migrations.AlterField(
            model_name='recurrentimport',
            name='source',
            field=models.URLField(help_text='URL of the source document', max_length=1024, unique=True, verbose_name='Source'),
        ),
        migrations.AlterField(
            model_name='recurrentimport',
            name='processor',
            field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC'), ('ical naive tz', 'ical naive timezone'), ('lacoope', 'lacoope.org'), ('lacomedie', 'la comédie'), ('lefotomat', 'le fotomat'), ('lapucealoreille', "la puce à l'oreille"), ('Plugin wordpress MEC', 'Plugin wordpress MEC'), ('Facebook events', "Événements d'une page FB"), ('Billetterie CF', 'Billetterie Clermont-Ferrand'), ('arachnee', 'Arachnée concert'), ('rio', 'Le Rio'), ('raymonde', 'La Raymonde'), ('apidae', 'Agenda apidae tourisme'), ('iguana', 'Agenda iguana (médiathèques)'), ('Mille formes', 'Mille formes'), ('Amis cerises', 'Les Amis du Temps des Cerises'), ('Mobilizon', 'Mobilizon')], default='ical', max_length=20, verbose_name='Processor'),
        ),
        migrations.AlterField(
            model_name='event',
            name='local_image',
            field=django_resized.forms.ResizedImageField(blank=True, crop=None, force_format='JPEG', keep_meta=True, max_length=1024, null=True, quality=75, scale=0.5, size=[900, None], upload_to='', verbose_name='Illustration'),
        ),
        migrations.AlterField(
            model_name='place',
            name='city',
            field=models.CharField(help_text='City name', verbose_name='City'),
        ),
    ]
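The two RunPython steps above reference data-migration functions (set_representative_from_fixed_masked and its reverse) whose bodies sit earlier in the file and are not part of this hunk. They have to run between AddField(representative) and the RemoveField steps, while both the old fixed/masked columns and the new foreign key exist. A plausible sketch of the forward function follows; since the actual body is not shown in this diff, every line of logic here is an assumption:

def set_representative_from_fixed_masked(apps, schema_editor):
    # Historical models only: at this point Event still has `masked`, the FK
    # to DuplicatedEvents is still named `possibly_duplicated`, and
    # DuplicatedEvents still has `fixed`.
    DuplicatedEvents = apps.get_model("agenda_culturel", "DuplicatedEvents")
    Event = apps.get_model("agenda_culturel", "Event")
    for dup in DuplicatedEvents.objects.filter(fixed=True):
        # Hypothetical rule: the unmasked member of a fixed group becomes
        # its representative.
        dup.representative = Event.objects.filter(
            possibly_duplicated=dup, masked=False
        ).first()
        dup.save(update_fields=["representative"])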
19 src/agenda_culturel/migrations/0002_alter_event_category.py Normal file
@@ -0,0 +1,19 @@
# Generated by Django 4.2.7 on 2023-11-15 14:59

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='event',
            name='category',
            field=models.ForeignKey(default=1, help_text='Category of the event', on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
        ),
    ]

19 src/agenda_culturel/migrations/0003_alter_event_category.py Normal file
@@ -0,0 +1,19 @@
# Generated by Django 4.2.7 on 2023-11-15 15:26

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0002_alter_event_category'),
    ]

    operations = [
        migrations.AlterField(
            model_name='event',
            name='category',
            field=models.ForeignKey(default=None, help_text='Category of the event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
        ),
    ]

19 src/agenda_culturel/migrations/0004_alter_event_category.py Normal file
@@ -0,0 +1,19 @@
# Generated by Django 4.2.7 on 2023-11-15 15:38

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0003_alter_event_category'),
    ]

    operations = [
        migrations.AlterField(
            model_name='event',
            name='category',
            field=models.ForeignKey(default=1, help_text='Category of the event', null=True, on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
        ),
    ]

16 src/agenda_culturel/migrations/0005_delete_eventsubmissionform.py Normal file
@@ -0,0 +1,16 @@
# Generated by Django 4.2.7 on 2023-11-26 12:16

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0004_alter_event_category'),
    ]

    operations = [
        migrations.DeleteModel(
            name='EventSubmissionForm',
        ),
    ]
18 src/agenda_culturel/migrations/0006_alter_event_status.py Normal file
@@ -0,0 +1,18 @@
# Generated by Django 4.2.7 on 2023-12-12 19:10

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0005_delete_eventsubmissionform'),
    ]

    operations = [
        migrations.AlterField(
            model_name='event',
            name='status',
            field=models.CharField(choices=[('published', 'Published'), ('draft', 'Draft'), ('trash', 'Trash')], default='published', max_length=20, verbose_name='Status'),
        ),
    ]

26 src/agenda_culturel/migrations/0007_contactmessage.py Normal file
@@ -0,0 +1,26 @@
# Generated by Django 4.2.7 on 2023-12-16 18:06

import ckeditor.fields
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0006_alter_event_status'),
    ]

    operations = [
        migrations.CreateModel(
            name='ContactMessage',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('subject', models.CharField(help_text='The subject of your message', max_length=512, verbose_name='Subject')),
                ('name', models.CharField(blank=True, help_text='Your name', max_length=512, null=True, verbose_name='Name')),
                ('author', models.EmailField(blank=True, help_text='Your email address', max_length=254, null=True, verbose_name='Email address')),
                ('date', models.DateTimeField(auto_now_add=True)),
                ('closed', models.BooleanField(default=False, help_text='this message has been processed and no longer needs to be handled', verbose_name='Closed')),
                ('message', ckeditor.fields.RichTextField(default='', help_text='Comments on the message from the modaration team', verbose_name='Comments')),
            ],
        ),
    ]

28 src/agenda_culturel/migrations/0008_remove_contactmessage_author_contactmessage_email_and_more.py Normal file
@@ -0,0 +1,28 @@
# Generated by Django 4.2.7 on 2023-12-16 18:33

import ckeditor.fields
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0007_contactmessage'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='contactmessage',
            name='author',
        ),
        migrations.AddField(
            model_name='contactmessage',
            name='email',
            field=models.EmailField(blank=True, help_text='Your email address', max_length=254, null=True, verbose_name='Email address'),
        ),
        migrations.AlterField(
            model_name='contactmessage',
            name='message',
            field=ckeditor.fields.RichTextField(default='', help_text='Comments on the message from the moderation team', verbose_name='Comments'),
        ),
    ]

24 src/agenda_culturel/migrations/0009_contactmessage_comments_alter_contactmessage_message.py Normal file
@@ -0,0 +1,24 @@
# Generated by Django 4.2.7 on 2023-12-16 18:35

import ckeditor.fields
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0008_remove_contactmessage_author_contactmessage_email_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='contactmessage',
            name='comments',
            field=ckeditor.fields.RichTextField(default='', help_text='Comments on the message from the moderation team', verbose_name='Comments'),
        ),
        migrations.AlterField(
            model_name='contactmessage',
            name='message',
            field=ckeditor.fields.RichTextField(help_text='Your message', verbose_name='Message'),
        ),
    ]

19 src/agenda_culturel/migrations/0010_alter_contactmessage_comments.py Normal file
@@ -0,0 +1,19 @@
# Generated by Django 4.2.7 on 2023-12-16 19:54

import ckeditor.fields
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0009_contactmessage_comments_alter_contactmessage_message'),
    ]

    operations = [
        migrations.AlterField(
            model_name='contactmessage',
            name='comments',
            field=ckeditor.fields.RichTextField(blank=True, default='', help_text='Comments on the message from the moderation team', null=True, verbose_name='Comments'),
        ),
    ]
24 src/agenda_culturel/migrations/0011_batchimportation.py Normal file
@@ -0,0 +1,24 @@
# Generated by Django 4.2.7 on 2023-12-23 08:01

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0010_alter_contactmessage_comments'),
    ]

    operations = [
        migrations.CreateModel(
            name='BatchImportation',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_date', models.DateTimeField(auto_now_add=True)),
                ('source', models.URLField(blank=True, help_text='URL of the source document', max_length=1024, null=True, verbose_name='Source')),
                ('browsable_url', models.URLField(blank=True, help_text='URL of the corresponding document that will be shown to visitors.', max_length=1024, null=True, verbose_name='Browsable url')),
                ('running', models.BooleanField(default=True, help_text='This batch importation is still running', verbose_name='Running')),
                ('success', models.BooleanField(default=False, help_text='This batch importation successfully finished', verbose_name='Success')),
            ],
        ),
    ]

26 src/agenda_culturel/migrations/0012_remove_batchimportation_running_and_more.py Normal file
@@ -0,0 +1,26 @@
# Generated by Django 4.2.7 on 2023-12-23 08:21

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0011_batchimportation'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='batchimportation',
            name='running',
        ),
        migrations.RemoveField(
            model_name='batchimportation',
            name='success',
        ),
        migrations.AddField(
            model_name='batchimportation',
            name='status',
            field=models.CharField(choices=[('running', 'Running'), ('canceled', 'Canceled'), ('success', 'Success'), ('failed', 'Failed')], default='running', max_length=20, verbose_name='Status'),
        ),
    ]

28 src/agenda_culturel/migrations/0013_batchimportation_error_message_and_more.py Normal file
@@ -0,0 +1,28 @@
# Generated by Django 4.2.7 on 2023-12-23 10:14

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0012_remove_batchimportation_running_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='batchimportation',
            name='error_message',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Error message'),
        ),
        migrations.AddField(
            model_name='batchimportation',
            name='nb_imported',
            field=models.PositiveIntegerField(default=0, verbose_name='Number of imported events'),
        ),
        migrations.AddField(
            model_name='batchimportation',
            name='nb_initial',
            field=models.PositiveIntegerField(default=0, verbose_name='Number of collected events'),
        ),
    ]

18 src/agenda_culturel/migrations/0014_batchimportation_celery_id.py Normal file
@@ -0,0 +1,18 @@
# Generated by Django 4.2.7 on 2023-12-23 11:06

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0013_batchimportation_error_message_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='batchimportation',
            name='celery_id',
            field=models.CharField(default='', max_length=128),
        ),
    ]
19 src/agenda_culturel/migrations/0015_event_uuids.py Normal file
@@ -0,0 +1,19 @@
# Generated by Django 4.2.7 on 2023-12-23 11:58

from django.db import migrations, models
import django_better_admin_arrayfield.models.fields


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0014_batchimportation_celery_id'),
    ]

    operations = [
        migrations.AddField(
            model_name='event',
            name='uuids',
            field=django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.CharField(max_length=512), blank=True, help_text='UUIDs from import to detect duplicated entries.', null=True, size=None, verbose_name='UUIDs'),
        ),
    ]

18 src/agenda_culturel/migrations/0016_batchimportation_nb_removed.py Normal file
@@ -0,0 +1,18 @@
# Generated by Django 4.2.7 on 2023-12-23 12:51

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0015_event_uuids'),
    ]

    operations = [
        migrations.AddField(
            model_name='batchimportation',
            name='nb_removed',
            field=models.PositiveIntegerField(default=0, verbose_name='Number of removed events'),
        ),
    ]

18 src/agenda_culturel/migrations/0017_batchimportation_nb_updated.py Normal file
@@ -0,0 +1,18 @@
# Generated by Django 4.2.7 on 2023-12-23 13:01

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0016_batchimportation_nb_removed'),
    ]

    operations = [
        migrations.AddField(
            model_name='batchimportation',
            name='nb_updated',
            field=models.PositiveIntegerField(default=0, verbose_name='Number of updated events'),
        ),
    ]

28 src/agenda_culturel/migrations/0018_event_imported_date_alter_event_created_date_and_more.py Normal file
@@ -0,0 +1,28 @@
# Generated by Django 4.2.7 on 2023-12-23 13:31

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0017_batchimportation_nb_updated'),
    ]

    operations = [
        migrations.AddField(
            model_name='event',
            name='imported_date',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='event',
            name='created_date',
            field=models.DateTimeField(editable=False),
        ),
        migrations.AlterField(
            model_name='event',
            name='modified_date',
            field=models.DateTimeField(blank=True, null=True),
        ),
    ]

25 src/agenda_culturel/migrations/0019_duplicatedevents_event_possibly_duplicated.py Normal file
@@ -0,0 +1,25 @@
# Generated by Django 4.2.7 on 2023-12-29 11:44

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0018_event_imported_date_alter_event_created_date_and_more'),
    ]

    operations = [
        migrations.CreateModel(
            name='DuplicatedEvents',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        migrations.AddField(
            model_name='event',
            name='possibly_duplicated',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.duplicatedevents'),
        ),
    ]
11 src/agenda_culturel/migrations/0020_trigram_extension.py Normal file
@@ -0,0 +1,11 @@
from django.db import migrations
from django.contrib.postgres.operations import TrigramExtension


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0019_duplicatedevents_event_possibly_duplicated'),
    ]

    operations = [TrigramExtension()]
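TrigramExtension() installs PostgreSQL's pg_trgm extension, the prerequisite for trigram-similarity lookups in the ORM, presumably in support of the duplicate detection that 0019 introduced. A minimal illustration of the kind of query this unlocks; the Event import path and the 0.3 threshold are assumptions, not taken from this diff:

from django.contrib.postgres.search import TrigramSimilarity

from agenda_culturel.models import Event  # import path assumed


def similar_events(title, threshold=0.3):
    # Rank events by trigram similarity between titles; requires pg_trgm.
    return (
        Event.objects.annotate(similarity=TrigramSimilarity("title", title))
        .filter(similarity__gt=threshold)
        .order_by("-similarity")
    )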
19 src/agenda_culturel/migrations/0021_alter_event_possibly_duplicated.py Normal file
@@ -0,0 +1,19 @@
# Generated by Django 4.2.7 on 2023-12-30 12:32

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0020_trigram_extension'),
    ]

    operations = [
        migrations.AlterField(
            model_name='event',
            name='possibly_duplicated',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.duplicatedevents', verbose_name='Possibly duplicated'),
        ),
    ]

19 src/agenda_culturel/migrations/0022_event_import_sources.py Normal file
@@ -0,0 +1,19 @@
# Generated by Django 4.2.7 on 2023-12-31 20:06

from django.db import migrations, models
import django_better_admin_arrayfield.models.fields


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0021_alter_event_possibly_duplicated'),
    ]

    operations = [
        migrations.AddField(
            model_name='event',
            name='import_sources',
            field=django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.CharField(max_length=512), blank=True, help_text='Importation source used to detect removed entries.', null=True, size=None, verbose_name='Importation source'),
        ),
    ]

19 src/agenda_culturel/migrations/0023_event_recurrences.py Normal file
@@ -0,0 +1,19 @@
# Generated by Django 4.2.7 on 2024-01-02 10:13

from django.db import migrations
import recurrence.fields


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0022_event_import_sources'),
    ]

    operations = [
        migrations.AddField(
            model_name='event',
            name='recurrences',
            field=recurrence.fields.RecurrenceField(blank=True, null=True),
        ),
    ]

28 src/agenda_culturel/migrations/0024_event_dtend_event_dtstart_alter_event_status.py Normal file
@@ -0,0 +1,28 @@
# Generated by Django 4.2.7 on 2024-01-04 18:58

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0023_event_recurrences'),
    ]

    operations = [
        migrations.AddField(
            model_name='event',
            name='dtend',
            field=models.DateTimeField(blank=True, editable=False, null=True),
        ),
        migrations.AddField(
            model_name='event',
            name='dtstart',
            field=models.DateTimeField(blank=True, editable=False, null=True),
        ),
        migrations.AlterField(
            model_name='event',
            name='status',
            field=models.CharField(choices=[('published', 'Published'), ('draft', 'Draft'), ('trash', 'Trash')], default='draft', max_length=20, verbose_name='Status'),
        ),
    ]
23 src/agenda_culturel/migrations/0025_rename_dtend_event_recurrence_dtend_and_more.py Normal file
@@ -0,0 +1,23 @@
# Generated by Django 4.2.7 on 2024-01-04 19:35

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0024_event_dtend_event_dtstart_alter_event_status'),
    ]

    operations = [
        migrations.RenameField(
            model_name='event',
            old_name='dtend',
            new_name='recurrence_dtend',
        ),
        migrations.RenameField(
            model_name='event',
            old_name='dtstart',
            new_name='recurrence_dtstart',
        ),
    ]

19 src/agenda_culturel/migrations/0026_alter_event_recurrences.py Normal file
@@ -0,0 +1,19 @@
# Generated by Django 4.2.7 on 2024-01-05 15:23

from django.db import migrations
import recurrence.fields


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0025_rename_dtend_event_recurrence_dtend_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='event',
            name='recurrences',
            field=recurrence.fields.RecurrenceField(blank=True, null=True, verbose_name='Recurrence'),
        ),
    ]

24 src/agenda_culturel/migrations/0027_set_dtstart_dtend.py Normal file
@@ -0,0 +1,24 @@
from django.db import migrations


def forwards_func(apps, schema_editor):
    pass
    #Event = apps.get_model("agenda_culturel", "Event")
    #db_alias = schema_editor.connection.alias
    #events = Event.objects.filter(recurrence_dtstart__isnull=True)
    #for e in events:
    #    e.update_recurrence_dtstartend()
    #Event.objects.bulk_update(events, ["recurrence_dtstart", "recurrence_dtend"])


class Migration(migrations.Migration):
    dependencies = [
        ('agenda_culturel', '0026_alter_event_recurrences'),
    ]

    operations = [
        migrations.RunPython(forwards_func),
    ]
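Note that 0027 ships forwards_func as a no-op, with the intended backfill left commented out — plausibly because update_recurrence_dtstartend() is a method of the live Event model and is not available on the historical model that apps.get_model() returns inside a migration. Uncommented, the disabled lines amount to the following (a sketch of the commented code above, not runnable as a migration for exactly that reason):

def forwards_func(apps, schema_editor):
    Event = apps.get_model("agenda_culturel", "Event")
    events = Event.objects.filter(recurrence_dtstart__isnull=True)
    for e in events:
        e.update_recurrence_dtstartend()  # only defined on the live model
    # Write both backfilled columns in one query per batch.
    Event.objects.bulk_update(events, ["recurrence_dtstart", "recurrence_dtend"])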
33 src/agenda_culturel/migrations/0028_recurrentimport_batchimportation_recurrentimport.py Normal file
@@ -0,0 +1,33 @@
# Generated by Django 4.2.7 on 2024-01-20 15:59

from django.db import migrations, models
import django.db.models.deletion
import django_better_admin_arrayfield.models.fields


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0027_set_dtstart_dtend'),
    ]

    operations = [
        migrations.CreateModel(
            name='RecurrentImport',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('processor', models.CharField(choices=[('ical', 'ical')], default='ical', max_length=20, verbose_name='Processor')),
                ('recurrence', models.CharField(choices=[('daily', 'daily'), ('weekly', 'weekly')], default='daily', max_length=10, verbose_name='Import recurrence')),
                ('source', models.URLField(help_text='URL of the source document', max_length=1024, verbose_name='Source')),
                ('browsable_url', models.URLField(blank=True, help_text='URL of the corresponding document that will be shown to visitors.', max_length=1024, null=True, verbose_name='Browsable url')),
                ('defaultLocation', models.CharField(blank=True, help_text='Address for each imported event', max_length=512, null=True, verbose_name='Location')),
                ('defaultTags', django_better_admin_arrayfield.models.fields.ArrayField(base_field=models.CharField(max_length=64), blank=True, help_text='A list of tags that describe each imported event.', null=True, size=None, verbose_name='Tags for each imported event')),
                ('defaultCategory', models.ForeignKey(blank=True, help_text='Category of each imported event', null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.category', verbose_name='Category')),
            ],
        ),
        migrations.AddField(
            model_name='batchimportation',
            name='recurrentImport',
            field=models.ForeignKey(blank=True, editable=False, help_text='Reference to the recurrent import processing', null=True, on_delete=django.db.models.deletion.SET_NULL, to='agenda_culturel.recurrentimport', verbose_name='Recurrent import'),
        ),
    ]

21 src/agenda_culturel/migrations/0029_remove_batchimportation_browsable_url_and_more.py Normal file
@@ -0,0 +1,21 @@
# Generated by Django 4.2.7 on 2024-01-25 21:37

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0028_recurrentimport_batchimportation_recurrentimport'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='batchimportation',
            name='browsable_url',
        ),
        migrations.RemoveField(
            model_name='batchimportation',
            name='source',
        ),
    ]

18 src/agenda_culturel/migrations/0030_recurrentimport_downloader.py Normal file
@@ -0,0 +1,18 @@
# Generated by Django 4.2.7 on 2024-01-26 10:15

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0029_remove_batchimportation_browsable_url_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='recurrentimport',
            name='downloader',
            field=models.CharField(choices=[('simple', 'simple'), ('chromium headless', 'Headless Chromium')], default='simple', max_length=20, verbose_name='Downloader'),
        ),
    ]

18 src/agenda_culturel/migrations/0031_recurrentimport_defaultpublished.py Normal file
@@ -0,0 +1,18 @@
# Generated by Django 4.2.7 on 2024-01-26 12:43

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0030_recurrentimport_downloader'),
    ]

    operations = [
        migrations.AddField(
            model_name='recurrentimport',
            name='defaultPublished',
            field=models.BooleanField(default=True, help_text='Status of each imported event (published or draft)', verbose_name='Published'),
        ),
    ]

24 src/agenda_culturel/migrations/0032_alter_recurrentimport_defaultcategory_and_more.py Normal file
@@ -0,0 +1,24 @@
# Generated by Django 4.2.7 on 2024-02-17 08:45

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0031_recurrentimport_defaultpublished'),
    ]

    operations = [
        migrations.AlterField(
            model_name='recurrentimport',
            name='defaultCategory',
            field=models.ForeignKey(default=1, help_text='Category of each imported event', on_delete=django.db.models.deletion.SET_DEFAULT, to='agenda_culturel.category', verbose_name='Category'),
        ),
        migrations.AlterField(
            model_name='recurrentimport',
            name='processor',
            field=models.CharField(choices=[('ical', 'ical'), ('icalnobusy', 'ical no busy'), ('icalnovc', 'ical no VC')], default='ical', max_length=20, verbose_name='Processor'),
        ),
    ]
26 src/agenda_culturel/migrations/0033_categorisationrule.py Normal file
@@ -0,0 +1,26 @@
# Generated by Django 4.2.7 on 2024-02-17 10:40

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0032_alter_recurrentimport_defaultcategory_and_more'),
    ]

    operations = [
        migrations.CreateModel(
            name='CategorisationRule',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('weight', models.IntegerField(default=0, help_text='The lower is the weight, the earlier the filter is applied', verbose_name='Weight')),
                ('description_contains', models.CharField(blank=True, help_text='Text contained in the description', max_length=512, null=True, verbose_name='Contained in the description')),
                ('desc_exact', models.BooleanField(default=False, help_text='If checked, the extract will be searched for in the description using the exact form (capitals, accents).', verbose_name='Exact description extract')),
                ('title_contains', models.CharField(blank=True, help_text='Text contained in the event title', max_length=512, null=True, verbose_name='Contained in the title')),
                ('title_exact', models.BooleanField(default=False, help_text='If checked, the extract will be searched for in the title using the exact form (capitals, accents).', verbose_name='Exact title extract')),
                ('category', models.ForeignKey(help_text='Category applied to the event', on_delete=django.db.models.deletion.CASCADE, to='agenda_culturel.category', verbose_name='Category')),
            ],
        ),
    ]

18 src/agenda_culturel/migrations/0034_recurrentimport_name.py Normal file
@@ -0,0 +1,18 @@
# Generated by Django 4.2.7 on 2024-02-17 14:21

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0033_categorisationrule'),
    ]

    operations = [
        migrations.AddField(
            model_name='recurrentimport',
            name='name',
            field=models.CharField(default='', help_text='Recurrent import name', max_length=512, verbose_name='Name'),
        ),
    ]

18 src/agenda_culturel/migrations/0035_alter_event_location.py Normal file
@@ -0,0 +1,18 @@
# Generated by Django 4.2.7 on 2024-02-17 14:39

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0034_recurrentimport_name'),
    ]

    operations = [
        migrations.AlterField(
            model_name='event',
            name='location',
            field=models.CharField(default='', help_text='Address of the event', max_length=512, verbose_name='Location'),
        ),
    ]

25 src/agenda_culturel/migrations/0036_auto_20240331_1421.py Normal file
@@ -0,0 +1,25 @@
# Create groups for several tasks

from django.db import migrations
from django.contrib.auth.models import Group


def groups_permissions_creation(apps, schema_editor):
    user_roles = ["Automation Manager", "Q&A Manager", "Receptionist"]

    for name in user_roles:
        Group.objects.create(name=name)


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0035_alter_event_location'),
    ]

    operations = [
        migrations.RunPython(groups_permissions_creation),
    ]
29 src/agenda_culturel/migrations/0037_alter_batchimportation_options_and_more.py Normal file
@@ -0,0 +1,29 @@
# Generated by Django 4.2.7 on 2024-03-31 16:15

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0036_auto_20240331_1421'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='batchimportation',
            options={'permissions': [('run_batchimportation', 'Can run a batch importation')]},
        ),
        migrations.AlterModelOptions(
            name='categorisationrule',
            options={'permissions': [('apply_categorisationrules', 'Apply a categorisation rule')]},
        ),
        migrations.AlterModelOptions(
            name='event',
            options={'permissions': [('set_duplicated_event', 'Can set an event as duplicated')], 'verbose_name': 'Event', 'verbose_name_plural': 'Events'},
        ),
        migrations.AlterModelOptions(
            name='recurrentimport',
            options={'permissions': [('run_recurrentimport', 'Can run a recurrent import')]},
        ),
    ]

44 src/agenda_culturel/migrations/0038_auto_20240331_1815.py Normal file
@@ -0,0 +1,44 @@
# Generated by Django 4.2.7 on 2024-03-31 16:15

from django.db import migrations

from django.contrib.auth.models import Group, Permission


def update_groups_permissions(apps, schema_editor):
    # first add a missing role
    user_roles = ["Moderator"]

    for name in user_roles:
        Group.objects.create(name=name)

    all_perms = Permission.objects.all()

    # set permissions for moderators
    moderator_perms = [i for i in all_perms if i.content_type.app_label == 'agenda_culturel' and i.content_type.model in ['event', 'duplicatedevents']]
    Group.objects.get(name="Moderator").permissions.add(*moderator_perms)

    read_mod_perms = [i for i in moderator_perms if i.codename.startswith('view_')]

    # set permissions for automation managers
    automanager_perms = [i for i in all_perms if i.content_type.app_label == 'agenda_culturel' and i.content_type.model in ['batchimportation', 'recurrentimport', 'categorisationrule']]
    Group.objects.get(name="Automation Manager").permissions.add(*automanager_perms)
    Group.objects.get(name="Automation Manager").permissions.add(*read_mod_perms)

    # set permissions for receptionists
    receptionist_perms = [i for i in all_perms if i.content_type.app_label == 'agenda_culturel' and i.content_type.model in ['contactmessage']]
    Group.objects.get(name="Receptionist").permissions.add(*receptionist_perms)
    Group.objects.get(name="Receptionist").permissions.add(*read_mod_perms)


class Migration(migrations.Migration):

    dependencies = [
        ('agenda_culturel', '0037_alter_batchimportation_options_and_more'),
    ]

    operations = [
        migrations.RunPython(update_groups_permissions),
    ]
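A remark on the two data migrations above (0036 and 0038): they import Group and Permission directly from django.contrib.auth.models instead of resolving historical models through apps.get_model(). That works as long as the live auth schema matches the state being migrated, but the usual replay-safe shape of such a step looks more like the sketch below; the group name and permission filter simply mirror the code above, and the rest is an assumption, not the authors' code:

def update_groups_permissions(apps, schema_editor):
    # Historical models survive later schema changes to the auth app.
    Group = apps.get_model("auth", "Group")
    Permission = apps.get_model("auth", "Permission")
    group, _ = Group.objects.get_or_create(name="Moderator")
    moderator_perms = Permission.objects.filter(
        content_type__app_label="agenda_culturel",
        content_type__model__in=["event", "duplicatedevents"],
    )
    group.permissions.add(*moderator_perms)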
Some files were not shown because too many files have changed in this diff.