Compare commits
84 Commits
Author | SHA1 | Date |
---|---|---|
Giacomo Leidi | 584186e831 | |
Giacomo Leidi | 9763ca5fbf | |
Giacomo Leidi | 904aa06629 | |
Giacomo Leidi | be715d201c | |
Giacomo Leidi | 647925acd3 | |
Giacomo Leidi | e5100d499e | |
Giacomo Leidi | 9ac1d55d02 | |
Giacomo Leidi | bf1c18f347 | |
Giacomo Leidi | e381c1b522 | |
Giacomo Leidi | 77a881980b | |
Giacomo Leidi | 5710d46874 | |
Giacomo Leidi | 9794b00cc0 | |
Giacomo Leidi | 5ebaa04f3d | |
Giacomo Leidi | 0f19cf4a9e | |
Giacomo Leidi | 8d3026523a | |
Giacomo Leidi | 1e43a4e12d | |
Giacomo Leidi | 45e1f551d8 | |
Giacomo Leidi | 056e0217aa | |
Giacomo Leidi | acce3a83fe | |
Giacomo Leidi | 775fb89cf6 | |
Giacomo Leidi | bf3170cb6f | |
Giacomo Leidi | ff7567dc1b | |
Giacomo Leidi | f16cffa44e | |
Giacomo Leidi | 7bcb374891 | |
Giacomo Leidi | b17dc556d7 | |
Giacomo Leidi | 201e259d37 | |
Giacomo Leidi | 9744f436ae | |
Giacomo Leidi | 34ebd8f982 | |
Giacomo Leidi | aaff82fe98 | |
Giacomo Leidi | 1c7e3c7ed5 | |
Giacomo Leidi | c40a7aca35 | |
Giacomo Leidi | 4757cc6ec8 | |
Giacomo Leidi | 6bd2d606df | |
Giacomo Leidi | 3874acf247 | |
Giacomo Leidi | 6b72b2630f | |
Giacomo Leidi | 28446c3401 | |
Giacomo Leidi | 1d3e5047e8 | |
Giacomo Leidi | 1c9a95db84 | |
Giacomo Leidi | 1678e836c9 | |
Giacomo Leidi | a445eedaea | |
Simone Robutti | 370e00d187 | |
Simone Robutti | ddc706e201 | |
Giacomo Leidi | 081ca87857 | |
Giacomo Leidi | cf9ffd2149 | |
Giacomo Leidi | 04f29e37e4 | |
Simone Robutti | 44340fde8f | |
Simone Robutti | 63a30bb483 | |
Giacomo Leidi | 2d328a30bf | |
Giacomo Leidi | 4ce6c7b171 | |
Simone Robutti | b6e203577e | |
Simone Robutti | 9d71ef36b9 | |
Giacomo Leidi | cf2fabefb4 | |
Giacomo Leidi | 9810c9d5a5 | |
Giacomo Leidi | b66c94c8a2 | |
Giacomo Leidi | 7f3ce9a55a | |
Giacomo Leidi | a9b90be963 | |
Simone Robutti | 94c85d8b48 | |
Giacomo Leidi | 4e40a1979e | |
Giacomo Leidi | 5f51d68c82 | |
Giacomo Leidi | 05a8c9d5b0 | |
Giacomo Leidi | 981fcf0486 | |
Giacomo Leidi | 002399161d | |
Simone Robutti | 3e7b9097a4 | |
Giacomo Leidi | af45f2b5ea | |
Giacomo Leidi | f8a614750c | |
Giacomo Leidi | e558034194 | |
Simone Robutti | 1217b17326 | |
Simone Robutti | 8b81ceedd0 | |
Simone Robutti | dae9dfd889 | |
Simone Robutti | cb0fe7b5fc | |
Simone Robutti | f04942eefe | |
Simone Robutti | 420f823dd4 | |
Giacomo Leidi | 9c77afa456 | |
Giacomo Leidi | db659e9cdc | |
Giacomo Leidi | ca878454b4 | |
Giacomo Leidi | 5afbcd2192 | |
magowiz | 8ff6555c60 | |
Giacomo Leidi | 529f83825e | |
Simone Robutti | bda4b8ee0d | |
Giacomo Leidi | e003cf9a90 | |
Giacomo Leidi | d19f3ac5ca | |
Simone Robutti | b0e88a9e1f | |
Simone Robutti | 11de1e1213 | |
Giacomo Leidi | 9c44c8d730 |
14
.envrc
14
.envrc
|
@ -1,10 +1,10 @@
|
|||
if command -v guix; then
|
||||
if has guix; then
|
||||
GUIX_PROFILE="${PWD}/.guix-root"
|
||||
rm -f "$GUIX_PROFILE"
|
||||
eval "$(guix time-machine -C channels-lock.scm -- shell -r "$GUIX_PROFILE" -D -f guix.scm -m manifest.scm --search-paths -L .)"
|
||||
eval "$(guix time-machine -C channels-lock.scm -- shell -r "$GUIX_PROFILE" -D -f guix.scm -m manifest.scm --search-paths)"
|
||||
|
||||
# Add development scripts to PATH
|
||||
export PATH="$(pwd)/scripts:${PATH}"
|
||||
PATH_add "$(pwd)/scripts"
|
||||
|
||||
venv_dir=".venv"
|
||||
|
||||
|
@ -13,7 +13,7 @@ if command -v guix; then
|
|||
pre-commit uninstall
|
||||
fi
|
||||
if [ ! -d "$venv_dir" ] ; then
|
||||
virtualenv -p `which python3.9` "$venv_dir"
|
||||
virtualenv -p `which python3` "$venv_dir"
|
||||
poetry install
|
||||
pre-commit install
|
||||
fi
|
||||
|
@ -21,14 +21,8 @@ if command -v guix; then
|
|||
clear
|
||||
git-cal --author="$(git config user.name)"
|
||||
|
||||
run-tests () {
|
||||
run_pipeline_tests.sh
|
||||
}
|
||||
export_function run-tests
|
||||
cat << EOF
|
||||
|
||||
run-tests Runs pytest in the current directory
|
||||
|
||||
The 'scripts' directory has been added to your PATH: you can now invoke scripts without typing the relative path.
|
||||
|
||||
EOF
|
||||
|
|
|
@ -4,49 +4,68 @@ name: CI
|
|||
|
||||
# Controls when the workflow will run
|
||||
on:
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- 'guix.scm'
|
||||
- 'manifest.scm'
|
||||
- 'channels-lock.scm'
|
||||
- '.envrc'
|
||||
- '.gitignore'
|
||||
- 'pre-commit-*.yaml'
|
||||
- Dockerfile
|
||||
- README.*
|
||||
- LICENSE
|
||||
- 'sample_settings/**'
|
||||
- 'etc/**'
|
||||
|
||||
push:
|
||||
# Sequence of patterns matched against refs/tags
|
||||
branches: ["master"]
|
||||
|
||||
paths-ignore:
|
||||
- 'guix.scm'
|
||||
- 'manifest.scm'
|
||||
- 'channels-lock.scm'
|
||||
- '.envrc'
|
||||
- '.gitignore'
|
||||
- 'pre-commit-*.yaml'
|
||||
- Dockerfile
|
||||
- README.*
|
||||
- LICENSE
|
||||
- 'sample_settings/**'
|
||||
- 'etc/**'
|
||||
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
|
||||
jobs:
|
||||
# This workflow contains a single job called "build"
|
||||
run-tests-dev:
|
||||
# The type of runner that the job will run on
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
# Steps represent a sequence of tasks that will be executed as part of the job
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: ["3.10", "3.11"]
|
||||
poetry-version: ["1.1.12", "1.7.0"]
|
||||
os: [ubuntu-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
# Runs a single command using the runners shell
|
||||
- name: Set up Python 3.9
|
||||
uses: actions/setup-python@v2
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.9"
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Run image
|
||||
uses: abatilo/actions-poetry@v2
|
||||
with:
|
||||
poetry-version: ${{ matrix.poetry-version }}
|
||||
- name: Setup a local virtual environment
|
||||
run: |
|
||||
poetry config virtualenvs.create true --local
|
||||
poetry config virtualenvs.in-project true --local
|
||||
- uses: actions/cache@v3
|
||||
name: Define a cache for the virtual environment based on the dependencies lock file
|
||||
with:
|
||||
path: ./.venv
|
||||
key: venv-${{ hashFiles('poetry.lock') }}
|
||||
- name: Install dependencies
|
||||
run: scripts/install_github_actions_dev_dependencies.sh
|
||||
- name: Run tests in dev env
|
||||
run: scripts/run_pipeline_tests.sh
|
||||
|
||||
run-tests-preprod:
|
||||
# The type of runner that the job will run on
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
# Steps represent a sequence of tasks that will be executed as part of the job
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
# Runs a single command using the runners shell
|
||||
- name: Install GNU Guix
|
||||
uses: PromyLOPh/guix-install-action@v1
|
||||
|
||||
# Runs a set of commands using the runners shell
|
||||
- name: Run tests in preprod env
|
||||
run: guix time-machine -C channels-lock.scm -- build -L . mobilizon-reshare.git
|
||||
|
|
|
@ -27,11 +27,11 @@ jobs:
|
|||
|
||||
# Runs a single command using the runners shell
|
||||
- name: Install GNU Guix
|
||||
uses: PromyLOPh/guix-install-action@v1
|
||||
uses: PromyLOPh/guix-install-action@v1.4
|
||||
|
||||
# Runs a set of commands using the runners shell
|
||||
- name: Build image
|
||||
run: scripts/build_docker_image.sh
|
||||
run: scripts/build_docker_image.sh -r
|
||||
- name: Upload pack (Docker)
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
|
@ -59,9 +59,9 @@ jobs:
|
|||
uses: fishinthecalculator/publish-docker-image-action@v0.1.10
|
||||
env:
|
||||
IMAGE_TAG: ${{ steps.vars.outputs.tag }}
|
||||
IMAGE_NAME_TAG: mobilizon-reshare-scheduler:latest
|
||||
IMAGE_NAME_TAG: mobilizon-reshare-scheduler-python:latest
|
||||
with:
|
||||
name: fishinthecalculator/mobilizon-reshare
|
||||
name: twcita/mobilizon-reshare
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
image: docker-image.tar.gz
|
||||
|
|
|
@ -177,10 +177,16 @@ fabric.properties
|
|||
.idea
|
||||
*/local_testing.toml
|
||||
.direnv/
|
||||
etc/
|
||||
var/
|
||||
docker-image.tar.gz
|
||||
.guix-root
|
||||
|
||||
# test run script
|
||||
test_run.sh
|
||||
|
||||
# directory where sphinx documents resides
|
||||
api_documentation/source/*
|
||||
!api_documentation/source/conf.py
|
||||
!api_documentation/source/index.rst
|
||||
!api_documentation/source/_static/
|
||||
./settings.toml
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="118.9" height="20"><linearGradient id="smooth" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="round"><rect width="118.9" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#round)"><rect width="56.2" height="20" fill="#555"/><rect x="56.2" width="62.7" height="20" fill="#007ec6"/><rect width="118.9" height="20" fill="url(#smooth)"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="110"><text x="291.0" y="150" fill="#010101" fill-opacity=".3" transform="scale(0.1)" textLength="462.0" lengthAdjust="spacing">LICENSE</text><text x="291.0" y="140" transform="scale(0.1)" textLength="462.0" lengthAdjust="spacing">LICENSE</text><text x="865.5000000000001" y="150" fill="#010101" fill-opacity=".3" transform="scale(0.1)" textLength="527.0" lengthAdjust="spacing">Coopyleft</text><text x="865.5000000000001" y="140" transform="scale(0.1)" textLength="527.0" lengthAdjust="spacing">Coopyleft</text><a xlink:href="https://github.com/Tech-Workers-Coalition-Italia/mobilizon-reshare/blob/master/LICENSE"><rect width="56.2" height="20" fill="rgba(0,0,0,0)"/></a><a xlink:href="https://github.com/Tech-Workers-Coalition-Italia/mobilizon-reshare/blob/master/LICENSE"><rect x="56.2" width="62.7" height="20" fill="rgba(0,0,0,0)"/></a></g></svg>
|
After Width: | Height: | Size: 1.4 KiB |
|
@ -0,0 +1 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="71.6" height="20"><linearGradient id="smooth" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="round"><rect width="71.6" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#round)"><rect width="33.6" height="20" fill="#555"/><rect x="33.6" width="38.0" height="20" fill="#007ec6"/><rect width="71.6" height="20" fill="url(#smooth)"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="110"><text x="178.0" y="150" fill="#010101" fill-opacity=".3" transform="scale(0.1)" textLength="236.0" lengthAdjust="spacing">pypi</text><text x="178.0" y="140" transform="scale(0.1)" textLength="236.0" lengthAdjust="spacing">pypi</text><text x="516.0" y="150" fill="#010101" fill-opacity=".3" transform="scale(0.1)" textLength="280.0" lengthAdjust="spacing">0.3.6</text><text x="516.0" y="140" transform="scale(0.1)" textLength="280.0" lengthAdjust="spacing">0.3.6</text><a xlink:href="https://pypi.org/project/mobilizon-reshare/"><rect width="33.6" height="20" fill="rgba(0,0,0,0)"/></a><a xlink:href="https://pypi.org/project/mobilizon-reshare/"><rect x="33.6" width="38.0" height="20" fill="rgba(0,0,0,0)"/></a></g></svg>
|
After Width: | Height: | Size: 1.3 KiB |
|
@ -0,0 +1 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="131.5" height="20"><linearGradient id="smooth" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="round"><rect width="131.5" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#round)"><rect width="65.5" height="20" fill="#555"/><rect x="65.5" width="66.0" height="20" fill="#007ec6"/><rect width="131.5" height="20" fill="url(#smooth)"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="110"><image x="5" y="3" width="14" height="14" xlink:href="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxMDAgMTAwIj4KICA8ZGVmcz4KICAgIDxsaW5lYXJHcmFkaWVudCBpZD0icHlZZWxsb3ciIGdyYWRpZW50VHJhbnNmb3JtPSJyb3RhdGUoNDUpIj4KICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iI2ZlNSIgb2Zmc2V0PSIwLjYiLz4KICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iI2RhMSIgb2Zmc2V0PSIxIi8+CiAgICA8L2xpbmVhckdyYWRpZW50PgogICAgPGxpbmVhckdyYWRpZW50IGlkPSJweUJsdWUiIGdyYWRpZW50VHJhbnNmb3JtPSJyb3RhdGUoNDUpIj4KICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iIzY5ZiIgb2Zmc2V0PSIwLjQiLz4KICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iIzQ2OCIgb2Zmc2V0PSIxIi8+CiAgICA8L2xpbmVhckdyYWRpZW50PgogIDwvZGVmcz4KCiAgPHBhdGggZD0iTTI3LDE2YzAtNyw5LTEzLDI0LTEzYzE1LDAsMjMsNiwyMywxM2wwLDIyYzAsNy01LDEyLTExLDEybC0yNCwwYy04LDAtMTQsNi0xNCwxNWwwLDEwbC05LDBjLTgsMC0xMy05LTEzLTI0YzAtMTQsNS0yMywxMy0yM2wzNSwwbDAtM2wtMjQsMGwwLTlsMCwweiBNODgsNTB2MSIgZmlsbD0idXJsKCNweUJsdWUpIi8+CiAgPHBhdGggZD0iTTc0LDg3YzAsNy04LDEzLTIzLDEzYy0xNSwwLTI0LTYtMjQtMTNsMC0yMmMwLTcsNi0xMiwxMi0xMmwyNCwwYzgsMCwxNC03LDE0LTE1bDAtMTBsOSwwYzcsMCwxMyw5LDEzLDIzYzAsMTUtNiwyNC0xMywyNGwtMzUsMGwwLDNsMjMsMGwwLDlsMCwweiBNMTQwLDUwdjEiIGZpbGw9InVybCgjcHlZZWxsb3cpIi8+CgogIDxjaXJjbGUgcj0iNCIgY3g9IjY0IiBjeT0iODgiIGZpbGw9IiNGRkYiLz4KICA8Y2lyY2xlIHI9IjQiIGN4PSIzNyIgY3k9IjE1IiBmaWxsPSIjRkZGIi8+Cjwvc3ZnPgo="/><text x="422.5" y="150" fill="#010101" fill-opacity=".3" 
transform="scale(0.1)" textLength="385.0" lengthAdjust="spacing">python</text><text x="422.5" y="140" transform="scale(0.1)" textLength="385.0" lengthAdjust="spacing">python</text><text x="975.0" y="150" fill="#010101" fill-opacity=".3" transform="scale(0.1)" textLength="560.0" lengthAdjust="spacing">3.10, 3.11</text><text x="975.0" y="140" transform="scale(0.1)" textLength="560.0" lengthAdjust="spacing">3.10, 3.11</text><a xlink:href="https://www.python.org/"><rect width="65.5" height="20" fill="rgba(0,0,0,0)"/></a><a xlink:href="https://www.python.org/"><rect x="65.5" width="66.0" height="20" fill="rgba(0,0,0,0)"/></a></g></svg>
|
After Width: | Height: | Size: 2.6 KiB |
|
@ -3,7 +3,7 @@ repos:
|
|||
rev: stable
|
||||
hooks:
|
||||
- id: black
|
||||
language_version: python3.9
|
||||
language_version: python3.10
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v1.2.3
|
||||
hooks:
|
||||
|
|
|
@ -0,0 +1,24 @@
|
|||
FROM python:3.10-alpine3.16
|
||||
|
||||
ENV ENV_FOR_DYNACONF=${ENV_FOR_DYNACONF} \
|
||||
PYTHONFAULTHANDLER=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
PYTHONHASHSEED=random \
|
||||
PIP_NO_CACHE_DIR=off \
|
||||
PIP_DISABLE_PIP_VERSION_CHECK=on \
|
||||
PIP_DEFAULT_TIMEOUT=100 \
|
||||
POETRY_VERSION=1.0.0
|
||||
|
||||
# System deps:
|
||||
RUN pip install "poetry==$POETRY_VERSION"
|
||||
|
||||
# Copy only requirements to cache them in docker layer
|
||||
WORKDIR /app
|
||||
COPY poetry.lock pyproject.toml /app/
|
||||
|
||||
# Project initialization:
|
||||
RUN poetry config virtualenvs.create false \
|
||||
&& poetry install $(test "$ENV_FOR_DYNACONF" == production && echo "--no-dev") --no-interaction --no-ansi
|
||||
|
||||
# Creating folders, and files for a project:
|
||||
COPY . /app
|
14
README.md
14
README.md
|
@ -1,4 +1,7 @@
|
|||
[![CI](https://github.com/Tech-Workers-Coalition-Italia/mobilizon-reshare/actions/workflows/main.yml/badge.svg?branch=master)](https://github.com/Tech-Workers-Coalition-Italia/mobilizon-reshare/actions/workflows/main.yml)
|
||||
[![Python versions](https://raw.githubusercontent.com/Tech-Workers-Coalition-Italia/mobilizon-reshare/master/.img/python.svg)](https://python.org)
|
||||
[![PyPI version](https://raw.githubusercontent.com/Tech-Workers-Coalition-Italia/mobilizon-reshare/master/.img/pypi.svg)](https://pypi.org/project/mobilizon-reshare/)
|
||||
[![License](https://raw.githubusercontent.com/Tech-Workers-Coalition-Italia/mobilizon-reshare/master/.img/license.svg)](https://github.com/Tech-Workers-Coalition-Italia/mobilizon-reshare/blob/master/LICENSE)
|
||||
|
||||
The goal of `mobilizon_reshare` is to provide a suite to reshare Mobilizon events on a broad selection of platforms. This
|
||||
tool enables an organization to automate their social media strategy in regards
|
||||
|
@ -24,7 +27,7 @@ to publish an event, with the minimization of human effort as its first priority
|
|||
|
||||
## Installation
|
||||
|
||||
`mobilizon_reshare` is distributed through Pypi and [DockerHub](https://hub.docker.com/r/fishinthecalculator/mobilizon-reshare). Use
|
||||
`mobilizon_reshare` is distributed through [Pypi](https://pypi.org/project/mobilizon-reshare/) and [DockerHub](https://hub.docker.com/r/twcita/mobilizon-reshare). Use
|
||||
|
||||
```shell
|
||||
$ pip install mobilizon-reshare
|
||||
|
@ -37,13 +40,12 @@ commands and their description.
|
|||
|
||||
### Guix package
|
||||
|
||||
If you run the Guix System you can install `mobilizon_reshare` by running:
|
||||
If you run Guix you can install `mobilizon-reshare` by adding our [Guix channel](https://git.sr.ht/~fishinthecalculator/mobilizon-reshare-guix#configure) to your `.config/guix/channels.scm`.
|
||||
|
||||
``` shell
|
||||
$ guix install -L . mobilizon-reshare.git
|
||||
```
|
||||
|
||||
To use the same dependencies used in CI env:
|
||||
|
||||
|
||||
To run `mobilizon-reshare` from master you can run the following command from the root of the repository:
|
||||
|
||||
``` shell
|
||||
$ guix time-machine -C channels-lock.scm -- install -L . mobilizon-reshare.git
|
||||
|
|
|
@ -0,0 +1,20 @@
|
|||
# Minimal makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line, and also
|
||||
# from the environment for the first two.
|
||||
SPHINXOPTS ?=
|
||||
SPHINXBUILD ?= sphinx-build
|
||||
SOURCEDIR = source
|
||||
BUILDDIR = build
|
||||
|
||||
# Put it first so that "make" without argument is like "make help".
|
||||
help:
|
||||
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||
|
||||
.PHONY: help Makefile
|
||||
|
||||
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||
%: Makefile
|
||||
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
|
@ -0,0 +1,54 @@
|
|||
# Configuration file for the Sphinx documentation builder.
|
||||
#
|
||||
# This file only contains a selection of the most common options. For a full
|
||||
# list see the documentation:
|
||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html
|
||||
|
||||
# -- Path setup --------------------------------------------------------------
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#
|
||||
# import os
|
||||
# import sys
|
||||
# sys.path.insert(0, os.path.abspath('.'))
|
||||
|
||||
|
||||
# -- Project information -----------------------------------------------------
|
||||
|
||||
project = "mobilizon-reshare"
|
||||
copyright = "2022, -"
|
||||
author = "-"
|
||||
|
||||
|
||||
# -- General configuration ---------------------------------------------------
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = ["sphinxcontrib.napoleon", "sphinx_autodoc_typehints"]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ["_templates"]
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
# This pattern also affects html_static_path and html_extra_path.
|
||||
exclude_patterns = []
|
||||
|
||||
|
||||
# -- Options for HTML output -------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
#
|
||||
html_theme = "sphinx_material"
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ["_static"]
|
||||
|
||||
# disable full module path in methods list
|
||||
add_module_names = False
|
|
@ -0,0 +1,20 @@
|
|||
.. mobilizon-reshare documentation master file, created by
|
||||
sphinx-quickstart on Mon Feb 21 12:57:17 2022.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
Welcome to mobilizon-reshare's documentation!
|
||||
=============================================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Contents:
|
||||
|
||||
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
|
@ -1,13 +1,18 @@
|
|||
(use-modules (guix channels))
|
||||
(define-module (channels-lock)
|
||||
#:use-module (guix channels))
|
||||
|
||||
(list
|
||||
(channel
|
||||
(name 'mobilizon-reshare)
|
||||
(url "https://git.sr.ht/~fishinthecalculator/mobilizon-reshare-guix")
|
||||
(branch "main"))
|
||||
(channel
|
||||
(name 'guix)
|
||||
(url "https://git.savannah.gnu.org/git/guix.git")
|
||||
(commit
|
||||
"1121fa432f0e6422d5f9ebb96fb0014c4d5231f5")
|
||||
"b7eb1a8116b2caee7acf26fb963ae998fbdb4253")
|
||||
(introduction
|
||||
(make-channel-introduction
|
||||
"9edb3f66fd807b096b48283debdcddccfea34bad"
|
||||
"afb9f2752315f131e4ddd44eba02eed403365085"
|
||||
(openpgp-fingerprint
|
||||
"BBB0 2DDF 2CEA F6A8 0D1D E643 A2A0 6DF2 A33A 54FA")))))
|
||||
|
|
|
@ -25,3 +25,25 @@ To run the test suite, run `scripts/run_pipeline_tests.sh` from the root of the
|
|||
|
||||
At the moment integration tests are not automated and are executed manually. Reach out to us if you want to
|
||||
access the testing environment or you want to help automate the integration tests.
|
||||
|
||||
### How to handle migrations
|
||||
|
||||
Changes to the data model need to be handled through migrations. We use aerich to manage the migration files.
|
||||
Both our CLI and our web service are configured in such a way that migrations are run transparently when the package is
|
||||
updated. If you want to test that the update doesn't corrupt your data, we suggest trying the update in a test database.
|
||||
|
||||
To create a new migration file, use aerich CLI. It will take care of generating the file. If further code is necessary,
|
||||
add it to the new migration file.
|
||||
|
||||
Since we support two database (sqlite and postgres) that have slightly different dialects and since aerich doesn't
|
||||
really support this scenario, it is necessary to generate migrations separately and place the migrations files in the
|
||||
respective folders.
|
||||
|
||||
Aerich picks up the migrations according to the scheme of the db in the configuration.
|
||||
|
||||
Currently the consistency of the migrations for the different databases is not tested so please pay extra care when
|
||||
committing a change and request special review.
|
||||
|
||||
Aerich configuration is specified in the pyproject.toml file. Since it doesn't support multiple databases, we have two
|
||||
configuration files that allow to run aerich on different databases if you enter their respective migration folders.
|
||||
You can find them in mobilizon_reshare/migrations.
|
|
@ -0,0 +1,76 @@
|
|||
# Beating dependency hell with GNU Guix
|
||||
|
||||
`mobilizon-reshare`'s distribution process relies quite a bit upon GNU Guix. It's involved in our CI pipeline, and it builds the [OCI compliant](https://opencontainers.org/) container image available on Docker Hub. It provides us with [inspectable](https://hpc.guix.info/blog/2021/10/when-docker-images-become-fixed-point/), [bit-for-bit reproducible](https://reproducible-builds.org/) and [fully bootstrappable](https://bootstrappable.org) images which in turns allows for strong control on what code is actually bundled within the image and it should prevent entire classes of supply-chain attacks starting from the [Trusting Trust attack](https://www.cs.cmu.edu/~rdriley/487/papers/Thompson_1984_ReflectionsonTrustingTrust.pdf) up until the many recent [attacks](https://www.sonatype.com/resources/state-of-the-software-supply-chain-2021) to many FOSS software registries.
|
||||
|
||||
To allow for interoperability with the Python ecosystem, we also ship a `pyproject.toml` that we handle with [Poetry](https://python-poetry.org/). The next paragraph will elaborate on the interactions between Poetry and Guix.
|
||||
|
||||
## Update the dependency graph of mobilizon-reshare
|
||||
|
||||
> **Beware!** - Dependency updates are better delivered to master as a single commit, to avoid confusing the CI.
|
||||
|
||||
### Python dependencies
|
||||
|
||||
We **must** keep Poetry and Guix version as much aligned as possible, to prevent unpredictable behavior. All the following content assumes this invariant.
|
||||
|
||||
Everything starts from `pyproject.toml`: usually your IDE warns you about outdated dependency, so let's assume you want to bump the version of a Python package. First keep in mind that Poetry's [tilde requirements](https://python-poetry.org/docs/dependency-specification/#tilde-requirements) are SemVer compatible but stricter than caret requirements so they should make matching Guix version easier. Then it's time to actually edit `pyproject.toml` and bump the version of a package.
|
||||
|
||||
To update Python dependencies and test your changes the steps are:
|
||||
|
||||
```shell
|
||||
$ poetry update
|
||||
$ guix time-machine -C channels-lock.scm -- build -f guix.scm
|
||||
$ scripts/build_docker_image.sh
|
||||
```
|
||||
|
||||
If these steps succeed you can safely commit your changes. If Guix fails you have to examine the output of the command that failed and figure out the problem. 99% of the times it'll be a version mismatch, as Guix's [`python-build-system`](https://guix.gnu.org/en/manual/devel/en/guix.html#index-python_002dbuild_002dsystem) has a `sanity-check` phase that'll try to instantiate the entry point generated by Poetry that, among other things, checks for runtime dependencies versions and errors out if it finds a mismatch between the version actually available in the runtime environment and the version defined in `pyproject.toml`.
|
||||
|
||||
You now have two alternatives:
|
||||
|
||||
1. You try to follow the next step about system dependencies. `channels-lock.scm` locks everything: as long as the Guix commit specified in that file does not change, `guix time-machine` will look for the exact same package graph. This means that every time we build the image we get the same exact dependencies we ask for, but this semantics is slightly different from Poetry's lock-file which instead tracks the **latest version** (within the constraints) available on Pypi. Having a more updated Guix version may allow for a more updated mapping of Pypi.
|
||||
2. You find the package (or packages) responsible for the mismatch and try to manipulate it to follow Poetry's constraints. This requires some basic Scheme understanding but nothing complex. There are many ways a Guix package can be programmatically manipulated as it's just a structured Scheme record, you can start by looking into [package variants](https://guix.gnu.org/en/manual/devel/en/guix.html#Defining-Package-Variants) or also directly at the [channel code](https://github.com/fishinthecalculator/mobilizon-reshare-guix/tree/main/modules/mobilizon-reshare).
|
||||
|
||||
### System dependencies
|
||||
|
||||
Python's own dependencies are dependencies too! Guix freezes the whole dependency graph of an artifact with [channels specifications](https://guix.gnu.org/en/manual/devel/en/guix.html#Replicating-Guix) so to update "system" dependencies you need to follow these steps.
|
||||
|
||||
First let's update our Guix version to the latest commit:
|
||||
|
||||
```shell
|
||||
$ guix pull
|
||||
Updating channel 'guix' from Git repository at 'https://git.savannah.gnu.org/git/guix.git'...
|
||||
Authenticating channel 'guix', commits 9edb3f6 to d41c82b (162 new commits)...
|
||||
Building from these channels:
|
||||
guix https://git.savannah.gnu.org/git/guix.git d41c82b
|
||||
substitute: updating substitutes from 'https://ci.guix.gnu.org'... 100.0%\
|
||||
|
||||
[...]
|
||||
|
||||
building package cache...
|
||||
building profile with 3 packages...
|
||||
$
|
||||
```
|
||||
|
||||
Channels specification define the Guix commit that should be used to fetch the right dependency graph, so what we want to do is replace the commit in `channels-lock.scm` with the one we just pulled:
|
||||
|
||||
```shell
|
||||
$ guix describe
|
||||
Generation 31 Mar 12 2022 12:35:00 (current)
|
||||
guix d41c82b
|
||||
repository URL: https://git.savannah.gnu.org/git/guix.git
|
||||
branch: master
|
||||
commit: d41c82b481fd0f5c7d45d6e2629fdf9d2085205b
|
||||
|
||||
$ vim channels-lock.scm
|
||||
```
|
||||
|
||||
To test our change we can run:
|
||||
|
||||
```shell
|
||||
$ guix time-machine -C channels-lock.scm -- build -f guix.scm
|
||||
```
|
||||
|
||||
But a better test would be to build the Docker image, as that actually bundles all required runtime dependencies:
|
||||
|
||||
```shell
|
||||
$ scripts/build_docker_image.sh
|
||||
```
|
|
@ -18,29 +18,32 @@ $
|
|||
|
||||
Hurray 🎉 ! Now you can hack on `mobilizon-reshare` without worrying about dependencies.
|
||||
|
||||
## Installing Guix
|
||||
## Guix
|
||||
## Installation
|
||||
|
||||
*Caveat:* Guix currently runs only on Linux, if you run a different OS you're probably better off with something like [poetry](https://python-poetry.org/). Just beware that you may end up with slightly different behavior, since `poetry` only locks Python dependencies.
|
||||
|
||||
### Debian Bullseye
|
||||
#### Debian/Ubuntu/Linux Mint and derivatives
|
||||
|
||||
If you run Debian Bullseye (or one of its derivatives) installing Guix is achieved with:
|
||||
If you are running Debian or a derivative such as Ubuntu or Linux Mint installing Guix is achieved with:
|
||||
|
||||
```shell
|
||||
$ sudo apt install guix
|
||||
```
|
||||
|
||||
If you want to find out if your distribution is a derivative of Debian Bullseye you can run:
|
||||
#### openSUSE
|
||||
|
||||
Likewise on openSUSE:
|
||||
|
||||
```shell
|
||||
$ sudo cat /etc/debian_release
|
||||
$ sudo zypper install guix
|
||||
```
|
||||
|
||||
### Arch Linux
|
||||
#### Arch Linux
|
||||
|
||||
The Arch Wiki has a very good [article](https://wiki.archlinux.org/title/Guix).
|
||||
|
||||
### Other distributions
|
||||
#### Other distributions
|
||||
|
||||
For all other distributions you can install Guix with the installer script. It will guide you through the process of installing Guix.
|
||||
|
||||
|
@ -48,9 +51,9 @@ For every other distributions you can install Guix with the installer script. It
|
|||
$ curl https://git.savannah.gnu.org/cgit/guix.git/plain/etc/guix-install.sh | sudo bash
|
||||
```
|
||||
|
||||
Beware that piping to `sudo bash` is usually a *very* bad idea. Before running the above command please read the script and the Guix manual.
|
||||
Beware that piping to `sudo bash` is usually a *very* bad idea. Before running the above command please read the script and the [Guix manual](https://guix.gnu.org/en/manual/en/guix.html#Binary-Installation).
|
||||
|
||||
## Configuring Guix
|
||||
### Configuring Guix
|
||||
|
||||
To make Guix applications work out of the box you should add the following variables to your `.bash_profile` (or its equivalent for shells other than Bash):
|
||||
|
||||
|
@ -77,7 +80,9 @@ $ guix install nss-certs
|
|||
$ sudo -i guix install glibc-locales
|
||||
```
|
||||
|
||||
## Installing direnv
|
||||
## direnv
|
||||
|
||||
### Installation
|
||||
|
||||
Once you have Guix properly setup, you can install `direnv` with:
|
||||
|
||||
|
@ -115,4 +120,4 @@ Alternately, see `guix package --search-paths'.
|
|||
$
|
||||
```
|
||||
|
||||
when you see a message like that you can either run it to make the current shell work with the new packages installed by Guix or just close the current shell and spawn another, this way it'll put Guix packages in the right plache in your `PATH`.
|
||||
when you see a message like that you can either run it to make the current shell work with the new packages installed by Guix or just close the current shell and spawn another, this way it'll put Guix packages in the right place in your `PATH`.
|
||||
|
|
|
@ -58,9 +58,11 @@ A Publisher is responsible publishing an event or a message on a given platform.
|
|||
|
||||
Currently the following publishers are supported:
|
||||
|
||||
* Telegram
|
||||
* Zulip
|
||||
* Twitter
|
||||
- Facebook
|
||||
- Mastodon
|
||||
- Twitter
|
||||
- Telegram
|
||||
- Zulip
|
||||
|
||||
### Notifier
|
||||
|
||||
|
|
|
@ -0,0 +1,12 @@
|
|||
version: "3.7"
|
||||
services:
|
||||
db:
|
||||
image: postgres:13
|
||||
env_file:
|
||||
- ./.env
|
||||
healthcheck:
|
||||
test: ["CMD", "pg_isready", "-U", "mobilizon_reshare"]
|
||||
interval: 5s
|
||||
retries: 5
|
||||
ports:
|
||||
- 5432:5432
|
|
@ -0,0 +1,34 @@
|
|||
version: "3.7"
|
||||
services:
|
||||
db:
|
||||
image: postgres:13
|
||||
env_file:
|
||||
- ./.env
|
||||
volumes:
|
||||
- postgres-db-volume:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: ["CMD", "pg_isready", "-U", "mobilizon_reshare"]
|
||||
interval: 5s
|
||||
retries: 5
|
||||
ports:
|
||||
- 5432:5432
|
||||
web:
|
||||
build: .
|
||||
command: poetry run mobilizon-reshare web
|
||||
#command: sh
|
||||
environment:
|
||||
SECRETS_FOR_DYNACONF: /app/.secrets.toml
|
||||
SETTINGS_FILE_FOR_DYNACONF: /app/settings.toml
|
||||
ENV_FOR_DYNACONF: development
|
||||
volumes:
|
||||
- ./sample_settings/docker_web/.sample_secrets.toml:/app/.secrets.toml
|
||||
- ./sample_settings/docker_web/settings.toml:/app/settings.toml
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
- /etc/timezone:/etc/timezone:ro
|
||||
- postgres-db-volume:/var/lib/postgresql
|
||||
- ./:/app
|
||||
ports:
|
||||
- 8000:8000
|
||||
|
||||
volumes:
|
||||
postgres-db-volume:
|
|
@ -1,14 +1,14 @@
|
|||
version: "3.7"
|
||||
services:
|
||||
mobilizon-reshare:
|
||||
image: fishinthecalculator/mobilizon-reshare:latest
|
||||
image: twcita/mobilizon-reshare:v0.3.6
|
||||
environment:
|
||||
SECRETS_FOR_DYNACONF: /etc/xdg/mobilizon-reshare/0.2.0/.secrets.toml
|
||||
SECRETS_FOR_DYNACONF: /etc/xdg/mobilizon-reshare/0.3.6/.secrets.toml
|
||||
ENV_FOR_DYNACONF: production
|
||||
MOBILIZON_RESHARE_INTERVAL: "*/15 10-18 * * 1-5"
|
||||
MOBILIZON_RESHARE_INTERVAL: "*/15 10-18 * * 0-4"
|
||||
volumes:
|
||||
- ./.secrets.toml:/etc/xdg/mobilizon-reshare/0.2.0/.secrets.toml:ro
|
||||
- ./mobilizon_reshare.toml:/etc/xdg/mobilizon-reshare/0.2.0/mobilizon_reshare.toml:ro
|
||||
- ./.secrets.toml:/etc/xdg/mobilizon-reshare/0.3.6/.secrets.toml:ro
|
||||
- ./mobilizon_reshare.toml:/etc/xdg/mobilizon-reshare/0.3.6/mobilizon_reshare.toml:ro
|
||||
- ./var:/var/lib/mobilizon-reshare
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
- /etc/timezone:/etc/timezone:ro
|
||||
|
|
|
@ -1,28 +0,0 @@
|
|||
(define-module (docker image)
|
||||
#:use-module (guix build-system python)
|
||||
#:use-module (guix gexp) ;; for #$ and #~
|
||||
#:use-module (guix packages)
|
||||
#:use-module (docker mobilizon-reshare) ;; for mobilizon-reshare.git
|
||||
#:use-module (gnu packages python))
|
||||
|
||||
(define-public mobilizon-reshare-scheduler
|
||||
(package (inherit mobilizon-reshare.git)
|
||||
(name "mobilizon-reshare-scheduler")
|
||||
(build-system python-build-system)
|
||||
(arguments
|
||||
`(#:phases
|
||||
(modify-phases %standard-phases
|
||||
(delete 'configure)
|
||||
(delete 'build)
|
||||
(delete 'check)
|
||||
(replace 'install
|
||||
(lambda* (#:key outputs #:allow-other-keys)
|
||||
(let ((bin (string-append (assoc-ref outputs "out")
|
||||
"/bin")))
|
||||
(mkdir-p bin)
|
||||
(install-file "scripts/scheduler.py" bin)))))))
|
||||
(propagated-inputs (list mobilizon-reshare.git
|
||||
python-apscheduler-for-telegram-bot))
|
||||
(synopsis "Mobilizon Reshare's scheduler")
|
||||
(description "This script is intended to start a scheduler
|
||||
running @code{mobilizon-reshare}.")))
|
|
@ -1,384 +0,0 @@
|
|||
(define-module (docker mobilizon-reshare)
|
||||
#:use-module (guix download)
|
||||
#:use-module (guix gexp)
|
||||
#:use-module (guix git-download)
|
||||
#:use-module (guix packages)
|
||||
#:use-module (guix utils)
|
||||
#:use-module ((guix licenses) #:prefix license:)
|
||||
#:use-module (guix build-system python)
|
||||
#:use-module (gnu packages check)
|
||||
#:use-module (gnu packages databases)
|
||||
#:use-module (gnu packages markup)
|
||||
#:use-module (gnu packages openstack)
|
||||
#:use-module (gnu packages python-build)
|
||||
#:use-module (gnu packages python-check)
|
||||
#:use-module (gnu packages python-crypto)
|
||||
#:use-module (gnu packages python-web)
|
||||
#:use-module (gnu packages python-xyz)
|
||||
#:use-module (gnu packages qt)
|
||||
#:use-module (gnu packages time)
|
||||
#:use-module (ice-9 popen)
|
||||
#:use-module (ice-9 rdelim)
|
||||
#:use-module (srfi srfi-1))
|
||||
|
||||
(define %source-dir (getcwd))
|
||||
|
||||
(define coopyleft
|
||||
(let ((license (@@ (guix licenses) license)))
|
||||
(license "Coopyleft"
|
||||
"https://wiki.coopcycle.org/en:license"
|
||||
"Coopyleft License")))
|
||||
|
||||
(define-public python-tweepy
|
||||
(package
|
||||
(name "python-tweepy")
|
||||
(version "4.1.0")
|
||||
(source
|
||||
(origin
|
||||
(method git-fetch)
|
||||
(uri
|
||||
(git-reference
|
||||
(url "https://github.com/tweepy/tweepy")
|
||||
(commit (string-append "v" version))))
|
||||
(file-name (git-file-name name version))
|
||||
(sha256
|
||||
(base32
|
||||
"1c0paxc38i5jq8i20f9xwv966sap4nnhgnbdxg3611pllnzg5wdv"))))
|
||||
(build-system python-build-system)
|
||||
(arguments
|
||||
`(#:phases
|
||||
(modify-phases %standard-phases
|
||||
(replace 'check
|
||||
(lambda* (#:key tests? #:allow-other-keys)
|
||||
(when tests?
|
||||
(invoke "python" "-m" "unittest")))))))
|
||||
(propagated-inputs
|
||||
(list python-aiohttp python-requests python-requests-oauthlib))
|
||||
(native-inputs
|
||||
(list python-coveralls python-tox python-vcrpy))
|
||||
(home-page "https://www.tweepy.org/")
|
||||
(synopsis "Twitter library for Python")
|
||||
(description "Twitter library for Python")
|
||||
(license license:expat)))
|
||||
|
||||
(define-public python-facebook-sdk
|
||||
(package
|
||||
(name "python-facebook-sdk")
|
||||
(version "3.1.0")
|
||||
(source
|
||||
(origin
|
||||
(method url-fetch)
|
||||
(uri (pypi-uri "facebook-sdk" version))
|
||||
(sha256
|
||||
(base32 "138grz0n6plzdqgi4h6hhszf58bsvx9v76cwj51g1nd3kvkd5g6a"))))
|
||||
(build-system python-build-system)
|
||||
(propagated-inputs `(("python-requests" ,python-requests)))
|
||||
(home-page "https://facebook-sdk.readthedocs.io")
|
||||
(synopsis
|
||||
"Facebook Graph API client in Python")
|
||||
(description
|
||||
"This client library is designed to support the Facebook Graph API and
|
||||
the official Facebook JavaScript SDK, which is the canonical way to implement
|
||||
Facebook authentication.")
|
||||
(license license:asl2.0)))
|
||||
|
||||
(define-public python-facebook-sdk.git
|
||||
(let ((version (package-version python-facebook-sdk))
|
||||
(revision "0")
|
||||
(commit "3fa89fec6a20dd070ccf57968c6f89256f237f54"))
|
||||
(package (inherit python-facebook-sdk)
|
||||
(name "python-facebook-sdk.git")
|
||||
(version (git-version version revision commit))
|
||||
(source
|
||||
(origin
|
||||
(method git-fetch)
|
||||
(uri
|
||||
(git-reference
|
||||
(url "https://github.com/mobolic/facebook-sdk")
|
||||
(commit commit)))
|
||||
(file-name (git-file-name name version))
|
||||
(sha256
|
||||
(base32
|
||||
"0vayxkg6p8wdj63qvzr24dj3q7rkyhr925b31z2qv2mnbas01dmg"))))
|
||||
(arguments
|
||||
;; Tests depend on network access.
|
||||
`(#:tests? #false)))))
|
||||
|
||||
(define-public python-ddlparse
|
||||
(package
|
||||
(name "python-ddlparse")
|
||||
(version "1.10.0")
|
||||
(source
|
||||
(origin
|
||||
(method url-fetch)
|
||||
(uri (pypi-uri "ddlparse" version))
|
||||
(sha256
|
||||
(base32 "1nh8m6rxslwk05daxshxmgk41qfp18yynydba49b13l4m8dnh634"))))
|
||||
(build-system python-build-system)
|
||||
(arguments
|
||||
;; Tests depend on network access.
|
||||
`(#:tests? #false))
|
||||
(propagated-inputs (list python-pyparsing))
|
||||
(home-page "http://github.com/shinichi-takii/ddlparse")
|
||||
(synopsis "DDL parase and Convert to BigQuery JSON schema")
|
||||
(description "DDL parase and Convert to BigQuery JSON schema")
|
||||
(license #f)))
|
||||
|
||||
(define-public python-dictdiffer/fixed
|
||||
(package (inherit python-dictdiffer)
|
||||
(arguments
|
||||
(substitute-keyword-arguments (package-arguments python-send2trash)
|
||||
((#:phases phases)
|
||||
`(modify-phases ,phases
|
||||
(delete 'check)))))))
|
||||
|
||||
(define-public python-pypika-tortoise-0.1.3
|
||||
(package (inherit python-pypika-tortoise)
|
||||
(version "0.1.3")
|
||||
(source
|
||||
(origin
|
||||
(method url-fetch)
|
||||
(uri (pypi-uri "pypika-tortoise" version))
|
||||
(sha256
|
||||
(base32 "066jb88f3hk42sks69gv6w7k5irf6r0ssbly1n41a3pb19p2vpzc"))))))
|
||||
|
||||
(define-public python-tortoise-orm-0.18.1
|
||||
(package (inherit python-tortoise-orm)
|
||||
(version "0.18.1")
|
||||
(source
|
||||
(origin
|
||||
(method url-fetch)
|
||||
(uri (pypi-uri "tortoise-orm" version))
|
||||
(sha256
|
||||
(base32 "1c8xq3620z04i1yp8n6bfshi98qkjjydkbs3zld78a885p762wsk"))))
|
||||
(arguments
|
||||
`(#:tests? #f
|
||||
#:phases
|
||||
(modify-phases %standard-phases
|
||||
(delete 'sanity-check))))
|
||||
(propagated-inputs
|
||||
(modify-inputs (package-propagated-inputs python-tortoise-orm)
|
||||
(replace "python-pypika-tortoise" python-pypika-tortoise-0.1.3)))))
|
||||
|
||||
(define-public python-aerich
|
||||
(package
|
||||
(name "python-aerich")
|
||||
(version "0.6.2")
|
||||
(source
|
||||
(origin
|
||||
(method url-fetch)
|
||||
(uri (pypi-uri "aerich" version))
|
||||
(sha256
|
||||
(base32 "1r4xqw9x0fvdjbd36riz72n3ih1p7apv2p92lq1h6nwjfzwr2jvq"))))
|
||||
(build-system python-build-system)
|
||||
(propagated-inputs
|
||||
(list python-asyncmy
|
||||
python-asyncpg
|
||||
python-click
|
||||
python-ddlparse
|
||||
python-dictdiffer/fixed
|
||||
python-pytz
|
||||
python-pypika-tortoise-0.1.3
|
||||
python-tomlkit
|
||||
python-tortoise-orm-0.18.1))
|
||||
(home-page "https://github.com/tortoise/aerich")
|
||||
(synopsis "A database migrations tool for Tortoise ORM.")
|
||||
(description
|
||||
"This package provides a database migrations tool for Tortoise ORM.")
|
||||
(license #f)))
|
||||
|
||||
(define-public python-pytest-tornado5
|
||||
(package
|
||||
(name "python-pytest-tornado5")
|
||||
(version "2.0.0")
|
||||
(source
|
||||
(origin
|
||||
(method url-fetch)
|
||||
(uri (pypi-uri "pytest-tornado5" version))
|
||||
(sha256
|
||||
(base32 "0qb62jw2w0xr6y942yp0qxiy755bismjfpnxaxjjm05gy2pymr8d"))))
|
||||
(build-system python-build-system)
|
||||
(propagated-inputs (list python-pytest python-tornado))
|
||||
(home-page "https://github.com/vidartf/pytest-tornado")
|
||||
(synopsis
|
||||
"Fixtures and markers to simplify testing of Tornado applications")
|
||||
(description
|
||||
"This package provides a @code{py.test} plugin providing fixtures and markers to
|
||||
simplify testing of asynchronous tornado applications.")
|
||||
(license license:asl2.0)))
|
||||
|
||||
(define-public python-rethinkdb
|
||||
(package
|
||||
(name "python-rethinkdb")
|
||||
(version "2.4.8")
|
||||
(source
|
||||
(origin
|
||||
(method url-fetch)
|
||||
(uri (pypi-uri "rethinkdb" version))
|
||||
(sha256
|
||||
(base32 "1vmap0la5j8xpigyp5bqph9cb6dskyw76y37n3vb16l9rlmsfxcz"))))
|
||||
(build-system python-build-system)
|
||||
(arguments
|
||||
`(#:tests? #f))
|
||||
(propagated-inputs (list python-six))
|
||||
(home-page "https://github.com/RethinkDB/rethinkdb-python")
|
||||
(synopsis "Python driver library for the RethinkDB database server.")
|
||||
(description "Python driver library for the RethinkDB database server.")
|
||||
(license #f)))
|
||||
|
||||
(define-public python-apscheduler
|
||||
(package
|
||||
(name "python-apscheduler")
|
||||
(version "3.8.1")
|
||||
(source
|
||||
(origin
|
||||
(method url-fetch)
|
||||
(uri (pypi-uri "APScheduler" version))
|
||||
(sha256
|
||||
(base32 "0m93bz9qpw6iwhay68bwljjcfyzcbh2rq0lc2yp4iamxrzml9wsw"))))
|
||||
(build-system python-build-system)
|
||||
(arguments
|
||||
`(#:phases
|
||||
(modify-phases %standard-phases
|
||||
(replace 'check
|
||||
(lambda* (#:key tests? #:allow-other-keys)
|
||||
(when tests?
|
||||
;; FIXME: Currently python-kazoo fails to build.
|
||||
(delete-file "tests/test_jobstores.py")
|
||||
(invoke "pytest")))))))
|
||||
(propagated-inputs
|
||||
(list python-pytz
|
||||
python-setuptools
|
||||
python-six
|
||||
python-tzlocal))
|
||||
(native-inputs
|
||||
(list python-mock
|
||||
python-pyqt
|
||||
python-twisted
|
||||
python-gevent
|
||||
python-setuptools-scm
|
||||
python-sqlalchemy
|
||||
python-redis
|
||||
python-pymongo
|
||||
python-rethinkdb
|
||||
python-pytest
|
||||
python-pytest-asyncio
|
||||
python-pytest-cov
|
||||
python-pytest-tornado5))
|
||||
(home-page "https://github.com/agronholm/apscheduler")
|
||||
(synopsis "In-process task scheduler with Cron-like capabilities")
|
||||
(description "In-process task scheduler with Cron-like capabilities")
|
||||
(license license:expat)))
|
||||
|
||||
(define-public python-apscheduler-for-telegram-bot
|
||||
(package (inherit python-apscheduler)
|
||||
(version "3.6.3")
|
||||
(source
|
||||
(origin
|
||||
(method url-fetch)
|
||||
(uri (pypi-uri "APScheduler" version))
|
||||
(sha256
|
||||
(base32 "0i72qpqgrgq6bb9vwsac46m7bqb6mq92g5nf2gydmfvgxng25d9v"))))))
|
||||
|
||||
(define-public python-telegram-bot
|
||||
(package
|
||||
(name "python-telegram-bot")
|
||||
(version "13.10")
|
||||
(source
|
||||
(origin
|
||||
(method url-fetch)
|
||||
(uri (pypi-uri "python-telegram-bot" version))
|
||||
(sha256
|
||||
(base32 "0ghyq044s0zi67hxwxdjjfvh37wr86pi5kmpq7harx11311mbifj"))))
|
||||
(build-system python-build-system)
|
||||
(arguments
|
||||
;; FIXME: Most tests require network access. Some of them can
|
||||
;; be run from the git repository but many still fail due
|
||||
;; to vendoring of a seemingly heavily patched urllib3.
|
||||
`(#:tests? #f))
|
||||
(native-inputs
|
||||
(list python-beautifulsoup4
|
||||
python-pytest
|
||||
python-flaky))
|
||||
(propagated-inputs
|
||||
(list python-apscheduler-for-telegram-bot
|
||||
python-cachetools
|
||||
python-certifi
|
||||
python-pytz
|
||||
python-tornado-6))
|
||||
(home-page "https://python-telegram-bot.org/")
|
||||
(synopsis "We have made you a wrapper you can't refuse")
|
||||
(description "We have made you a wrapper you can't refuse")
|
||||
(license #f)))
|
||||
|
||||
(define-public mobilizon-reshare.git
|
||||
(let ((source-version (with-input-from-file
|
||||
(string-append %source-dir
|
||||
"/mobilizon_reshare/VERSION")
|
||||
read-line))
|
||||
(revision "0")
|
||||
(commit (read-line
|
||||
(open-input-pipe "git show HEAD | head -1 | cut -d ' ' -f 2"))))
|
||||
(package
|
||||
(name "mobilizon-reshare.git")
|
||||
(version (git-version source-version revision commit))
|
||||
(source (local-file %source-dir
|
||||
#:recursive? #t
|
||||
#:select? (git-predicate %source-dir)))
|
||||
(build-system python-build-system)
|
||||
(arguments
|
||||
`(#:phases
|
||||
(modify-phases %standard-phases
|
||||
(add-after 'unpack 'generate-setup.py
|
||||
(lambda* (#:key inputs outputs #:allow-other-keys)
|
||||
;; This is a hack needed to get poetry's
|
||||
;; setup.py.
|
||||
(setenv "POETRY_VIRTUALENVS_CREATE" "false")
|
||||
(invoke "poetry" "build" "-f" "sdist")
|
||||
(invoke "bash" "-c"
|
||||
"tar --wildcards -xvf dist/*-`poetry version -s`.tar.gz -O '*/setup.py' > setup.py")
|
||||
(substitute* "setup.py"
|
||||
(("'install_requires': install_requires,") ""))))
|
||||
(replace 'check
|
||||
(lambda* (#:key tests? inputs outputs #:allow-other-keys)
|
||||
(when tests?
|
||||
(setenv "POETRY_VIRTUALENVS_CREATE" "false")
|
||||
(invoke "./scripts/run_pipeline_tests.sh"))))
|
||||
(add-before 'sanity-check 'set-dummy-config
|
||||
(lambda _
|
||||
;; This is needed to prevent the tool from
|
||||
;; crashing at startup during the sanity check.
|
||||
(setenv "SECRETS_FOR_DYNACONF"
|
||||
(string-append (getcwd)
|
||||
"/mobilizon_reshare/.secrets.toml")))))))
|
||||
(native-inputs
|
||||
(list python-iniconfig
|
||||
poetry
|
||||
python-pytest
|
||||
python-pytest-cov
|
||||
python-pytest-asyncio
|
||||
python-pytest-lazy-fixture
|
||||
python-responses))
|
||||
(propagated-inputs
|
||||
(list python-aerich
|
||||
python-aiosqlite
|
||||
python-appdirs
|
||||
python-arrow
|
||||
python-beautifulsoup4
|
||||
python-click
|
||||
dynaconf
|
||||
python-facebook-sdk.git
|
||||
python-jinja2
|
||||
python-markdownify
|
||||
python-requests
|
||||
python-telegram-bot
|
||||
python-tweepy
|
||||
python-tortoise-orm-0.18.1))
|
||||
(home-page
|
||||
"https://github.com/Tech-Workers-Coalition-Italia/mobilizon-reshare")
|
||||
(synopsis
|
||||
"Publish Mobilizon events to your social networks")
|
||||
(description
|
||||
"This package provides a CLI application to publish Mobilizon
|
||||
events to your social media.")
|
||||
(license coopyleft))))
|
|
@ -1,53 +0,0 @@
|
|||
(define-module (docker service)
|
||||
#:use-module (gnu services)
|
||||
#:use-module (gnu system shadow)
|
||||
#:use-module (gnu packages admin)
|
||||
#:use-module (guix records)
|
||||
#:use-module (guix gexp)
|
||||
#:use-module (docker mobilizon-reshare)
|
||||
#:export (mobilizon-reshare-service-type
|
||||
mobilizon-reshare-configuration
|
||||
mobilizon-reshare-configuration?))
|
||||
|
||||
(define-record-type* <mobilizon-reshare-configuration>
|
||||
mobilizon-reshare-configuration make-mobilizon-reshare-configuration
|
||||
mobilizon-reshare-configuration?
|
||||
(mobilizon-reshare mobilizon-reshare-configuration-mobilizon-reshare (default mobilizon-reshare.git))
|
||||
(datadir mobilizon-reshare-datadir (default "/var/lib/mobilizon-reshare")))
|
||||
|
||||
(define %mobilizon-reshare-accounts
|
||||
(list (user-group
|
||||
(name "mobilizon-reshare")
|
||||
(system? #t))
|
||||
(user-account
|
||||
(name "mobilizon-reshare")
|
||||
(comment "Mobilizon Reshare's Service Account")
|
||||
(group "mobilizon-reshare")
|
||||
(supplementary-groups '("tty"))
|
||||
(system? #t)
|
||||
(home-directory "/var/empty")
|
||||
(shell (file-append shadow "/sbin/nologin")))))
|
||||
|
||||
(define (%mobilizon-reshare-activation config)
|
||||
"Return an activation gexp for Mobilizon Reshare."
|
||||
(let ((datadir (mobilizon-reshare-datadir config)))
|
||||
#~(begin
|
||||
(use-modules (guix build utils))
|
||||
(let* ((user (getpwnam "mobilizon-reshare"))
|
||||
(uid (passwd:uid user))
|
||||
(gid (passwd:gid user))
|
||||
(datadir #$datadir))
|
||||
(mkdir-p datadir)
|
||||
(chown datadir uid gid)))))
|
||||
|
||||
(define mobilizon-reshare-service-type
|
||||
(service-type
|
||||
(name 'mobilizon-reshare)
|
||||
(extensions
|
||||
(list (service-extension profile-service-type
|
||||
(compose list mobilizon-reshare-configuration-mobilizon-reshare))
|
||||
(service-extension account-service-type
|
||||
(const %mobilizon-reshare-accounts))
|
||||
(service-extension activation-service-type
|
||||
%mobilizon-reshare-activation)))
|
||||
(default-value (mobilizon-reshare-configuration))))
|
|
@ -0,0 +1,29 @@
|
|||
_mobilizon_reshare_completion() {
|
||||
local IFS=$'\n'
|
||||
local response
|
||||
|
||||
response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD _MOBILIZON_RESHARE_COMPLETE=bash_complete $1)
|
||||
|
||||
for completion in $response; do
|
||||
IFS=',' read type value <<< "$completion"
|
||||
|
||||
if [[ $type == 'dir' ]]; then
|
||||
COMPREPLY=()
|
||||
compopt -o dirnames
|
||||
elif [[ $type == 'file' ]]; then
|
||||
COMPREPLY=()
|
||||
compopt -o default
|
||||
elif [[ $type == 'plain' ]]; then
|
||||
COMPREPLY+=($value)
|
||||
fi
|
||||
done
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
_mobilizon_reshare_completion_setup() {
|
||||
complete -o nosort -F _mobilizon_reshare_completion mobilizon-reshare
|
||||
}
|
||||
|
||||
_mobilizon_reshare_completion_setup;
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
function _mobilizon_reshare_completion;
|
||||
set -l response;
|
||||
|
||||
for value in (env _MOBILIZON_RESHARE_COMPLETE=fish_complete COMP_WORDS=(commandline -cp) COMP_CWORD=(commandline -t) mobilizon-reshare);
|
||||
set response $response $value;
|
||||
end;
|
||||
|
||||
for completion in $response;
|
||||
set -l metadata (string split "," $completion);
|
||||
|
||||
if test $metadata[1] = "dir";
|
||||
__fish_complete_directories $metadata[2];
|
||||
else if test $metadata[1] = "file";
|
||||
__fish_complete_path $metadata[2];
|
||||
else if test $metadata[1] = "plain";
|
||||
echo $metadata[2];
|
||||
end;
|
||||
end;
|
||||
end;
|
||||
|
||||
complete --no-files --command mobilizon-reshare --arguments "(_mobilizon_reshare_completion)";
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
#compdef mobilizon-reshare
|
||||
|
||||
_mobilizon_reshare_completion() {
|
||||
local -a completions
|
||||
local -a completions_with_descriptions
|
||||
local -a response
|
||||
(( ! $+commands[mobilizon-reshare] )) && return 1
|
||||
|
||||
response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) _MOBILIZON_RESHARE_COMPLETE=zsh_complete mobilizon-reshare)}")
|
||||
|
||||
for type key descr in ${response}; do
|
||||
if [[ "$type" == "plain" ]]; then
|
||||
if [[ "$descr" == "_" ]]; then
|
||||
completions+=("$key")
|
||||
else
|
||||
completions_with_descriptions+=("$key":"$descr")
|
||||
fi
|
||||
elif [[ "$type" == "dir" ]]; then
|
||||
_path_files -/
|
||||
elif [[ "$type" == "file" ]]; then
|
||||
_path_files -f
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$completions_with_descriptions" ]; then
|
||||
_describe -V unsorted completions_with_descriptions -U
|
||||
fi
|
||||
|
||||
if [ -n "$completions" ]; then
|
||||
compadd -U -V unsorted -a completions
|
||||
fi
|
||||
}
|
||||
|
||||
compdef _mobilizon_reshare_completion mobilizon-reshare;
|
||||
|
70
guix.scm
70
guix.scm
|
@ -1,3 +1,71 @@
|
|||
(use-modules (docker mobilizon-reshare))
|
||||
(define-module (guix)
|
||||
#:use-module (guix git-download)
|
||||
#:use-module (guix build-system python)
|
||||
#:use-module (guix gexp)
|
||||
#:use-module (guix packages)
|
||||
#:use-module (guix utils)
|
||||
#:use-module (gnu packages markup) ;; for python-markdownify
|
||||
#:use-module (gnu packages python-web) ;; for python-fastapi-pagination-minimal and uvicorn
|
||||
#:use-module (gnu packages python-xyz) ;; for python-apscheduler
|
||||
#:use-module (mobilizon-reshare package)
|
||||
#:use-module (mobilizon-reshare dependencies)
|
||||
#:use-module (ice-9 rdelim)
|
||||
#:use-module (ice-9 popen))
|
||||
|
||||
(define %source-dir (getcwd))
|
||||
|
||||
(define mobilizon-reshare-git-origin
|
||||
(local-file %source-dir
|
||||
#:recursive? #t
|
||||
#:select? (git-predicate %source-dir)))
|
||||
|
||||
(define mobilizon-reshare.git
|
||||
(let ((source-version (with-input-from-file
|
||||
(string-append %source-dir
|
||||
"/mobilizon_reshare/VERSION")
|
||||
read-line))
|
||||
(revision "0")
|
||||
(commit (read-line
|
||||
(open-input-pipe "git show HEAD | head -1 | cut -d ' ' -f 2"))))
|
||||
((package-input-rewriting/spec `(("python-fastapi" . ,(const python-fastapi))
|
||||
("python-dotenv" . ,(const python-dotenv-0.13.0))
|
||||
("python-uvicorn" . ,(const python-uvicorn))))
|
||||
(package (inherit mobilizon-reshare)
|
||||
(name "mobilizon-reshare.git")
|
||||
(version (git-version source-version revision commit))
|
||||
(source mobilizon-reshare-git-origin)
|
||||
(propagated-inputs
|
||||
(modify-inputs (package-propagated-inputs mobilizon-reshare)
|
||||
(replace "python-uvicorn" python-uvicorn)
|
||||
(replace "python-fastapi" python-fastapi)
|
||||
(replace "python-fastapi-pagination-minimal"
|
||||
(package
|
||||
(inherit python-fastapi-pagination-minimal)
|
||||
(propagated-inputs
|
||||
(modify-inputs (package-propagated-inputs python-fastapi-pagination-minimal)
|
||||
(replace "python-fastapi" python-fastapi)))))
|
||||
(replace "python-markdownify" python-markdownify)))))))
|
||||
|
||||
(define-public mobilizon-reshare-scheduler
|
||||
(package (inherit mobilizon-reshare.git)
|
||||
(name "mobilizon-reshare-scheduler")
|
||||
(build-system python-build-system)
|
||||
(arguments
|
||||
(list
|
||||
#:phases
|
||||
#~(modify-phases %standard-phases
|
||||
(delete 'configure)
|
||||
(delete 'build)
|
||||
(delete 'check)
|
||||
(replace 'install
|
||||
(lambda _
|
||||
(let ((bin (string-append #$output "/bin")))
|
||||
(mkdir-p bin)
|
||||
(install-file "scripts/scheduler.py" bin)))))))
|
||||
(propagated-inputs (list mobilizon-reshare.git
|
||||
python-apscheduler))
|
||||
(synopsis "Mobilizon Reshare's scheduler")
|
||||
(description "This script is intended to start a scheduler
|
||||
running @code{mobilizon-reshare}.")))
|
||||
|
||||
mobilizon-reshare.git
|
||||
|
|
13
manifest.scm
13
manifest.scm
|
@ -1,14 +1,17 @@
|
|||
(define-module (manifest)
|
||||
#:use-module (docker mobilizon-reshare)
|
||||
#:use-module (mobilizon-reshare package)
|
||||
#:use-module (gnu packages)
|
||||
#:use-module (guix channels)
|
||||
#:use-module (guix inferior)
|
||||
#:use-module (guix packages)
|
||||
#:use-module (guix profiles))
|
||||
#:use-module (guix profiles)
|
||||
#:use-module (srfi srfi-1))
|
||||
|
||||
(packages->manifest
|
||||
(append
|
||||
(map cadr (package-direct-inputs mobilizon-reshare.git))
|
||||
(map cadr (package-direct-inputs mobilizon-reshare))
|
||||
(map specification->package+output
|
||||
'("git-cal" "man-db" "texinfo"
|
||||
"python-pre-commit"
|
||||
"pre-commit" "cloc"
|
||||
"ripgrep" "python-semver"
|
||||
"fd" "docker-compose"))))
|
||||
"fd" "docker-compose" "poetry"))))
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
[default.publisher.telegram]
|
||||
active=true
|
||||
chat_id="xxx"
|
||||
message_thread_id="xxx"
|
||||
token="xxx"
|
||||
username="xxx"
|
||||
[default.publisher.zulip]
|
||||
|
@ -31,6 +32,7 @@ page_access_token="xxx"
|
|||
[default.notifier.telegram]
|
||||
active=true
|
||||
chat_id="xxx"
|
||||
message_thread_id="xxx"
|
||||
token="xxx"
|
||||
username="xxx"
|
||||
[default.notifier.zulip]
|
||||
|
@ -51,4 +53,4 @@ active=false
|
|||
|
||||
[default.notifier.facebook]
|
||||
active=false
|
||||
page_access_token="xxx"
|
||||
page_access_token="xxx"
|
||||
|
|
|
@ -1 +1 @@
|
|||
0.2.0
|
||||
0.3.6
|
|
@ -1,4 +0,0 @@
|
|||
[aerich]
|
||||
tortoise_orm = storage.db.TORTOISE_ORM
|
||||
location = ./migrations
|
||||
src_folder = ./.
|
|
@ -1,11 +1,12 @@
|
|||
import asyncio
|
||||
import functools
|
||||
import logging
|
||||
import sys
|
||||
import traceback
|
||||
from logging.config import dictConfig
|
||||
from pathlib import Path
|
||||
|
||||
from mobilizon_reshare.config.config import get_settings
|
||||
from mobilizon_reshare.storage.db import tear_down, MoReDB
|
||||
from mobilizon_reshare.config.command import CommandConfig
|
||||
from mobilizon_reshare.config.config import init_logging
|
||||
from mobilizon_reshare.storage.db import tear_down, init
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -14,20 +15,13 @@ async def graceful_exit():
|
|||
await tear_down()
|
||||
|
||||
|
||||
async def init():
|
||||
settings = get_settings()
|
||||
dictConfig(settings["logging"])
|
||||
db_path = Path(settings.db_path)
|
||||
db = MoReDB(db_path)
|
||||
await db.setup()
|
||||
|
||||
|
||||
async def _safe_execution(f):
|
||||
async def _safe_execution(function):
|
||||
init_logging()
|
||||
await init()
|
||||
|
||||
return_code = 1
|
||||
try:
|
||||
return_code = await f()
|
||||
return_code = await function()
|
||||
except Exception:
|
||||
traceback.print_exc()
|
||||
finally:
|
||||
|
@ -36,6 +30,9 @@ async def _safe_execution(f):
|
|||
return return_code
|
||||
|
||||
|
||||
def safe_execution(f):
|
||||
code = asyncio.run(_safe_execution(f))
|
||||
exit(code)
|
||||
def safe_execution(function, command_config: CommandConfig = None):
|
||||
if command_config:
|
||||
function = functools.partial(function, command_config)
|
||||
|
||||
code = asyncio.run(_safe_execution(function))
|
||||
sys.exit(code)
|
||||
|
|
|
@ -1,26 +1,59 @@
|
|||
import functools
|
||||
|
||||
import click
|
||||
import uvicorn
|
||||
from click import pass_context
|
||||
|
||||
from mobilizon_reshare.cli import safe_execution
|
||||
from mobilizon_reshare.cli.commands.format.format import format_event
|
||||
from mobilizon_reshare.cli.commands.list.list_event import list_events
|
||||
from mobilizon_reshare.cli.commands.list.list_publication import list_publications
|
||||
from mobilizon_reshare.cli.commands.recap.main import main as recap_main
|
||||
from mobilizon_reshare.cli.commands.start.main import main as start_main
|
||||
from mobilizon_reshare.config.config import current_version
|
||||
from mobilizon_reshare.cli.commands.publish.main import publish_command as publish_main
|
||||
from mobilizon_reshare.cli.commands.pull.main import pull_command as pull_main
|
||||
from mobilizon_reshare.cli.commands.recap.main import recap_command as recap_main
|
||||
from mobilizon_reshare.cli.commands.retry.main import (
|
||||
retry_event_command,
|
||||
retry_publication_command,
|
||||
)
|
||||
from mobilizon_reshare.cli.commands.start.main import start_command as start_main
|
||||
from mobilizon_reshare.config.command import CommandConfig
|
||||
from mobilizon_reshare.config.config import current_version, get_settings, init_logging
|
||||
from mobilizon_reshare.config.publishers import publisher_names
|
||||
from mobilizon_reshare.event.event import EventPublicationStatus
|
||||
from mobilizon_reshare.main.retry import retry, retry_publication
|
||||
from mobilizon_reshare.dataclasses.event import _EventPublicationStatus
|
||||
from mobilizon_reshare.models.publication import PublicationStatus
|
||||
from mobilizon_reshare.publishers import get_active_publishers
|
||||
|
||||
|
||||
def test_settings(ctx, param, value):
|
||||
if not value or ctx.resilient_parsing:
|
||||
return
|
||||
settings = get_settings()
|
||||
init_logging(settings)
|
||||
click.echo("OK!")
|
||||
ctx.exit()
|
||||
|
||||
|
||||
def print_version(ctx, param, value):
|
||||
if not value or ctx.resilient_parsing:
|
||||
return
|
||||
click.echo(current_version())
|
||||
ctx.exit()
|
||||
|
||||
|
||||
def print_platforms(ctx, param, value):
|
||||
if not value or ctx.resilient_parsing:
|
||||
return
|
||||
for platform in get_active_publishers():
|
||||
click.echo(platform)
|
||||
ctx.exit()
|
||||
|
||||
|
||||
status_name_to_enum = {
|
||||
"event": {
|
||||
"waiting": EventPublicationStatus.WAITING,
|
||||
"completed": EventPublicationStatus.COMPLETED,
|
||||
"failed": EventPublicationStatus.FAILED,
|
||||
"partial": EventPublicationStatus.PARTIAL,
|
||||
"waiting": _EventPublicationStatus.WAITING,
|
||||
"completed": _EventPublicationStatus.COMPLETED,
|
||||
"failed": _EventPublicationStatus.FAILED,
|
||||
"partial": _EventPublicationStatus.PARTIAL,
|
||||
"all": None,
|
||||
},
|
||||
"publication": {
|
||||
|
@ -41,48 +74,122 @@ to_date_option = click.option(
|
|||
"--end",
|
||||
type=click.DateTime(),
|
||||
expose_value=True,
|
||||
help="Include only events that begin before this datetime.",
|
||||
help="Include only events that end before this datetime.",
|
||||
)
|
||||
event_status_option = click.argument(
|
||||
event_status_argument = click.argument(
|
||||
"status",
|
||||
type=click.Choice(list(status_name_to_enum["event"].keys())),
|
||||
default="all",
|
||||
expose_value=True,
|
||||
)
|
||||
publication_status_option = click.argument(
|
||||
publication_status_argument = click.argument(
|
||||
"status",
|
||||
type=click.Choice(list(status_name_to_enum["publication"].keys())),
|
||||
default="all",
|
||||
expose_value=True,
|
||||
)
|
||||
|
||||
|
||||
def print_version(ctx, param, value):
|
||||
if not value or ctx.resilient_parsing:
|
||||
return
|
||||
click.echo(current_version())
|
||||
ctx.exit()
|
||||
force_publish_option = click.option(
|
||||
"-F",
|
||||
"--force",
|
||||
type=click.UUID,
|
||||
expose_value=True,
|
||||
help="Publish the given event, bypassing all selection logic. This command WILL publish"
|
||||
"regardless of the configured strategy, so use it with care.",
|
||||
)
|
||||
platform_name_option = click.option(
|
||||
"-p",
|
||||
"--platform",
|
||||
type=str,
|
||||
expose_value=True,
|
||||
help="Restrict the platforms where the event will be published. This makes sense only in"
|
||||
" case of force-publishing.",
|
||||
)
|
||||
list_supported_option = click.option(
|
||||
"--list-platforms",
|
||||
is_flag=True,
|
||||
callback=print_platforms,
|
||||
expose_value=False,
|
||||
is_eager=True,
|
||||
help="Show all active platforms.",
|
||||
)
|
||||
test_configuration = click.option(
|
||||
"-t",
|
||||
"--test-configuration",
|
||||
is_flag=True,
|
||||
callback=test_settings,
|
||||
expose_value=False,
|
||||
is_eager=True,
|
||||
help="Validate the current configuration.",
|
||||
)
|
||||
|
||||
|
||||
@click.group()
|
||||
@test_configuration
|
||||
@list_supported_option
|
||||
@click.option(
|
||||
"--version", is_flag=True, callback=print_version, expose_value=False, is_eager=True
|
||||
"--version",
|
||||
is_flag=True,
|
||||
callback=print_version,
|
||||
expose_value=False,
|
||||
is_eager=True,
|
||||
help="Show the current version.",
|
||||
)
|
||||
@pass_context
|
||||
def mobilizon_reshare(obj):
|
||||
pass
|
||||
|
||||
|
||||
@mobilizon_reshare.command(help="Synchronize and publish events.")
|
||||
@pass_context
|
||||
def start(ctx,):
|
||||
ctx.ensure_object(dict)
|
||||
safe_execution(start_main,)
|
||||
@mobilizon_reshare.command(
|
||||
help="Synchronize and publish events. It is equivalent to running consecutively pull and then publish."
|
||||
)
|
||||
@click.option(
|
||||
"--dry-run",
|
||||
is_flag=True,
|
||||
help="Prevents data to be published to platforms. WARNING: it will download and write new events to the database",
|
||||
default=False,
|
||||
)
|
||||
def start(dry_run):
|
||||
|
||||
safe_execution(start_main, CommandConfig(dry_run=dry_run))
|
||||
|
||||
|
||||
@mobilizon_reshare.command(help="Publish a recap of already published events.")
|
||||
def recap():
|
||||
safe_execution(recap_main,)
|
||||
@click.option(
|
||||
"--dry-run",
|
||||
"dry_run",
|
||||
is_flag=True,
|
||||
help="Prevents data to be published to platforms. WARNING: it will download and write new events to the database",
|
||||
default=False,
|
||||
)
|
||||
def recap(dry_run):
|
||||
safe_execution(recap_main, CommandConfig(dry_run=dry_run))
|
||||
|
||||
|
||||
@mobilizon_reshare.command(
|
||||
help="Fetch the latest events from Mobilizon, store them if they are unknown, "
|
||||
"update them if they are known and changed."
|
||||
)
|
||||
def pull():
|
||||
safe_execution(pull_main,)
|
||||
|
||||
|
||||
@mobilizon_reshare.command(
|
||||
help="Select an event with the current configured strategy"
|
||||
" and publish it to all active platforms."
|
||||
)
|
||||
@force_publish_option
|
||||
@platform_name_option
|
||||
@click.option(
|
||||
"--dry-run",
|
||||
"dry_run",
|
||||
is_flag=True,
|
||||
help="Prevents data to be published to platforms.",
|
||||
default=False,
|
||||
)
|
||||
def publish(event, platform, dry_run):
|
||||
safe_execution(functools.partial(
|
||||
publish_main, event, platform
|
||||
), CommandConfig(dry_run=dry_run))
|
||||
|
||||
|
||||
@mobilizon_reshare.group(help="Operations that pertain to events")
|
||||
|
@ -96,7 +203,7 @@ def publication():
|
|||
|
||||
|
||||
@event.command(help="Query for events in the database.", name="list")
|
||||
@event_status_option
|
||||
@event_status_argument
|
||||
@from_date_option
|
||||
@to_date_option
|
||||
def event_list(status, begin, end):
|
||||
|
@ -109,7 +216,7 @@ def event_list(status, begin, end):
|
|||
|
||||
|
||||
@publication.command(help="Query for publications in the database.", name="list")
|
||||
@publication_status_option
|
||||
@publication_status_argument
|
||||
@from_date_option
|
||||
@to_date_option
|
||||
def publication_list(status, begin, end):
|
||||
|
@ -138,13 +245,20 @@ def format(
|
|||
@event.command(name="retry", help="Retries all the failed publications")
|
||||
@click.argument("event-id", type=click.UUID)
|
||||
def event_retry(event_id):
|
||||
safe_execution(functools.partial(retry, event_id),)
|
||||
safe_execution(functools.partial(retry_event_command, event_id),)
|
||||
|
||||
|
||||
@publication.command(name="retry", help="Retries a specific publication")
|
||||
@click.argument("publication-id", type=click.UUID)
|
||||
def publication_retry(publication_id):
|
||||
safe_execution(functools.partial(retry_publication, publication_id),)
|
||||
safe_execution(functools.partial(retry_publication_command, publication_id),)
|
||||
|
||||
|
||||
@mobilizon_reshare.command("web")
|
||||
def web():
|
||||
uvicorn.run(
|
||||
"mobilizon_reshare.web.backend.main:app", host="0.0.0.0", port=8000, reload=True
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
@ -0,0 +1,5 @@
|
|||
import click
|
||||
|
||||
|
||||
def print_reports(reports) -> None:
|
||||
click.echo(reports)
|
|
@ -1,8 +1,8 @@
|
|||
import click
|
||||
|
||||
from mobilizon_reshare.dataclasses import MobilizonEvent
|
||||
from mobilizon_reshare.models.event import Event
|
||||
from mobilizon_reshare.publishers.platforms.platform_mapping import get_formatter_class
|
||||
from mobilizon_reshare.storage.query.event_converter import from_model
|
||||
|
||||
|
||||
async def format_event(event_id, publisher_name: str):
|
||||
|
@ -12,6 +12,6 @@ async def format_event(event_id, publisher_name: str):
|
|||
if not event:
|
||||
click.echo(f"Event with mobilizon_id {event_id} not found.")
|
||||
return
|
||||
event = from_model(event)
|
||||
event = MobilizonEvent.from_model(event)
|
||||
message = get_formatter_class(publisher_name)().get_message_from_event(event)
|
||||
click.echo(message)
|
||||
|
|
|
@ -1,23 +1,24 @@
|
|||
from typing import Iterable
|
||||
from datetime import datetime
|
||||
from typing import Iterable, Optional
|
||||
|
||||
import click
|
||||
from arrow import Arrow
|
||||
|
||||
from mobilizon_reshare.event.event import EventPublicationStatus
|
||||
from mobilizon_reshare.event.event import MobilizonEvent
|
||||
from mobilizon_reshare.event.event_selection_strategies import select_unpublished_events
|
||||
from mobilizon_reshare.storage.query.read import (
|
||||
from mobilizon_reshare.dataclasses import MobilizonEvent
|
||||
from mobilizon_reshare.dataclasses.event import (
|
||||
_EventPublicationStatus,
|
||||
get_all_mobilizon_events,
|
||||
get_published_events,
|
||||
events_with_status,
|
||||
get_all_events,
|
||||
events_without_publications,
|
||||
get_mobilizon_events_with_status,
|
||||
get_mobilizon_events_without_publications,
|
||||
)
|
||||
from mobilizon_reshare.event.event_selection_strategies import select_unpublished_events
|
||||
|
||||
status_to_color = {
|
||||
EventPublicationStatus.COMPLETED: "green",
|
||||
EventPublicationStatus.FAILED: "red",
|
||||
EventPublicationStatus.PARTIAL: "yellow",
|
||||
EventPublicationStatus.WAITING: "white",
|
||||
_EventPublicationStatus.COMPLETED: "green",
|
||||
_EventPublicationStatus.FAILED: "red",
|
||||
_EventPublicationStatus.PARTIAL: "yellow",
|
||||
_EventPublicationStatus.WAITING: "white",
|
||||
}
|
||||
|
||||
|
||||
|
@ -28,28 +29,44 @@ def show_events(events: Iterable[MobilizonEvent]):
|
|||
def pretty(event: MobilizonEvent):
|
||||
return (
|
||||
f"{event.name : ^40}{click.style(event.status.name, fg=status_to_color[event.status]) : ^22}"
|
||||
f"{str(event.mobilizon_id) : <40}{event.begin_datetime.isoformat() : <29}{event.end_datetime.isoformat()}"
|
||||
f"{str(event.mobilizon_id) : <40}"
|
||||
f"{event.begin_datetime.to('local').isoformat() : <29}"
|
||||
f"{event.end_datetime.to('local').isoformat()}"
|
||||
)
|
||||
|
||||
|
||||
async def list_unpublished_events(frm: Arrow = None, to: Arrow = None):
|
||||
return select_unpublished_events(
|
||||
list(await get_published_events(from_date=frm, to_date=to)),
|
||||
list(await events_without_publications(from_date=frm, to_date=to)),
|
||||
list(
|
||||
await get_mobilizon_events_without_publications(from_date=frm, to_date=to)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def list_events(
|
||||
status: EventPublicationStatus = None, frm: Arrow = None, to: Arrow = None
|
||||
status: Optional[_EventPublicationStatus] = None,
|
||||
frm: Optional[datetime] = None,
|
||||
to: Optional[datetime] = None,
|
||||
):
|
||||
|
||||
frm = Arrow.fromdatetime(frm) if frm else None
|
||||
to = Arrow.fromdatetime(to) if to else None
|
||||
if status is None:
|
||||
events = await get_all_events(from_date=frm, to_date=to)
|
||||
elif status == EventPublicationStatus.WAITING:
|
||||
events = await get_all_mobilizon_events(from_date=frm, to_date=to)
|
||||
elif status == _EventPublicationStatus.WAITING:
|
||||
events = await list_unpublished_events(frm=frm, to=to)
|
||||
else:
|
||||
events = await events_with_status([status], from_date=frm, to_date=to)
|
||||
|
||||
events = await get_mobilizon_events_with_status(
|
||||
[status], from_date=frm, to_date=to
|
||||
)
|
||||
events = list(events)
|
||||
if events:
|
||||
show_events(events)
|
||||
else:
|
||||
click.echo(f"No event found with status: {status.name}")
|
||||
message = (
|
||||
f"No event found with status: {status.name}"
|
||||
if status is not None
|
||||
else "No event found"
|
||||
)
|
||||
click.echo(message)
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
from typing import Iterable
|
||||
from datetime import datetime
|
||||
from typing import Iterable, Optional
|
||||
|
||||
import click
|
||||
from arrow import Arrow
|
||||
|
@ -23,19 +24,28 @@ def pretty(publication: Publication):
|
|||
return (
|
||||
f"{str(publication.id) : <40}{publication.timestamp.isoformat() : <36}"
|
||||
f"{click.style(publication.status.name, fg=status_to_color[publication.status]) : <22}"
|
||||
f"{publication.publisher.name : <12}{str(publication.event.id)}"
|
||||
f"{publication.publisher.name : <12}{str(publication.event.mobilizon_id)}"
|
||||
)
|
||||
|
||||
|
||||
async def list_publications(
|
||||
status: PublicationStatus = None, frm: Arrow = None, to: Arrow = None
|
||||
status: PublicationStatus = None,
|
||||
frm: Optional[datetime] = None,
|
||||
to: Optional[datetime] = None,
|
||||
):
|
||||
frm = Arrow.fromdatetime(frm) if frm else None
|
||||
to = Arrow.fromdatetime(to) if to else None
|
||||
if status is None:
|
||||
publications = await get_all_publications(from_date=frm, to_date=to)
|
||||
else:
|
||||
publications = await publications_with_status(status, from_date=frm, to_date=to)
|
||||
|
||||
if publications:
|
||||
show_publications(list(publications))
|
||||
show_publications(publications)
|
||||
else:
|
||||
click.echo(f"No publication found with status: {status.name}")
|
||||
message = (
|
||||
f"No publication found with status: {status.name}"
|
||||
if status is not None
|
||||
else "No publication found"
|
||||
)
|
||||
click.echo(message)
|
||||
|
|
|
@ -0,0 +1,23 @@
|
|||
import logging
|
||||
import click
|
||||
|
||||
from mobilizon_reshare.config.command import CommandConfig
|
||||
from mobilizon_reshare.main.publish import select_and_publish, publish_by_mobilizon_id
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def publish_command(event_mobilizon_id: click.UUID, platform: str, command_config: CommandConfig):
|
||||
"""
|
||||
Select an event with the current configured strategy
|
||||
and publish it to all active platforms.
|
||||
"""
|
||||
if event_mobilizon_id is not None:
|
||||
report = await publish_by_mobilizon_id(
|
||||
event_mobilizon_id,
|
||||
command_config,
|
||||
[platform] if platform is not None else None,
|
||||
)
|
||||
else:
|
||||
report = await select_and_publish(command_config)
|
||||
return 0 if report and report.successful else 1
|
|
@ -0,0 +1,10 @@
|
|||
from mobilizon_reshare.main.pull import pull
|
||||
|
||||
|
||||
async def pull_command():
|
||||
"""
|
||||
STUB
|
||||
:return:
|
||||
"""
|
||||
await pull()
|
||||
return 0
|
|
@ -1,11 +1,15 @@
|
|||
import logging.config
|
||||
|
||||
from mobilizon_reshare.cli.commands import print_reports
|
||||
from mobilizon_reshare.config.command import CommandConfig
|
||||
from mobilizon_reshare.main.recap import recap
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def main():
|
||||
async def recap_command(command_config: CommandConfig):
|
||||
|
||||
reports = await recap()
|
||||
reports = await recap(command_config)
|
||||
if command_config.dry_run and reports:
|
||||
print_reports(reports)
|
||||
return 0 if reports and reports.successful else 1
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
from mobilizon_reshare.main.retry import retry_publication, retry_event
|
||||
|
||||
|
||||
async def retry_event_command(event_id):
|
||||
reports = await retry_event(event_id)
|
||||
return 0 if reports and reports.successful else 1
|
||||
|
||||
|
||||
async def retry_publication_command(publication_id):
|
||||
reports = await retry_publication(publication_id)
|
||||
return 0 if reports and reports.successful else 1
|
|
@ -1,10 +1,14 @@
|
|||
from mobilizon_reshare.cli.commands import print_reports
|
||||
from mobilizon_reshare.config.command import CommandConfig
|
||||
from mobilizon_reshare.main.start import start
|
||||
|
||||
|
||||
async def main():
|
||||
async def start_command(command_config: CommandConfig):
|
||||
"""
|
||||
STUB
|
||||
:return:
|
||||
"""
|
||||
reports = await start()
|
||||
reports = await start(command_config)
|
||||
if command_config.dry_run and reports:
|
||||
print_reports(reports)
|
||||
return 0 if reports and reports.successful else 1
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
import dataclasses
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class CommandConfig:
|
||||
dry_run: bool = dataclasses.field(default=False)
|
|
@ -1,9 +1,9 @@
|
|||
import importlib.resources
|
||||
import importlib
|
||||
import logging
|
||||
from logging.config import dictConfig
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import pkg_resources
|
||||
from appdirs import AppDirs
|
||||
from dynaconf import Dynaconf, Validator
|
||||
|
||||
|
@ -20,6 +20,8 @@ base_validators = [
|
|||
# url of the main Mobilizon instance to download events from
|
||||
Validator("source.mobilizon.url", must_exist=True, is_type_of=str),
|
||||
Validator("source.mobilizon.group", must_exist=True, is_type_of=str),
|
||||
Validator("db_url", must_exist=True, is_type_of=str),
|
||||
Validator("locale", must_exist=True, is_type_of=str, default="en-us"),
|
||||
]
|
||||
|
||||
activeness_validators = [
|
||||
|
@ -36,23 +38,30 @@ def current_version() -> str:
|
|||
return fp.read()
|
||||
|
||||
|
||||
def get_settings_files_paths():
|
||||
def init_logging(settings: Optional[Dynaconf] = None):
|
||||
if settings is None:
|
||||
settings = get_settings()
|
||||
dictConfig(settings["logging"])
|
||||
|
||||
|
||||
def get_settings_files_paths() -> Optional[str]:
|
||||
|
||||
dirs = AppDirs(appname="mobilizon-reshare", version=current_version())
|
||||
bundled_settings_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare", "settings.toml"
|
||||
)
|
||||
for config_path in [
|
||||
Path(dirs.user_config_dir, "mobilizon_reshare.toml").absolute(),
|
||||
Path(dirs.site_config_dir, "mobilizon_reshare.toml").absolute(),
|
||||
bundled_settings_path,
|
||||
]:
|
||||
if config_path and Path(config_path).exists():
|
||||
logger.debug(f"Loading configuration from {config_path}")
|
||||
return config_path
|
||||
bundled_settings_ref = importlib.resources.files(
|
||||
"mobilizon_reshare"
|
||||
) / "settings.toml"
|
||||
with importlib.resources.as_file(bundled_settings_ref) as bundled_settings_path:
|
||||
for config_path in [
|
||||
Path(dirs.user_config_dir, "mobilizon_reshare.toml").absolute(),
|
||||
Path(dirs.site_config_dir, "mobilizon_reshare.toml").absolute(),
|
||||
bundled_settings_path.absolute(),
|
||||
]:
|
||||
if config_path and Path(config_path).exists():
|
||||
logger.debug(f"Loading configuration from {config_path}")
|
||||
return config_path
|
||||
|
||||
|
||||
def build_settings(validators: Optional[list[Validator]] = None):
|
||||
def build_settings(validators: Optional[list[Validator]] = None) -> Dynaconf:
|
||||
"""
|
||||
Creates a Dynaconf base object. Configuration files are checked in this order:
|
||||
|
||||
|
@ -76,7 +85,7 @@ def build_settings(validators: Optional[list[Validator]] = None):
|
|||
return config
|
||||
|
||||
|
||||
def build_and_validate_settings():
|
||||
def build_and_validate_settings() -> Dynaconf:
|
||||
"""
|
||||
Creates a settings object to be used in the application. It collects and apply generic validators and validators
|
||||
specific for each publisher, notifier and publication strategy.
|
||||
|
@ -111,7 +120,7 @@ def build_and_validate_settings():
|
|||
# better in the future.
|
||||
|
||||
|
||||
class CustomConfig(object):
|
||||
class CustomConfig:
|
||||
@classmethod
|
||||
def get_instance(cls):
|
||||
if not hasattr(cls, "_instance") or cls._instance is None:
|
||||
|
@ -126,5 +135,9 @@ class CustomConfig(object):
|
|||
cls._instance = None
|
||||
|
||||
|
||||
def get_settings():
|
||||
def get_settings() -> Dynaconf:
|
||||
return CustomConfig.get_instance().settings
|
||||
|
||||
|
||||
def get_settings_without_validation() -> Dynaconf:
|
||||
return build_settings()
|
||||
|
|
|
@ -4,6 +4,7 @@ from dynaconf import Validator
|
|||
|
||||
telegram_validators = [
|
||||
Validator("notifier.telegram.chat_id", must_exist=True),
|
||||
Validator("notifier.telegram.message_thread_id", default=None),
|
||||
Validator("notifier.telegram.token", must_exist=True),
|
||||
Validator("notifier.telegram.username", must_exist=True),
|
||||
]
|
||||
|
|
|
@ -3,6 +3,7 @@ from dynaconf import Validator
|
|||
|
||||
telegram_validators = [
|
||||
Validator("publisher.telegram.chat_id", must_exist=True),
|
||||
Validator("publisher.telegram.message_thread_id", default=None),
|
||||
Validator("publisher.telegram.msg_template_path", must_exist=True, default=None),
|
||||
Validator("publisher.telegram.recap_template_path", must_exist=True, default=None),
|
||||
Validator(
|
||||
|
|
|
@ -0,0 +1,13 @@
|
|||
from mobilizon_reshare.dataclasses.event import _MobilizonEvent
|
||||
from mobilizon_reshare.dataclasses.event_publication_status import (
|
||||
_EventPublicationStatus,
|
||||
)
|
||||
from mobilizon_reshare.dataclasses.publication import (
|
||||
_EventPublication,
|
||||
_PublicationNotification,
|
||||
)
|
||||
|
||||
EventPublication = _EventPublication
|
||||
MobilizonEvent = _MobilizonEvent
|
||||
EventPublicationStatus = _EventPublicationStatus
|
||||
PublicationNotification = _PublicationNotification
|
|
@ -0,0 +1,164 @@
|
|||
from dataclasses import dataclass, asdict
|
||||
from typing import Optional, Iterable
|
||||
from uuid import UUID
|
||||
|
||||
import arrow
|
||||
from arrow import Arrow
|
||||
from jinja2 import Template
|
||||
|
||||
from mobilizon_reshare.config.config import get_settings
|
||||
from mobilizon_reshare.dataclasses.event_publication_status import (
|
||||
_EventPublicationStatus,
|
||||
_compute_event_status,
|
||||
)
|
||||
from mobilizon_reshare.models.event import Event
|
||||
from mobilizon_reshare.storage.query.read import (
|
||||
get_all_events,
|
||||
get_event,
|
||||
get_events_without_publications,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class _MobilizonEvent:
|
||||
"""Class representing an event retrieved from Mobilizon."""
|
||||
|
||||
name: str
|
||||
description: Optional[str]
|
||||
begin_datetime: arrow.Arrow
|
||||
end_datetime: arrow.Arrow
|
||||
mobilizon_link: str
|
||||
mobilizon_id: UUID
|
||||
last_update_time: arrow.Arrow
|
||||
thumbnail_link: Optional[str] = None
|
||||
location: Optional[str] = None
|
||||
publication_time: Optional[dict[str, arrow.Arrow]] = None
|
||||
status: _EventPublicationStatus = _EventPublicationStatus.WAITING
|
||||
|
||||
def __post_init__(self):
|
||||
assert self.begin_datetime.tzinfo == self.end_datetime.tzinfo
|
||||
assert self.begin_datetime < self.end_datetime
|
||||
if self.publication_time is None:
|
||||
self.publication_time = {}
|
||||
if self.publication_time:
|
||||
assert self.status in [
|
||||
_EventPublicationStatus.COMPLETED,
|
||||
_EventPublicationStatus.PARTIAL,
|
||||
_EventPublicationStatus.FAILED,
|
||||
]
|
||||
|
||||
def _fill_template(self, pattern: Template) -> str:
|
||||
config = get_settings()
|
||||
return pattern.render(locale=config["locale"], **asdict(self))
|
||||
|
||||
def format(self, pattern: Template) -> str:
|
||||
return self._fill_template(pattern)
|
||||
|
||||
@classmethod
|
||||
def from_model(cls, event: Event):
|
||||
publication_status = _compute_event_status(list(event.publications))
|
||||
publication_time = {}
|
||||
|
||||
for pub in event.publications:
|
||||
if publication_status != _EventPublicationStatus.WAITING:
|
||||
assert pub.timestamp is not None
|
||||
publication_time[pub.publisher.name] = arrow.get(pub.timestamp).to(
|
||||
"local"
|
||||
)
|
||||
return cls(
|
||||
name=event.name,
|
||||
description=event.description,
|
||||
begin_datetime=arrow.get(event.begin_datetime).to("local"),
|
||||
end_datetime=arrow.get(event.end_datetime).to("local"),
|
||||
mobilizon_link=event.mobilizon_link,
|
||||
mobilizon_id=event.mobilizon_id,
|
||||
thumbnail_link=event.thumbnail_link,
|
||||
location=event.location,
|
||||
publication_time=publication_time,
|
||||
status=publication_status,
|
||||
last_update_time=arrow.get(event.last_update_time).to("local"),
|
||||
)
|
||||
|
||||
def to_model(self, db_id: Optional[UUID] = None) -> Event:
|
||||
|
||||
kwargs = {
|
||||
"name": self.name,
|
||||
"description": self.description,
|
||||
"mobilizon_id": self.mobilizon_id,
|
||||
"mobilizon_link": self.mobilizon_link,
|
||||
"thumbnail_link": self.thumbnail_link,
|
||||
"location": self.location,
|
||||
"begin_datetime": self.begin_datetime.astimezone(
|
||||
self.begin_datetime.tzinfo
|
||||
),
|
||||
"end_datetime": self.end_datetime.astimezone(self.end_datetime.tzinfo),
|
||||
"last_update_time": self.last_update_time.astimezone(
|
||||
self.last_update_time.tzinfo
|
||||
),
|
||||
}
|
||||
if db_id is not None:
|
||||
kwargs.update({"id": db_id})
|
||||
return Event(**kwargs)
|
||||
|
||||
@classmethod
|
||||
async def retrieve(cls, mobilizon_id):
|
||||
return cls.from_model(await get_event(mobilizon_id))
|
||||
|
||||
|
||||
async def get_all_mobilizon_events(
|
||||
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
|
||||
) -> list[_MobilizonEvent]:
|
||||
return [_MobilizonEvent.from_model(event) for event in await get_all_events(from_date, to_date)]
|
||||
|
||||
|
||||
async def get_published_events(
|
||||
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None
|
||||
) -> Iterable[_MobilizonEvent]:
|
||||
"""
|
||||
Retrieves events that are not waiting. Function could be renamed to something more fitting
|
||||
:return:
|
||||
"""
|
||||
return await get_mobilizon_events_with_status(
|
||||
[
|
||||
_EventPublicationStatus.COMPLETED,
|
||||
_EventPublicationStatus.PARTIAL,
|
||||
_EventPublicationStatus.FAILED,
|
||||
],
|
||||
from_date=from_date,
|
||||
to_date=to_date,
|
||||
)
|
||||
|
||||
|
||||
async def get_mobilizon_events_with_status(
|
||||
status: list[_EventPublicationStatus],
|
||||
from_date: Optional[Arrow] = None,
|
||||
to_date: Optional[Arrow] = None,
|
||||
) -> Iterable[_MobilizonEvent]:
|
||||
def _filter_event_with_status(event: Event) -> bool:
|
||||
# This computes the status client-side instead of running in the DB. It shouldn't pose a performance problem
|
||||
# in the short term, but should be moved to the query if possible.
|
||||
event_status = _compute_event_status(list(event.publications))
|
||||
return event_status in status
|
||||
|
||||
return map(
|
||||
_MobilizonEvent.from_model,
|
||||
filter(_filter_event_with_status, await get_all_events(from_date, to_date)),
|
||||
)
|
||||
|
||||
|
||||
async def get_mobilizon_events_without_publications(
|
||||
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
|
||||
) -> list[_MobilizonEvent]:
|
||||
return [
|
||||
_MobilizonEvent.from_model(event)
|
||||
for event in await get_events_without_publications(
|
||||
from_date=from_date, to_date=to_date
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
async def get_mobilizon_event_by_id(
|
||||
event_id: UUID,
|
||||
) -> _MobilizonEvent:
|
||||
event = await get_event(event_id)
|
||||
return _MobilizonEvent.from_model(event)
|
|
@ -0,0 +1,27 @@
|
|||
from enum import IntEnum
|
||||
|
||||
from mobilizon_reshare.models.publication import Publication, PublicationStatus
|
||||
|
||||
|
||||
class _EventPublicationStatus(IntEnum):
|
||||
WAITING = 1
|
||||
FAILED = 2
|
||||
COMPLETED = 3
|
||||
PARTIAL = 4
|
||||
|
||||
|
||||
def _compute_event_status(publications: list[Publication],) -> _EventPublicationStatus:
|
||||
if not publications:
|
||||
return _EventPublicationStatus.WAITING
|
||||
|
||||
unique_statuses: set[PublicationStatus] = set(pub.status for pub in publications)
|
||||
|
||||
if unique_statuses == {
|
||||
PublicationStatus.COMPLETED,
|
||||
PublicationStatus.FAILED,
|
||||
}:
|
||||
return _EventPublicationStatus.PARTIAL
|
||||
elif len(unique_statuses) == 1:
|
||||
return _EventPublicationStatus[unique_statuses.pop().name]
|
||||
|
||||
raise ValueError(f"Illegal combination of PublicationStatus: {unique_statuses}")
|
|
@ -0,0 +1,77 @@
|
|||
from dataclasses import dataclass
|
||||
from functools import partial
|
||||
from typing import List, Iterator
|
||||
from uuid import UUID
|
||||
|
||||
from tortoise.transactions import atomic
|
||||
|
||||
from mobilizon_reshare.dataclasses.event import _MobilizonEvent
|
||||
from mobilizon_reshare.models.publication import Publication
|
||||
from mobilizon_reshare.publishers.abstract import (
|
||||
AbstractPlatform,
|
||||
AbstractEventFormatter,
|
||||
)
|
||||
from mobilizon_reshare.storage.query.read import (
|
||||
get_event,
|
||||
prefetch_publication_relations,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class BasePublication:
|
||||
publisher: AbstractPlatform
|
||||
formatter: AbstractEventFormatter
|
||||
|
||||
|
||||
@dataclass
|
||||
class _EventPublication(BasePublication):
|
||||
event: _MobilizonEvent
|
||||
id: UUID
|
||||
|
||||
@classmethod
|
||||
def from_orm(cls, model: Publication, event: _MobilizonEvent):
|
||||
# imported here to avoid circular dependencies
|
||||
from mobilizon_reshare.publishers.platforms.platform_mapping import (
|
||||
get_publisher_class,
|
||||
get_formatter_class,
|
||||
)
|
||||
|
||||
publisher = get_publisher_class(model.publisher.name)()
|
||||
formatter = get_formatter_class(model.publisher.name)()
|
||||
return cls(publisher, formatter, event, model.id,)
|
||||
|
||||
@classmethod
|
||||
async def retrieve(cls, publication_id):
|
||||
publication = await prefetch_publication_relations(
|
||||
Publication.get(id=publication_id)
|
||||
)
|
||||
event = _MobilizonEvent.from_model(publication.event)
|
||||
return cls.from_orm(publication, event)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RecapPublication(BasePublication):
|
||||
events: List[_MobilizonEvent]
|
||||
|
||||
|
||||
@dataclass
|
||||
class _PublicationNotification(BasePublication):
|
||||
publication: _EventPublication
|
||||
|
||||
|
||||
@atomic()
|
||||
async def build_publications_for_event(
|
||||
event: _MobilizonEvent, publishers: Iterator[str]
|
||||
) -> list[_EventPublication]:
|
||||
publication_models = await event.to_model().build_publications(publishers)
|
||||
return [_EventPublication.from_orm(m, event) for m in publication_models]
|
||||
|
||||
|
||||
async def get_failed_publications_for_event(
|
||||
event: _MobilizonEvent,
|
||||
) -> List[_EventPublication]:
|
||||
event_model = await get_event(event.mobilizon_id)
|
||||
failed_publications = await event_model.get_failed_publications()
|
||||
return list(
|
||||
map(partial(_EventPublication.from_orm, event=event), failed_publications)
|
||||
)
|
|
@ -1,49 +0,0 @@
|
|||
from dataclasses import dataclass, asdict
|
||||
from enum import IntEnum
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
import arrow
|
||||
from jinja2 import Template
|
||||
|
||||
|
||||
class EventPublicationStatus(IntEnum):
|
||||
WAITING = 1
|
||||
FAILED = 2
|
||||
COMPLETED = 3
|
||||
PARTIAL = 4
|
||||
|
||||
|
||||
@dataclass
|
||||
class MobilizonEvent:
|
||||
"""Class representing an event retrieved from Mobilizon."""
|
||||
|
||||
name: str
|
||||
description: Optional[str]
|
||||
begin_datetime: arrow.Arrow
|
||||
end_datetime: arrow.Arrow
|
||||
mobilizon_link: str
|
||||
mobilizon_id: UUID
|
||||
last_update_time: arrow.Arrow
|
||||
thumbnail_link: Optional[str] = None
|
||||
location: Optional[str] = None
|
||||
publication_time: Optional[dict[str, arrow.Arrow]] = None
|
||||
status: EventPublicationStatus = EventPublicationStatus.WAITING
|
||||
|
||||
def __post_init__(self):
|
||||
assert self.begin_datetime.tzinfo == self.end_datetime.tzinfo
|
||||
assert self.begin_datetime < self.end_datetime
|
||||
if self.publication_time is None:
|
||||
self.publication_time = {}
|
||||
if self.publication_time:
|
||||
assert self.status in [
|
||||
EventPublicationStatus.COMPLETED,
|
||||
EventPublicationStatus.PARTIAL,
|
||||
EventPublicationStatus.FAILED,
|
||||
]
|
||||
|
||||
def _fill_template(self, pattern: Template) -> str:
|
||||
return pattern.render(**asdict(self))
|
||||
|
||||
def format(self, pattern: Template) -> str:
|
||||
return self._fill_template(pattern)
|
|
@ -5,7 +5,7 @@ from typing import List, Optional
|
|||
import arrow
|
||||
|
||||
from mobilizon_reshare.config.config import get_settings
|
||||
from mobilizon_reshare.event.event import MobilizonEvent
|
||||
from mobilizon_reshare.dataclasses import MobilizonEvent
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -19,8 +19,7 @@ class EventSelectionStrategy(ABC):
|
|||
selected = self._select(published_events, unpublished_events)
|
||||
if selected:
|
||||
return selected[0]
|
||||
else:
|
||||
return None
|
||||
return None
|
||||
|
||||
@abstractmethod
|
||||
def _select(
|
||||
|
@ -94,7 +93,7 @@ def select_unpublished_events(
|
|||
|
||||
def select_event_to_publish(
|
||||
published_events: List[MobilizonEvent], unpublished_events: List[MobilizonEvent],
|
||||
):
|
||||
) -> Optional[MobilizonEvent]:
|
||||
|
||||
strategy = STRATEGY_NAME_TO_STRATEGY_CLASS[
|
||||
get_settings()["selection"]["strategy"]
|
||||
|
|
|
@ -1,28 +1,28 @@
|
|||
from typing import List
|
||||
|
||||
from bs4 import BeautifulSoup, Tag
|
||||
import markdownify
|
||||
|
||||
|
||||
def get_bottom_paragraphs(soup: BeautifulSoup) -> List[Tag]:
|
||||
def get_bottom_paragraphs(soup: BeautifulSoup) -> list[Tag]:
|
||||
return [d for d in soup.findAll("p") if not d.find("p")]
|
||||
|
||||
|
||||
def html_to_plaintext(content):
|
||||
def html_to_plaintext(content) -> str:
|
||||
"""
|
||||
Transform a HTML in a plaintext sting that can be more easily processed by the publishers.
|
||||
Transform a HTML in a plaintext string that can be more easily processed by the publishers.
|
||||
|
||||
:param content:
|
||||
:return:
|
||||
"""
|
||||
# TODO: support links and quotes
|
||||
soup = BeautifulSoup(content)
|
||||
return "\n".join(
|
||||
" ".join(tag.stripped_strings) for tag in get_bottom_paragraphs(soup)
|
||||
)
|
||||
soup = BeautifulSoup(content, features="html.parser")
|
||||
p_list = get_bottom_paragraphs(soup)
|
||||
if p_list:
|
||||
return "\n".join(" ".join(tag.stripped_strings) for tag in p_list)
|
||||
|
||||
return soup.text
|
||||
|
||||
|
||||
def html_to_markdown(content):
|
||||
def html_to_markdown(content) -> str:
|
||||
markdown = markdownify.markdownify(content)
|
||||
escaped_markdown = markdown.replace(">", "\\>")
|
||||
return escaped_markdown.strip()
|
||||
|
|
|
@ -0,0 +1,99 @@
|
|||
import logging.config
|
||||
from typing import Optional, Iterator
|
||||
|
||||
from mobilizon_reshare.config.command import CommandConfig
|
||||
from mobilizon_reshare.dataclasses import MobilizonEvent
|
||||
from mobilizon_reshare.dataclasses.event import (
|
||||
get_published_events,
|
||||
get_mobilizon_events_without_publications,
|
||||
get_mobilizon_event_by_id,
|
||||
)
|
||||
from mobilizon_reshare.dataclasses.publication import (
|
||||
_EventPublication,
|
||||
build_publications_for_event,
|
||||
)
|
||||
from mobilizon_reshare.event.event_selection_strategies import select_event_to_publish
|
||||
from mobilizon_reshare.publishers import get_active_publishers
|
||||
from mobilizon_reshare.publishers.coordinators.event_publishing.dry_run import (
|
||||
DryRunPublisherCoordinator,
|
||||
)
|
||||
from mobilizon_reshare.publishers.coordinators.event_publishing.notify import (
|
||||
PublicationFailureNotifiersCoordinator,
|
||||
)
|
||||
from mobilizon_reshare.publishers.coordinators.event_publishing.publish import (
|
||||
PublisherCoordinatorReport,
|
||||
PublisherCoordinator,
|
||||
)
|
||||
from mobilizon_reshare.storage.query.write import (
|
||||
save_publication_report,
|
||||
save_notification_report,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def publish_publications(
|
||||
publications: list[_EventPublication],
|
||||
) -> PublisherCoordinatorReport:
|
||||
publishers_report = PublisherCoordinator(publications).run()
|
||||
await save_publication_report(publishers_report)
|
||||
|
||||
for publication_report in publishers_report.reports:
|
||||
if not publication_report.successful:
|
||||
notifiers_report = PublicationFailureNotifiersCoordinator(publication_report,).notify_failure()
|
||||
if notifiers_report:
|
||||
await save_notification_report(notifiers_report)
|
||||
|
||||
return publishers_report
|
||||
|
||||
|
||||
def perform_dry_run(publications: list[_EventPublication]):
|
||||
return DryRunPublisherCoordinator(publications).run()
|
||||
|
||||
|
||||
async def publish_event(
|
||||
event: MobilizonEvent,
|
||||
command_config: CommandConfig,
|
||||
publishers: Optional[Iterator[str]] = None,
|
||||
) -> PublisherCoordinatorReport:
|
||||
logger.info(f"Event to publish found: {event.name}")
|
||||
|
||||
if not (publishers and all(publishers)):
|
||||
publishers = get_active_publishers()
|
||||
|
||||
publications = await build_publications_for_event(event, publishers)
|
||||
if command_config.dry_run:
|
||||
logger.info("Executing in dry run mode. No event is going to be published.")
|
||||
return perform_dry_run(publications)
|
||||
else:
|
||||
return await publish_publications(publications)
|
||||
|
||||
|
||||
async def publish_by_mobilizon_id(
|
||||
event_mobilizon_id,
|
||||
command_config: CommandConfig,
|
||||
publishers: Optional[Iterator[str]] = None,
|
||||
):
|
||||
event = await get_mobilizon_event_by_id(event_mobilizon_id)
|
||||
return await publish_event(event, command_config, publishers)
|
||||
|
||||
|
||||
async def select_and_publish(
|
||||
command_config: CommandConfig,
|
||||
unpublished_events: Optional[list[MobilizonEvent]] = None,
|
||||
) -> Optional[PublisherCoordinatorReport]:
|
||||
"""
|
||||
STUB
|
||||
:return:
|
||||
"""
|
||||
if unpublished_events is None:
|
||||
unpublished_events = await get_mobilizon_events_without_publications()
|
||||
|
||||
event = select_event_to_publish(
|
||||
list(await get_published_events()), unpublished_events,
|
||||
)
|
||||
|
||||
if event:
|
||||
return await publish_event(event, command_config)
|
||||
else:
|
||||
logger.info("No event to publish found")
|
|
@ -0,0 +1,22 @@
|
|||
import logging.config
|
||||
|
||||
from mobilizon_reshare.dataclasses import MobilizonEvent
|
||||
from mobilizon_reshare.mobilizon.events import get_mobilizon_future_events
|
||||
from mobilizon_reshare.storage.query.write import create_unpublished_events
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def pull() -> list[MobilizonEvent]:
|
||||
"""
|
||||
Fetches the latest events from Mobilizon and stores them.
|
||||
:return:
|
||||
"""
|
||||
|
||||
# Pull future events from Mobilizon
|
||||
future_events = get_mobilizon_future_events()
|
||||
logger.info(f"Pulled {len(future_events)} events from Mobilizon.")
|
||||
# Store in the DB only the ones we didn't know about
|
||||
events = await create_unpublished_events(future_events)
|
||||
logger.debug(f"There are now {len(events)} unpublished events.")
|
||||
return events
|
|
@ -3,33 +3,40 @@ from typing import Optional, List
|
|||
|
||||
from arrow import now
|
||||
|
||||
from mobilizon_reshare.event.event import EventPublicationStatus, MobilizonEvent
|
||||
from mobilizon_reshare.config.command import CommandConfig
|
||||
from mobilizon_reshare.dataclasses import EventPublicationStatus
|
||||
from mobilizon_reshare.dataclasses import MobilizonEvent
|
||||
from mobilizon_reshare.dataclasses.event import get_mobilizon_events_with_status
|
||||
from mobilizon_reshare.dataclasses.publication import RecapPublication
|
||||
from mobilizon_reshare.publishers import get_active_publishers
|
||||
from mobilizon_reshare.publishers.abstract import RecapPublication
|
||||
from mobilizon_reshare.publishers.coordinator import (
|
||||
RecapCoordinator,
|
||||
from mobilizon_reshare.publishers.coordinators import BaseCoordinatorReport
|
||||
from mobilizon_reshare.publishers.coordinators.event_publishing.notify import (
|
||||
PublicationFailureNotifiersCoordinator,
|
||||
BaseCoordinatorReport,
|
||||
)
|
||||
from mobilizon_reshare.publishers.coordinators.recap_publishing.dry_run import (
|
||||
DryRunRecapCoordinator,
|
||||
)
|
||||
from mobilizon_reshare.publishers.coordinators.recap_publishing.recap import (
|
||||
RecapCoordinator,
|
||||
)
|
||||
from mobilizon_reshare.publishers.platforms.platform_mapping import (
|
||||
get_publisher_class,
|
||||
get_formatter_class,
|
||||
)
|
||||
from mobilizon_reshare.storage.query.read import events_with_status
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def select_events_to_recap() -> List[MobilizonEvent]:
|
||||
return list(
|
||||
await events_with_status(
|
||||
await get_mobilizon_events_with_status(
|
||||
status=[EventPublicationStatus.COMPLETED], from_date=now()
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
async def recap() -> Optional[BaseCoordinatorReport]:
|
||||
# I want to recap only the events that have been succesfully published and that haven't happened yet
|
||||
async def recap(command_config: CommandConfig) -> Optional[BaseCoordinatorReport]:
|
||||
# I want to recap only the events that have been successfully published and that haven't happened yet
|
||||
events_to_recap = await select_events_to_recap()
|
||||
|
||||
if events_to_recap:
|
||||
|
@ -42,7 +49,10 @@ async def recap() -> Optional[BaseCoordinatorReport]:
|
|||
)
|
||||
for publisher in get_active_publishers()
|
||||
]
|
||||
reports = RecapCoordinator(recap_publications).run()
|
||||
if command_config.dry_run:
|
||||
reports = DryRunRecapCoordinator(recap_publications).run()
|
||||
else:
|
||||
reports = RecapCoordinator(recap_publications).run()
|
||||
|
||||
for report in reports.reports:
|
||||
if report.status == EventPublicationStatus.FAILED:
|
||||
|
|
|
@ -1,57 +1,52 @@
|
|||
import logging
|
||||
from typing import Optional
|
||||
from uuid import UUID
|
||||
|
||||
from mobilizon_reshare.publishers.coordinator import (
|
||||
PublisherCoordinator,
|
||||
PublicationFailureNotifiersCoordinator,
|
||||
from tortoise.exceptions import DoesNotExist
|
||||
|
||||
from mobilizon_reshare.dataclasses import MobilizonEvent, EventPublication
|
||||
from mobilizon_reshare.dataclasses.publication import get_failed_publications_for_event
|
||||
from mobilizon_reshare.main.publish import publish_publications
|
||||
from mobilizon_reshare.publishers.coordinators.event_publishing.publish import (
|
||||
PublisherCoordinatorReport,
|
||||
)
|
||||
from mobilizon_reshare.storage.query.exceptions import EventNotFound
|
||||
from mobilizon_reshare.storage.query.read import (
|
||||
get_failed_publications_for_event,
|
||||
get_publication,
|
||||
)
|
||||
from mobilizon_reshare.storage.query.write import save_publication_report
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def retry_event_publications(event_id):
|
||||
|
||||
failed_publications = await get_failed_publications_for_event(event_id)
|
||||
async def retry_event_publications(event_id) -> Optional[PublisherCoordinatorReport]:
|
||||
event = await MobilizonEvent.retrieve(event_id)
|
||||
failed_publications = await get_failed_publications_for_event(event)
|
||||
if not failed_publications:
|
||||
logger.info("No failed publications found.")
|
||||
return
|
||||
|
||||
logger.info(f"Found {len(failed_publications)} publications.")
|
||||
return PublisherCoordinator(failed_publications).run()
|
||||
return await publish_publications(failed_publications)
|
||||
|
||||
|
||||
async def retry_publication(publication_id):
|
||||
# TODO test this function
|
||||
publication = await get_publication(publication_id)
|
||||
if not publication:
|
||||
async def retry_publication(publication_id) -> Optional[PublisherCoordinatorReport]:
|
||||
try:
|
||||
publication = await EventPublication.retrieve(publication_id)
|
||||
except DoesNotExist:
|
||||
logger.info(f"Publication {publication_id} not found.")
|
||||
return
|
||||
|
||||
logger.info(f"Publication {publication_id} found.")
|
||||
return PublisherCoordinator([publication]).run()
|
||||
return await publish_publications([publication])
|
||||
|
||||
|
||||
async def retry(mobilizon_event_id: UUID = None):
|
||||
async def retry_event(
|
||||
mobilizon_event_id: UUID = None,
|
||||
) -> Optional[PublisherCoordinatorReport]:
|
||||
if mobilizon_event_id is None:
|
||||
raise NotImplementedError(
|
||||
"Autonomous retry not implemented yet, please specify an event_id"
|
||||
)
|
||||
try:
|
||||
reports = await retry_event_publications(mobilizon_event_id)
|
||||
return await retry_event_publications(mobilizon_event_id)
|
||||
except EventNotFound as e:
|
||||
logger.debug(e, exc_info=True)
|
||||
logger.error(f"Event with id {mobilizon_event_id} not found")
|
||||
return
|
||||
|
||||
if not reports:
|
||||
return
|
||||
await save_publication_report(reports)
|
||||
for report in reports.reports:
|
||||
if not report.succesful:
|
||||
PublicationFailureNotifiersCoordinator(report,).notify_failure()
|
||||
|
|
|
@ -1,51 +1,19 @@
|
|||
import logging.config
|
||||
|
||||
from mobilizon_reshare.event.event_selection_strategies import select_event_to_publish
|
||||
from mobilizon_reshare.mobilizon.events import get_mobilizon_future_events
|
||||
from mobilizon_reshare.publishers.coordinator import (
|
||||
PublicationFailureNotifiersCoordinator,
|
||||
)
|
||||
from mobilizon_reshare.publishers.coordinator import PublisherCoordinator
|
||||
from mobilizon_reshare.storage.query.read import (
|
||||
get_published_events,
|
||||
build_publications,
|
||||
)
|
||||
from mobilizon_reshare.storage.query.write import (
|
||||
create_unpublished_events,
|
||||
save_publication_report,
|
||||
from mobilizon_reshare.config.command import CommandConfig
|
||||
from mobilizon_reshare.main.publish import select_and_publish
|
||||
from mobilizon_reshare.main.pull import pull
|
||||
from mobilizon_reshare.publishers.coordinators.event_publishing.publish import (
|
||||
PublisherCoordinatorReport,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def start():
|
||||
async def start(command_config: CommandConfig) -> PublisherCoordinatorReport:
|
||||
"""
|
||||
STUB
|
||||
:return:
|
||||
"""
|
||||
|
||||
# Pull future events from Mobilizon
|
||||
future_events = get_mobilizon_future_events()
|
||||
# Store in the DB only the ones we didn't know about
|
||||
events_without_publications = await create_unpublished_events(future_events)
|
||||
event = select_event_to_publish(
|
||||
list(await get_published_events()),
|
||||
# We must load unpublished events from DB since it contains
|
||||
# merged state between Mobilizon and previous WAITING events.
|
||||
events_without_publications,
|
||||
)
|
||||
|
||||
if event:
|
||||
logger.info(f"Event to publish found: {event.name}")
|
||||
|
||||
publications = await build_publications(event)
|
||||
reports = PublisherCoordinator(publications).run()
|
||||
|
||||
await save_publication_report(reports)
|
||||
for report in reports.reports:
|
||||
if not report.succesful:
|
||||
PublicationFailureNotifiersCoordinator(
|
||||
report,
|
||||
).notify_failure()
|
||||
else:
|
||||
logger.info("No event to publish found")
|
||||
events = await pull()
|
||||
return await select_and_publish(command_config, events,)
|
||||
|
|
|
@ -0,0 +1,41 @@
|
|||
-- upgrade --
|
||||
CREATE TABLE IF NOT EXISTS "event" (
|
||||
"id" UUID NOT NULL PRIMARY KEY,
|
||||
"name" TEXT NOT NULL,
|
||||
"description" TEXT,
|
||||
"mobilizon_id" UUID NOT NULL,
|
||||
"mobilizon_link" TEXT NOT NULL,
|
||||
"thumbnail_link" TEXT,
|
||||
"location" TEXT,
|
||||
"begin_datetime" TIMESTAMPTZ NOT NULL,
|
||||
"end_datetime" TIMESTAMPTZ NOT NULL,
|
||||
"last_update_time" TIMESTAMPTZ NOT NULL
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS "publisher" (
|
||||
"id" UUID NOT NULL PRIMARY KEY,
|
||||
"name" VARCHAR(256) NOT NULL,
|
||||
"account_ref" TEXT
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS "publication" (
|
||||
"id" UUID NOT NULL PRIMARY KEY,
|
||||
"status" SMALLINT NOT NULL,
|
||||
"timestamp" TIMESTAMPTZ NOT NULL,
|
||||
"reason" TEXT,
|
||||
"event_id" UUID NOT NULL REFERENCES "event" ("id") ON DELETE CASCADE,
|
||||
"publisher_id" UUID NOT NULL REFERENCES "publisher" ("id") ON DELETE CASCADE
|
||||
);
|
||||
COMMENT ON COLUMN "publication"."status" IS 'FAILED: 0\nCOMPLETED: 1';
|
||||
CREATE TABLE IF NOT EXISTS "notification" (
|
||||
"id" UUID NOT NULL PRIMARY KEY,
|
||||
"status" SMALLINT NOT NULL,
|
||||
"message" TEXT NOT NULL,
|
||||
"publication_id" UUID REFERENCES "publication" ("id") ON DELETE CASCADE,
|
||||
"target_id" UUID REFERENCES "publisher" ("id") ON DELETE CASCADE
|
||||
);
|
||||
COMMENT ON COLUMN "notification"."status" IS 'WAITING: 1\nFAILED: 2\nPARTIAL: 3\nCOMPLETED: 4';
|
||||
CREATE TABLE IF NOT EXISTS "aerich" (
|
||||
"id" SERIAL NOT NULL PRIMARY KEY,
|
||||
"version" VARCHAR(255) NOT NULL,
|
||||
"app" VARCHAR(100) NOT NULL,
|
||||
"content" JSONB NOT NULL
|
||||
);
|
|
@ -0,0 +1,4 @@
|
|||
[tool.aerich]
|
||||
tortoise_orm = "mobilizon_reshare.storage.db.TORTOISE_ORM"
|
||||
location = "./"
|
||||
src_folder = "./."
|
|
@ -0,0 +1,4 @@
|
|||
[tool.aerich]
|
||||
tortoise_orm = "mobilizon_reshare.storage.db.TORTOISE_ORM"
|
||||
location = "."
|
||||
src_folder = "./."
|
|
@ -8,7 +8,7 @@ import arrow
|
|||
import requests
|
||||
|
||||
from mobilizon_reshare.config.config import get_settings
|
||||
from mobilizon_reshare.event.event import MobilizonEvent, EventPublicationStatus
|
||||
from mobilizon_reshare.dataclasses import MobilizonEvent, _EventPublicationStatus
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -24,8 +24,8 @@ def parse_location(data):
|
|||
return f"{addr['description']}, {addr['locality']}, {addr['region']}"
|
||||
elif "onlineAddress" in data and data["onlineAddress"]:
|
||||
return data["onlineAddress"]
|
||||
else:
|
||||
return None
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def parse_picture(data):
|
||||
|
@ -43,7 +43,7 @@ def parse_event(data):
|
|||
thumbnail_link=parse_picture(data),
|
||||
location=parse_location(data),
|
||||
publication_time=None,
|
||||
status=EventPublicationStatus.WAITING,
|
||||
status=_EventPublicationStatus.WAITING,
|
||||
last_update_time=arrow.get(data["updatedAt"]) if "updatedAt" in data else None,
|
||||
)
|
||||
|
||||
|
|
|
@ -0,0 +1,7 @@
|
|||
from tortoise.contrib.pydantic import pydantic_model_creator
|
||||
|
||||
|
||||
class WithPydantic:
|
||||
@classmethod
|
||||
def to_pydantic(cls):
|
||||
return pydantic_model_creator(cls)
|
|
@ -1,11 +1,15 @@
|
|||
from typing import Iterator
|
||||
|
||||
from tortoise import fields
|
||||
from tortoise.models import Model
|
||||
from tortoise.transactions import atomic
|
||||
|
||||
from mobilizon_reshare.models import WithPydantic
|
||||
from mobilizon_reshare.models.publication import PublicationStatus, Publication
|
||||
from mobilizon_reshare.models.publisher import Publisher
|
||||
|
||||
|
||||
class Event(Model):
|
||||
class Event(Model, WithPydantic):
|
||||
id = fields.UUIDField(pk=True)
|
||||
name = fields.TextField()
|
||||
description = fields.TextField(null=True)
|
||||
|
@ -23,7 +27,7 @@ class Event(Model):
|
|||
publications: fields.ReverseRelation["Publication"]
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
return str(self.name)
|
||||
|
||||
def __repr__(self):
|
||||
return f"{self.id} - {self.name}"
|
||||
|
@ -41,3 +45,17 @@ class Event(Model):
|
|||
publisher_id=publisher.id,
|
||||
publisher=publisher,
|
||||
)
|
||||
|
||||
async def build_publications(self, publishers: Iterator[str]):
|
||||
return [
|
||||
await self.build_publication_by_publisher_name(name) for name in publishers
|
||||
]
|
||||
|
||||
@atomic()
|
||||
async def get_failed_publications(self,) -> list[Publication]:
|
||||
return list(
|
||||
filter(
|
||||
lambda publications: publications.status == PublicationStatus.FAILED,
|
||||
self.publications,
|
||||
)
|
||||
)
|
||||
|
|
|
@ -5,10 +5,8 @@ from tortoise.models import Model
|
|||
|
||||
|
||||
class NotificationStatus(IntEnum):
|
||||
WAITING = 1
|
||||
FAILED = 2
|
||||
PARTIAL = 3
|
||||
COMPLETED = 4
|
||||
FAILED = 0
|
||||
COMPLETED = 1
|
||||
|
||||
|
||||
class Notification(Model):
|
||||
|
@ -17,9 +15,7 @@ class Notification(Model):
|
|||
|
||||
message = fields.TextField()
|
||||
|
||||
target = fields.ForeignKeyField(
|
||||
"models.Publisher", related_name="notifications", null=True
|
||||
)
|
||||
target = fields.ForeignKeyField("models.Publisher", null=True, related_name=False,)
|
||||
|
||||
publication = fields.ForeignKeyField(
|
||||
"models.Publication", related_name="notifications", null=True
|
||||
|
|
|
@ -3,13 +3,15 @@ from enum import IntEnum
|
|||
from tortoise import fields
|
||||
from tortoise.models import Model
|
||||
|
||||
from mobilizon_reshare.models import WithPydantic
|
||||
|
||||
|
||||
class PublicationStatus(IntEnum):
|
||||
FAILED = 0
|
||||
COMPLETED = 1
|
||||
|
||||
|
||||
class Publication(Model):
|
||||
class Publication(Model, WithPydantic):
|
||||
id = fields.UUIDField(pk=True)
|
||||
status = fields.IntEnumField(PublicationStatus)
|
||||
|
||||
|
|
|
@ -1,11 +1,13 @@
|
|||
from typing import Iterator
|
||||
|
||||
import mobilizon_reshare.config.notifiers
|
||||
import mobilizon_reshare.config.publishers
|
||||
from mobilizon_reshare.config.config import get_settings
|
||||
|
||||
|
||||
def get_active_publishers():
|
||||
def get_active_publishers() -> Iterator[str]:
|
||||
return mobilizon_reshare.config.publishers.get_active_publishers(get_settings())
|
||||
|
||||
|
||||
def get_active_notifiers():
|
||||
def get_active_notifiers() -> Iterator[str]:
|
||||
return mobilizon_reshare.config.notifiers.get_active_notifiers(get_settings())
|
||||
|
|
|
@ -1,17 +1,15 @@
|
|||
import importlib
|
||||
import inspect
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
from typing import List, Optional
|
||||
from uuid import UUID
|
||||
from typing import Optional
|
||||
|
||||
from dynaconf.utils.boxing import DynaBox
|
||||
from jinja2 import Environment, FileSystemLoader, Template
|
||||
|
||||
from mobilizon_reshare.config.config import get_settings
|
||||
from mobilizon_reshare.event.event import MobilizonEvent
|
||||
from mobilizon_reshare.models.publication import Publication as PublicationModel
|
||||
from .exceptions import InvalidAttribute
|
||||
from ..dataclasses import _MobilizonEvent
|
||||
|
||||
JINJA_ENV = Environment(loader=FileSystemLoader("/"))
|
||||
|
||||
|
@ -84,10 +82,10 @@ class AbstractPlatform(ABC, LoggerMixin, ConfLoaderMixin):
|
|||
pass
|
||||
|
||||
@abstractmethod
|
||||
def _send(self, message: str, event: Optional[MobilizonEvent] = None):
|
||||
def _send(self, message: str, event: Optional[_MobilizonEvent] = None):
|
||||
raise NotImplementedError # pragma: no cover
|
||||
|
||||
def send(self, message: str, event: Optional[MobilizonEvent] = None):
|
||||
def send(self, message: str, event: Optional[_MobilizonEvent] = None):
|
||||
"""
|
||||
Sends a message to the target channel
|
||||
"""
|
||||
|
@ -110,7 +108,7 @@ class AbstractPlatform(ABC, LoggerMixin, ConfLoaderMixin):
|
|||
|
||||
class AbstractEventFormatter(LoggerMixin, ConfLoaderMixin):
|
||||
@abstractmethod
|
||||
def _validate_event(self, event: MobilizonEvent) -> None:
|
||||
def _validate_event(self, event: _MobilizonEvent) -> None:
|
||||
"""
|
||||
Validates publisher's event.
|
||||
Should raise ``PublisherError`` (or one of its subclasses) if event
|
||||
|
@ -127,17 +125,45 @@ class AbstractEventFormatter(LoggerMixin, ConfLoaderMixin):
|
|||
"""
|
||||
raise NotImplementedError # pragma: no cover
|
||||
|
||||
def validate_event(self, event: MobilizonEvent) -> None:
|
||||
def _get_name(self) -> str:
|
||||
return self._conf[1]
|
||||
|
||||
|
||||
def _get_template(self, configured_template, default_generator) -> Template:
|
||||
if configured_template:
|
||||
return JINJA_ENV.get_template(configured_template)
|
||||
else:
|
||||
template_ref = default_generator()
|
||||
with importlib.resources.as_file(template_ref) as template_path:
|
||||
return JINJA_ENV.get_template(template_path.as_posix())
|
||||
|
||||
|
||||
def get_default_template_path(self, type=""):
|
||||
return importlib.resources.files(
|
||||
"mobilizon_reshare.publishers.templates"
|
||||
) / f"{self._get_name()}{type}.tmpl.j2"
|
||||
|
||||
|
||||
def get_default_recap_template_path(self):
|
||||
return self.get_default_template_path(type="_recap")
|
||||
|
||||
|
||||
def get_default_recap_header_template_path(self):
|
||||
return self.get_default_template_path(type="_recap_header")
|
||||
|
||||
|
||||
def validate_event(self, event: _MobilizonEvent) -> None:
|
||||
self._validate_event(event)
|
||||
self._validate_message(self.get_message_from_event(event))
|
||||
|
||||
@abstractmethod
|
||||
def _preprocess_event(self, event):
|
||||
"""
|
||||
Allows publishers to preprocess events before feeding them to the template
|
||||
"""
|
||||
return event
|
||||
|
||||
def get_message_from_event(self, event: MobilizonEvent) -> str:
|
||||
def get_message_from_event(self, event: _MobilizonEvent) -> str:
|
||||
"""
|
||||
Retrieves a message from the event itself.
|
||||
"""
|
||||
|
@ -150,23 +176,22 @@ class AbstractEventFormatter(LoggerMixin, ConfLoaderMixin):
|
|||
"""
|
||||
Retrieves publisher's message template.
|
||||
"""
|
||||
template_path = self.conf.msg_template_path or self.default_template_path
|
||||
return JINJA_ENV.get_template(template_path)
|
||||
return self._get_template(self.conf.msg_template_path, self.get_default_template_path)
|
||||
|
||||
def get_recap_header(self):
|
||||
template_path = (
|
||||
self.conf.recap_header_template_path
|
||||
or self.default_recap_header_template_path
|
||||
def get_recap_header(self) -> Template:
|
||||
return self._get_template(
|
||||
self.conf.recap_header_template_path,
|
||||
self.get_default_recap_header_template_path
|
||||
)
|
||||
return JINJA_ENV.get_template(template_path).render()
|
||||
|
||||
|
||||
def get_recap_fragment_template(self) -> Template:
|
||||
template_path = (
|
||||
self.conf.recap_template_path or self.default_recap_template_path
|
||||
return self._get_template(
|
||||
self.conf.recap_template_path,
|
||||
self.get_default_recap_template_path
|
||||
)
|
||||
return JINJA_ENV.get_template(template_path)
|
||||
|
||||
def get_recap_fragment(self, event: MobilizonEvent) -> str:
|
||||
def get_recap_fragment(self, event: _MobilizonEvent) -> str:
|
||||
"""
|
||||
Retrieves the fragment that describes a single event inside the event recap.
|
||||
"""
|
||||
|
@ -175,32 +200,3 @@ class AbstractEventFormatter(LoggerMixin, ConfLoaderMixin):
|
|||
|
||||
def _preprocess_message(self, message: str):
|
||||
return message
|
||||
|
||||
|
||||
@dataclass
|
||||
class BasePublication:
|
||||
publisher: AbstractPlatform
|
||||
formatter: AbstractEventFormatter
|
||||
|
||||
|
||||
@dataclass
|
||||
class EventPublication(BasePublication):
|
||||
event: MobilizonEvent
|
||||
id: UUID
|
||||
|
||||
@classmethod
|
||||
def from_orm(cls, model: PublicationModel, event: MobilizonEvent):
|
||||
# imported here to avoid circular dependencies
|
||||
from mobilizon_reshare.publishers.platforms.platform_mapping import (
|
||||
get_publisher_class,
|
||||
get_formatter_class,
|
||||
)
|
||||
|
||||
publisher = get_publisher_class(model.publisher.name)()
|
||||
formatter = get_formatter_class(model.publisher.name)()
|
||||
return cls(publisher, formatter, event, model.id,)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RecapPublication(BasePublication):
|
||||
events: List[MobilizonEvent]
|
||||
|
|
|
@ -1,209 +0,0 @@
|
|||
import logging
|
||||
from dataclasses import dataclass
|
||||
from typing import List, Optional
|
||||
|
||||
from mobilizon_reshare.models.publication import PublicationStatus
|
||||
from mobilizon_reshare.publishers import get_active_notifiers
|
||||
from mobilizon_reshare.publishers.abstract import (
|
||||
EventPublication,
|
||||
AbstractPlatform,
|
||||
RecapPublication,
|
||||
)
|
||||
from mobilizon_reshare.publishers.exceptions import PublisherError
|
||||
from mobilizon_reshare.publishers.platforms.platform_mapping import get_notifier_class
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class BasePublicationReport:
|
||||
status: PublicationStatus
|
||||
reason: Optional[str]
|
||||
|
||||
@property
|
||||
def succesful(self):
|
||||
return self.status == PublicationStatus.COMPLETED
|
||||
|
||||
def get_failure_message(self):
|
||||
|
||||
return (
|
||||
f"Publication failed with status: {self.status}.\n" f"Reason: {self.reason}"
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class EventPublicationReport(BasePublicationReport):
|
||||
publication: EventPublication
|
||||
|
||||
def get_failure_message(self):
|
||||
|
||||
if not self.reason:
|
||||
logger.error("Report of failure without reason.", exc_info=True)
|
||||
|
||||
return (
|
||||
f"Publication {self.publication.id} failed with status: {self.status}.\n"
|
||||
f"Reason: {self.reason}\n"
|
||||
f"Publisher: {self.publication.publisher.name}\n"
|
||||
f"Event: {self.publication.event.name}"
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class BaseCoordinatorReport:
|
||||
reports: List[BasePublicationReport]
|
||||
|
||||
@property
|
||||
def successful(self):
|
||||
return all(r.status == PublicationStatus.COMPLETED for r in self.reports)
|
||||
|
||||
|
||||
@dataclass
|
||||
class PublisherCoordinatorReport(BaseCoordinatorReport):
|
||||
|
||||
reports: List[EventPublicationReport]
|
||||
publications: List[EventPublication]
|
||||
|
||||
|
||||
class PublisherCoordinator:
|
||||
def __init__(self, publications: List[EventPublication]):
|
||||
self.publications = publications
|
||||
|
||||
def run(self) -> PublisherCoordinatorReport:
|
||||
errors = self._validate()
|
||||
if errors:
|
||||
return PublisherCoordinatorReport(
|
||||
reports=errors, publications=self.publications
|
||||
)
|
||||
|
||||
return self._post()
|
||||
|
||||
def _post(self):
|
||||
reports = []
|
||||
|
||||
for publication in self.publications:
|
||||
|
||||
try:
|
||||
logger.info(f"Publishing to {publication.publisher.name}")
|
||||
message = publication.formatter.get_message_from_event(
|
||||
publication.event
|
||||
)
|
||||
publication.publisher.send(message, publication.event)
|
||||
reports.append(
|
||||
EventPublicationReport(
|
||||
status=PublicationStatus.COMPLETED,
|
||||
publication=publication,
|
||||
reason=None,
|
||||
)
|
||||
)
|
||||
except PublisherError as e:
|
||||
logger.error(str(e))
|
||||
reports.append(
|
||||
EventPublicationReport(
|
||||
status=PublicationStatus.FAILED,
|
||||
reason=str(e),
|
||||
publication=publication,
|
||||
)
|
||||
)
|
||||
|
||||
return PublisherCoordinatorReport(
|
||||
publications=self.publications, reports=reports
|
||||
)
|
||||
|
||||
def _safe_run(self, reasons, f, *args, **kwargs):
|
||||
try:
|
||||
f(*args, **kwargs)
|
||||
return reasons
|
||||
except Exception as e:
|
||||
return reasons + [str(e)]
|
||||
|
||||
def _validate(self):
|
||||
errors = []
|
||||
|
||||
for publication in self.publications:
|
||||
reasons = []
|
||||
reasons = self._safe_run(
|
||||
reasons,
|
||||
publication.publisher.validate_credentials,
|
||||
)
|
||||
reasons = self._safe_run(
|
||||
reasons, publication.formatter.validate_event, publication.event
|
||||
)
|
||||
|
||||
if len(reasons) > 0:
|
||||
errors.append(
|
||||
EventPublicationReport(
|
||||
status=PublicationStatus.FAILED,
|
||||
reason=", ".join(reasons),
|
||||
publication=publication,
|
||||
)
|
||||
)
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
class AbstractCoordinator:
|
||||
def __init__(self, message: str, platforms: List[AbstractPlatform] = None):
|
||||
self.message = message
|
||||
self.platforms = platforms
|
||||
|
||||
def send_to_all(self):
|
||||
for platform in self.platforms:
|
||||
try:
|
||||
platform.send(self.message)
|
||||
except Exception as e:
|
||||
logger.critical(f"Notifier failed to send message:\n{self.message}")
|
||||
logger.exception(e)
|
||||
|
||||
|
||||
class AbstractNotifiersCoordinator(AbstractCoordinator):
|
||||
def __init__(self, message: str, notifiers: List[AbstractPlatform] = None):
|
||||
platforms = notifiers or [
|
||||
get_notifier_class(notifier)() for notifier in get_active_notifiers()
|
||||
]
|
||||
super(AbstractNotifiersCoordinator, self).__init__(message, platforms)
|
||||
|
||||
|
||||
class PublicationFailureNotifiersCoordinator(AbstractNotifiersCoordinator):
|
||||
def __init__(self, report: BasePublicationReport, platforms=None):
|
||||
self.report = report
|
||||
super(PublicationFailureNotifiersCoordinator, self).__init__(
|
||||
message=report.get_failure_message(), notifiers=platforms
|
||||
)
|
||||
|
||||
def notify_failure(self):
|
||||
logger.info("Sending failure notifications")
|
||||
if self.report.status == PublicationStatus.FAILED:
|
||||
self.send_to_all()
|
||||
|
||||
|
||||
class RecapCoordinator:
|
||||
def __init__(self, recap_publications: List[RecapPublication]):
|
||||
self.recap_publications = recap_publications
|
||||
|
||||
def run(self) -> BaseCoordinatorReport:
|
||||
reports = []
|
||||
for recap_publication in self.recap_publications:
|
||||
try:
|
||||
|
||||
fragments = [recap_publication.formatter.get_recap_header()]
|
||||
for event in recap_publication.events:
|
||||
fragments.append(
|
||||
recap_publication.formatter.get_recap_fragment(event)
|
||||
)
|
||||
message = "\n\n".join(fragments)
|
||||
recap_publication.publisher.send(message)
|
||||
reports.append(
|
||||
BasePublicationReport(
|
||||
status=PublicationStatus.COMPLETED,
|
||||
reason=None,
|
||||
)
|
||||
)
|
||||
except PublisherError as e:
|
||||
reports.append(
|
||||
BasePublicationReport(
|
||||
status=PublicationStatus.FAILED,
|
||||
reason=str(e),
|
||||
)
|
||||
)
|
||||
|
||||
return BaseCoordinatorReport(reports=reports)
|
|
@ -0,0 +1,32 @@
|
|||
import logging
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional, Sequence
|
||||
|
||||
from mobilizon_reshare.models.publication import PublicationStatus
|
||||
|
||||
|
||||
@dataclass
|
||||
class BasePublicationReport:
|
||||
status: PublicationStatus
|
||||
reason: Optional[str]
|
||||
|
||||
@property
|
||||
def successful(self):
|
||||
return self.status == PublicationStatus.COMPLETED
|
||||
|
||||
def get_failure_message(self):
|
||||
return (
|
||||
f"Publication failed with status: {self.status.name}.\n" f"Reason: {self.reason}"
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class BaseCoordinatorReport:
|
||||
reports: Sequence[BasePublicationReport]
|
||||
|
||||
@property
|
||||
def successful(self):
|
||||
return all(r.successful for r in self.reports)
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
|
@ -0,0 +1,66 @@
|
|||
import dataclasses
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from typing import List, Optional, Sequence
|
||||
|
||||
from mobilizon_reshare.dataclasses.publication import _EventPublication
|
||||
from mobilizon_reshare.models.publication import PublicationStatus
|
||||
from mobilizon_reshare.publishers.coordinators import BasePublicationReport
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class EventPublicationReport(BasePublicationReport):
|
||||
publication: _EventPublication
|
||||
published_content: Optional[str] = dataclasses.field(default=None)
|
||||
|
||||
def get_failure_message(self):
|
||||
if not self.reason:
|
||||
logger.error("Report of failure without reason.", exc_info=True)
|
||||
|
||||
return (
|
||||
f"Publication {self.publication.id} failed with status: {self.status.name}.\n"
|
||||
f"Reason: {self.reason}\n"
|
||||
f"Publisher: {self.publication.publisher.name}\n"
|
||||
f"Event: {self.publication.event.name}"
|
||||
)
|
||||
|
||||
|
||||
class BaseEventPublishingCoordinator:
|
||||
def __init__(self, publications: List[_EventPublication]):
|
||||
self.publications = publications
|
||||
|
||||
def _safe_run(self, reasons, f, *args, **kwargs):
|
||||
try:
|
||||
f(*args, **kwargs)
|
||||
return reasons
|
||||
except Exception as e:
|
||||
return reasons + [str(e)]
|
||||
|
||||
def _validate(self) -> List[EventPublicationReport]:
|
||||
errors = []
|
||||
|
||||
for publication in self.publications:
|
||||
reasons = []
|
||||
reasons = self._safe_run(
|
||||
reasons, publication.publisher.validate_credentials,
|
||||
)
|
||||
reasons = self._safe_run(
|
||||
reasons, publication.formatter.validate_event, publication.event
|
||||
)
|
||||
|
||||
if len(reasons) > 0:
|
||||
errors.append(
|
||||
EventPublicationReport(
|
||||
status=PublicationStatus.FAILED,
|
||||
reason=", ".join(reasons),
|
||||
publication=publication,
|
||||
)
|
||||
)
|
||||
|
||||
return errors
|
||||
|
||||
def _filter_publications(self, errors: Sequence[EventPublicationReport]) -> List[_EventPublication]:
|
||||
publishers_with_errors = set(e.publication.publisher for e in errors)
|
||||
return [p for p in self.publications if p.publisher not in publishers_with_errors]
|
|
@ -0,0 +1,36 @@
|
|||
import logging
|
||||
from typing import List, Sequence
|
||||
|
||||
from mobilizon_reshare.dataclasses import _EventPublication
|
||||
from mobilizon_reshare.models.publication import PublicationStatus
|
||||
from mobilizon_reshare.publishers.coordinators.event_publishing.publish import (
|
||||
PublisherCoordinator,
|
||||
EventPublicationReport,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DryRunPublisherCoordinator(PublisherCoordinator):
|
||||
"""
|
||||
Coordinator to perform a dry-run on the event publication
|
||||
"""
|
||||
|
||||
def _publish(self, publications: Sequence[_EventPublication]) -> List[EventPublicationReport]:
|
||||
reports = [
|
||||
EventPublicationReport(
|
||||
status=PublicationStatus.COMPLETED,
|
||||
publication=publication,
|
||||
reason=None,
|
||||
published_content=publication.formatter.get_message_from_event(
|
||||
publication.event
|
||||
),
|
||||
)
|
||||
for publication in publications
|
||||
]
|
||||
logger.info("The following events would be published:")
|
||||
for r in reports:
|
||||
event_name = r.publication.event.name
|
||||
publisher_name = r.publication.publisher.name
|
||||
logger.info(f"{event_name} → {publisher_name}")
|
||||
return reports
|
|
@ -0,0 +1,126 @@
|
|||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass, field
|
||||
from typing import List, Optional, Sequence
|
||||
|
||||
from mobilizon_reshare.dataclasses import PublicationNotification, EventPublication
|
||||
from mobilizon_reshare.models.notification import NotificationStatus
|
||||
from mobilizon_reshare.models.publication import PublicationStatus
|
||||
from mobilizon_reshare.publishers import get_active_notifiers
|
||||
from mobilizon_reshare.publishers.abstract import (
|
||||
AbstractPlatform,
|
||||
)
|
||||
from mobilizon_reshare.publishers.coordinators import (
|
||||
logger,
|
||||
BasePublicationReport,
|
||||
BaseCoordinatorReport,
|
||||
)
|
||||
from mobilizon_reshare.publishers.coordinators.event_publishing import (
|
||||
EventPublicationReport,
|
||||
)
|
||||
from mobilizon_reshare.publishers.platforms.platform_mapping import (
|
||||
get_notifier_class,
|
||||
get_formatter_class,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class PublicationNotificationReport(BasePublicationReport):
|
||||
status: NotificationStatus
|
||||
notification: PublicationNotification
|
||||
|
||||
@property
|
||||
def successful(self):
|
||||
return self.status == NotificationStatus.COMPLETED
|
||||
|
||||
def get_failure_message(self):
|
||||
if not self.reason:
|
||||
logger.error("Report of failure without reason.", exc_info=True)
|
||||
return (
|
||||
f"Failed with status: {self.status.name}.\n"
|
||||
f"Reason: {self.reason}\n"
|
||||
f"Publisher: {self.notification.publisher.name}\n"
|
||||
f"Publication: {self.notification.publication.id}"
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class NotifierCoordinatorReport(BaseCoordinatorReport):
|
||||
reports: Sequence[PublicationNotificationReport]
|
||||
notifications: Sequence[PublicationNotification] = field(default_factory=list)
|
||||
|
||||
|
||||
class Sender:
|
||||
def __init__(
|
||||
self,
|
||||
message: str,
|
||||
publication: EventPublication,
|
||||
platforms: List[AbstractPlatform] = None,
|
||||
):
|
||||
self.message = message
|
||||
self.platforms = platforms
|
||||
self.publication = publication
|
||||
|
||||
def send_to_all(self) -> NotifierCoordinatorReport:
|
||||
reports = []
|
||||
notifications = []
|
||||
for platform in self.platforms:
|
||||
notification = PublicationNotification(
|
||||
platform, get_formatter_class(platform.name)(), self.publication
|
||||
)
|
||||
try:
|
||||
platform.send(self.message)
|
||||
report = PublicationNotificationReport(
|
||||
NotificationStatus.COMPLETED, self.message, notification
|
||||
)
|
||||
except Exception as e:
|
||||
msg = f"[{platform.name}] Failed to notify failure of message:\n{self.message}"
|
||||
logger.critical(msg)
|
||||
logger.exception(e)
|
||||
report = PublicationNotificationReport(
|
||||
NotificationStatus.FAILED, msg, notification
|
||||
)
|
||||
notifications.append(notification)
|
||||
reports.append(report)
|
||||
return NotifierCoordinatorReport(reports=reports, notifications=notifications)
|
||||
|
||||
|
||||
class AbstractNotifiersCoordinator(ABC):
|
||||
def __init__(
|
||||
self, report: BasePublicationReport, notifiers: List[AbstractPlatform] = None
|
||||
):
|
||||
self.platforms = notifiers or [
|
||||
get_notifier_class(notifier)() for notifier in get_active_notifiers()
|
||||
]
|
||||
self.report = report
|
||||
|
||||
@abstractmethod
|
||||
def notify_failure(self):
|
||||
pass
|
||||
|
||||
|
||||
class PublicationFailureNotifiersCoordinator(AbstractNotifiersCoordinator):
|
||||
"""
|
||||
Sends a notification of a failure report to the active platforms
|
||||
"""
|
||||
|
||||
report: EventPublicationReport
|
||||
platforms: List[AbstractPlatform]
|
||||
|
||||
def notify_failure(self) -> Optional[NotifierCoordinatorReport]:
|
||||
logger.info("Sending failure notifications")
|
||||
if self.report.status == PublicationStatus.FAILED:
|
||||
return Sender(
|
||||
self.report.get_failure_message(),
|
||||
self.report.publication,
|
||||
self.platforms,
|
||||
).send_to_all()
|
||||
|
||||
|
||||
class PublicationFailureLoggerCoordinator(PublicationFailureNotifiersCoordinator):
|
||||
"""
|
||||
Logs a report to console
|
||||
"""
|
||||
|
||||
def notify_failure(self):
|
||||
if self.report.status == PublicationStatus.FAILED:
|
||||
logger.error(self.report.get_failure_message())
|
|
@ -0,0 +1,86 @@
|
|||
import dataclasses
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from typing import Sequence, List
|
||||
|
||||
from mobilizon_reshare.dataclasses.publication import _EventPublication
|
||||
from mobilizon_reshare.models.publication import PublicationStatus
|
||||
from mobilizon_reshare.publishers.coordinators import BaseCoordinatorReport
|
||||
from mobilizon_reshare.publishers.coordinators.event_publishing import (
|
||||
BaseEventPublishingCoordinator,
|
||||
EventPublicationReport,
|
||||
)
|
||||
from mobilizon_reshare.publishers.exceptions import PublisherError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class PublisherCoordinatorReport(BaseCoordinatorReport):
|
||||
reports: Sequence[EventPublicationReport]
|
||||
publications: Sequence[_EventPublication] = dataclasses.field(default_factory=list)
|
||||
|
||||
def __str__(self):
|
||||
platform_messages = []
|
||||
for report in self.reports:
|
||||
intro = f"Message for: {report.publication.publisher.name}"
|
||||
platform_messages.append(
|
||||
f"""{intro}
|
||||
{"*" * len(intro)}
|
||||
{report.published_content}
|
||||
{"-" * 80}"""
|
||||
)
|
||||
return "\n".join(platform_messages)
|
||||
|
||||
|
||||
class PublisherCoordinator(BaseEventPublishingCoordinator):
|
||||
"""
|
||||
Coordinator to publish an event on every active platform
|
||||
"""
|
||||
|
||||
def run(self) -> PublisherCoordinatorReport:
|
||||
validation_reports = self._validate()
|
||||
valid_publications = self._filter_publications(validation_reports)
|
||||
publishing_reports = self._publish(valid_publications)
|
||||
return PublisherCoordinatorReport(
|
||||
publications=self.publications,
|
||||
reports=validation_reports + publishing_reports
|
||||
)
|
||||
|
||||
def _publish(self, publications: Sequence[_EventPublication]) -> List[EventPublicationReport]:
|
||||
reports = []
|
||||
|
||||
for publication in publications:
|
||||
|
||||
try:
|
||||
publication_report = self._publish_publication(publication)
|
||||
reports.append(publication_report)
|
||||
except PublisherError as e:
|
||||
logger.error(str(e))
|
||||
reports.append(
|
||||
EventPublicationReport(
|
||||
status=PublicationStatus.FAILED,
|
||||
reason=str(e),
|
||||
publication=publication,
|
||||
)
|
||||
)
|
||||
|
||||
return reports
|
||||
|
||||
@staticmethod
|
||||
def _publish_publication(publication):
|
||||
"""
|
||||
Publishes a single publication
|
||||
:param publication:
|
||||
:return:
|
||||
"""
|
||||
|
||||
logger.info("Publishing to %s", publication.publisher.name)
|
||||
message = publication.formatter.get_message_from_event(publication.event)
|
||||
publication.publisher.send(message, publication.event)
|
||||
return EventPublicationReport(
|
||||
status=PublicationStatus.COMPLETED,
|
||||
publication=publication,
|
||||
reason=None,
|
||||
published_content=message,
|
||||
)
|
|
@ -0,0 +1,20 @@
|
|||
from mobilizon_reshare.publishers.coordinators.recap_publishing.recap import (
|
||||
RecapCoordinator,
|
||||
)
|
||||
|
||||
|
||||
class DryRunRecapCoordinator(RecapCoordinator):
|
||||
"""
|
||||
Coordinator to perform a dry-run on the event recap
|
||||
"""
|
||||
|
||||
def _send(self, content, recap_publication):
|
||||
"""
|
||||
Overrides the Recap Coordinator _send on the assumption that _send is just a side effect publishing
|
||||
on a given platform. The report generated by RecapCoordinator should be sufficient to perform
|
||||
the dry-run print at CLI level.
|
||||
:param content:
|
||||
:param recap_publication:
|
||||
:return:
|
||||
"""
|
||||
pass
|
|
@ -0,0 +1,78 @@
|
|||
import dataclasses
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional, Sequence, List
|
||||
|
||||
from mobilizon_reshare.models.publication import PublicationStatus
|
||||
from mobilizon_reshare.dataclasses.publication import RecapPublication
|
||||
from mobilizon_reshare.publishers.coordinators import (
|
||||
BasePublicationReport,
|
||||
BaseCoordinatorReport,
|
||||
)
|
||||
from mobilizon_reshare.publishers.exceptions import PublisherError
|
||||
|
||||
|
||||
@dataclass
|
||||
class RecapPublicationReport(BasePublicationReport):
|
||||
publication: RecapPublication
|
||||
published_content: Optional[str] = dataclasses.field(default=None)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RecapCoordinatorReport(BaseCoordinatorReport):
|
||||
reports: Sequence[RecapPublicationReport]
|
||||
|
||||
def __str__(self):
|
||||
platform_messages = []
|
||||
for report in self.reports:
|
||||
intro = f"Message for: {report.publication.publisher.name}"
|
||||
platform_messages.append(
|
||||
f"""{intro}
|
||||
{"*"*len(intro)}
|
||||
{report.published_content}
|
||||
{"-"*80}"""
|
||||
)
|
||||
return "\n".join(platform_messages)
|
||||
|
||||
|
||||
class RecapCoordinator:
|
||||
"""
|
||||
Coordinator to publish a recap on future events
|
||||
"""
|
||||
|
||||
def __init__(self, recap_publications: List[RecapPublication]):
|
||||
self.recap_publications = recap_publications
|
||||
|
||||
def _build_recap_content(self, recap_publication: RecapPublication):
|
||||
fragments = [recap_publication.formatter.get_recap_header()]
|
||||
for event in recap_publication.events:
|
||||
fragments.append(recap_publication.formatter.get_recap_fragment(event))
|
||||
return "\n\n".join(fragments)
|
||||
|
||||
def _send(self, content, recap_publication):
|
||||
recap_publication.publisher.send(content)
|
||||
|
||||
def run(self) -> RecapCoordinatorReport:
|
||||
reports = []
|
||||
for recap_publication in self.recap_publications:
|
||||
try:
|
||||
|
||||
message = self._build_recap_content(recap_publication)
|
||||
self._send(message, recap_publication)
|
||||
reports.append(
|
||||
RecapPublicationReport(
|
||||
status=PublicationStatus.COMPLETED,
|
||||
reason=None,
|
||||
published_content=message,
|
||||
publication=recap_publication,
|
||||
)
|
||||
)
|
||||
except PublisherError as e:
|
||||
reports.append(
|
||||
RecapPublicationReport(
|
||||
status=PublicationStatus.FAILED,
|
||||
reason=str(e),
|
||||
publication=recap_publication,
|
||||
)
|
||||
)
|
||||
|
||||
return RecapCoordinatorReport(reports=reports)
|
|
@ -1,8 +1,6 @@
|
|||
class PublisherError(Exception):
|
||||
"""Generic publisher error"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class InvalidAttribute(PublisherError):
|
||||
"""Publisher defined with invalid or missing attribute"""
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
from typing import Optional
|
||||
|
||||
import facebook
|
||||
import pkg_resources
|
||||
from facebook import GraphAPIError
|
||||
|
||||
from mobilizon_reshare.event.event import MobilizonEvent
|
||||
from mobilizon_reshare.dataclasses import MobilizonEvent
|
||||
from mobilizon_reshare.formatting.description import html_to_plaintext
|
||||
from mobilizon_reshare.publishers.abstract import (
|
||||
AbstractPlatform,
|
||||
AbstractEventFormatter,
|
||||
|
@ -12,23 +12,13 @@ from mobilizon_reshare.publishers.abstract import (
|
|||
from mobilizon_reshare.publishers.exceptions import (
|
||||
InvalidCredentials,
|
||||
InvalidEvent,
|
||||
InvalidMessage,
|
||||
PublisherError,
|
||||
)
|
||||
|
||||
|
||||
class FacebookFormatter(AbstractEventFormatter):
|
||||
|
||||
_conf = ("publisher", "facebook")
|
||||
default_template_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare.publishers.templates", "facebook.tmpl.j2"
|
||||
)
|
||||
|
||||
default_recap_template_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare.publishers.templates", "facebook_recap.tmpl.j2"
|
||||
)
|
||||
|
||||
default_recap_header_template_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare.publishers.templates", "facebook_recap_header.tmpl.j2"
|
||||
)
|
||||
|
||||
def _validate_event(self, event: MobilizonEvent) -> None:
|
||||
text = event.description
|
||||
|
@ -36,7 +26,13 @@ class FacebookFormatter(AbstractEventFormatter):
|
|||
self._log_error("No text was found", raise_error=InvalidEvent)
|
||||
|
||||
def _validate_message(self, message) -> None:
|
||||
pass
|
||||
if len(message) >= 63200:
|
||||
self._log_error("Message is too long", raise_error=InvalidMessage)
|
||||
|
||||
def _preprocess_event(self, event: MobilizonEvent):
|
||||
event.description = html_to_plaintext(event.description)
|
||||
event.name = html_to_plaintext(event.name)
|
||||
return event
|
||||
|
||||
|
||||
class FacebookPlatform(AbstractPlatform):
|
||||
|
@ -46,18 +42,21 @@ class FacebookPlatform(AbstractPlatform):
|
|||
|
||||
name = "facebook"
|
||||
|
||||
def _get_api(self):
|
||||
return facebook.GraphAPI(
|
||||
access_token=self.conf["page_access_token"], version="8.0"
|
||||
)
|
||||
def _get_api(self) -> facebook.GraphAPI:
|
||||
return facebook.GraphAPI(access_token=self.conf["page_access_token"])
|
||||
|
||||
def _send(self, message: str, event: Optional[MobilizonEvent] = None):
|
||||
self._get_api().put_object(
|
||||
parent_object="me",
|
||||
connection_name="feed",
|
||||
message=message,
|
||||
link=event.mobilizon_link if event else None,
|
||||
)
|
||||
try:
|
||||
self._get_api().put_object(
|
||||
parent_object="me",
|
||||
connection_name="feed",
|
||||
message=message,
|
||||
link=event.mobilizon_link if event else None,
|
||||
)
|
||||
except GraphAPIError:
|
||||
self._log_error(
|
||||
"Facebook send failed", raise_error=PublisherError,
|
||||
)
|
||||
|
||||
def validate_credentials(self):
|
||||
|
||||
|
@ -70,7 +69,7 @@ class FacebookPlatform(AbstractPlatform):
|
|||
raise_error=InvalidCredentials,
|
||||
)
|
||||
|
||||
self._log_debug("Facebook credentials are valid")
|
||||
self._log_debug("Facebook credentials are valid")
|
||||
|
||||
def _validate_response(self, response):
|
||||
pass
|
||||
|
|
|
@ -1,11 +1,10 @@
|
|||
from typing import Optional
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import pkg_resources
|
||||
import requests
|
||||
from requests import Response
|
||||
|
||||
from mobilizon_reshare.event.event import MobilizonEvent
|
||||
from mobilizon_reshare.dataclasses import MobilizonEvent
|
||||
from mobilizon_reshare.publishers.abstract import (
|
||||
AbstractPlatform,
|
||||
AbstractEventFormatter,
|
||||
|
@ -20,19 +19,7 @@ from mobilizon_reshare.publishers.exceptions import (
|
|||
|
||||
|
||||
class MastodonFormatter(AbstractEventFormatter):
|
||||
|
||||
_conf = ("publisher", "mastodon")
|
||||
default_template_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare.publishers.templates", "mastodon.tmpl.j2"
|
||||
)
|
||||
|
||||
default_recap_template_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare.publishers.templates", "mastodon_recap.tmpl.j2"
|
||||
)
|
||||
|
||||
default_recap_header_template_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare.publishers.templates", "mastodon_recap_header.tmpl.j2"
|
||||
)
|
||||
|
||||
def _validate_event(self, event: MobilizonEvent) -> None:
|
||||
text = event.description
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
import re
|
||||
from typing import Optional
|
||||
|
||||
import pkg_resources
|
||||
import requests
|
||||
from bs4 import BeautifulSoup
|
||||
from requests import Response
|
||||
|
||||
from mobilizon_reshare.event.event import MobilizonEvent
|
||||
from mobilizon_reshare.dataclasses import MobilizonEvent
|
||||
from mobilizon_reshare.publishers.abstract import (
|
||||
AbstractEventFormatter,
|
||||
AbstractPlatform,
|
||||
|
@ -19,26 +19,16 @@ from mobilizon_reshare.publishers.exceptions import (
|
|||
|
||||
|
||||
class TelegramFormatter(AbstractEventFormatter):
|
||||
default_template_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare.publishers.templates", "telegram.tmpl.j2"
|
||||
)
|
||||
|
||||
default_recap_template_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare.publishers.templates", "telegram_recap.tmpl.j2"
|
||||
)
|
||||
|
||||
default_recap_header_template_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare.publishers.templates", "telegram_recap_header.tmpl.j2"
|
||||
)
|
||||
|
||||
_conf = ("publisher", "telegram")
|
||||
|
||||
def _validate_event(self, event: MobilizonEvent) -> None:
|
||||
|
||||
description = event.description
|
||||
if not (description and description.strip()):
|
||||
self._log_error("No description was found", raise_error=InvalidEvent)
|
||||
|
||||
def _validate_message(self, message: str) -> None:
|
||||
|
||||
if (
|
||||
len("".join(BeautifulSoup(message, "html.parser").findAll(text=True)))
|
||||
>= 4096
|
||||
|
@ -46,7 +36,11 @@ class TelegramFormatter(AbstractEventFormatter):
|
|||
self._log_error("Message is too long", raise_error=InvalidMessage)
|
||||
|
||||
def _preprocess_message(self, message: str) -> str:
|
||||
|
||||
"""
|
||||
This function converts HTML5 to Telegram's HTML dialect
|
||||
:param message: a HTML5 string
|
||||
:return: a HTML string compatible with Telegram
|
||||
"""
|
||||
html = BeautifulSoup(message, "html.parser")
|
||||
# replacing paragraphs
|
||||
for tag in html.findAll(["p", "br"]):
|
||||
|
@ -69,8 +63,10 @@ class TelegramFormatter(AbstractEventFormatter):
|
|||
tag.unwrap()
|
||||
# cleaning html trailing whitespace
|
||||
for tag in html.findAll("a"):
|
||||
tag["href"] = tag["href"].replace(" ", "").strip().lstrip()
|
||||
return str(html)
|
||||
if "href" in tag:
|
||||
tag["href"] = tag["href"].replace(" ", "").strip().lstrip()
|
||||
s = str(html)
|
||||
return re.sub(r"\n{2,}", "\n\n", s).strip() # remove multiple newlines
|
||||
|
||||
|
||||
class TelegramPlatform(AbstractPlatform):
|
||||
|
@ -90,14 +86,18 @@ class TelegramPlatform(AbstractPlatform):
|
|||
)
|
||||
|
||||
def _send(self, message: str, event: Optional[MobilizonEvent] = None) -> Response:
|
||||
json_message = {"chat_id": self.conf.chat_id, "text": message, "parse_mode": "html"}
|
||||
|
||||
if self.conf.message_thread_id:
|
||||
json_message["message_thread_id"] = self.conf.message_thread_id
|
||||
|
||||
return requests.post(
|
||||
url=f"https://api.telegram.org/bot{self.conf.token}/sendMessage",
|
||||
json={"chat_id": self.conf.chat_id, "text": message, "parse_mode": "html"},
|
||||
json=json_message,
|
||||
)
|
||||
|
||||
def _validate_response(self, res):
|
||||
try:
|
||||
|
||||
res.raise_for_status()
|
||||
except requests.exceptions.HTTPError as e:
|
||||
self._log_error(
|
||||
|
@ -107,6 +107,7 @@ class TelegramPlatform(AbstractPlatform):
|
|||
|
||||
try:
|
||||
data = res.json()
|
||||
|
||||
except Exception as e:
|
||||
self._log_error(
|
||||
f"Server returned invalid json data: {str(e)}",
|
||||
|
|
|
@ -1,41 +1,25 @@
|
|||
from typing import Optional
|
||||
|
||||
import pkg_resources
|
||||
from tweepy import OAuthHandler, API, TweepyException
|
||||
from tweepy.models import Status
|
||||
|
||||
from mobilizon_reshare.event.event import MobilizonEvent
|
||||
from mobilizon_reshare.dataclasses import MobilizonEvent
|
||||
from mobilizon_reshare.publishers.abstract import (
|
||||
AbstractPlatform,
|
||||
AbstractEventFormatter,
|
||||
)
|
||||
from mobilizon_reshare.publishers.exceptions import (
|
||||
InvalidCredentials,
|
||||
InvalidEvent,
|
||||
PublisherError,
|
||||
InvalidMessage,
|
||||
)
|
||||
|
||||
|
||||
class TwitterFormatter(AbstractEventFormatter):
|
||||
|
||||
_conf = ("publisher", "twitter")
|
||||
default_template_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare.publishers.templates", "twitter.tmpl.j2"
|
||||
)
|
||||
|
||||
default_recap_template_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare.publishers.templates", "twitter_recap.tmpl.j2"
|
||||
)
|
||||
|
||||
default_recap_header_template_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare.publishers.templates", "twitter_recap_header.tmpl.j2"
|
||||
)
|
||||
|
||||
def _validate_event(self, event: MobilizonEvent) -> None:
|
||||
text = event.description
|
||||
if not (text and text.strip()):
|
||||
self._log_error("No text was found", raise_error=InvalidEvent)
|
||||
pass # pragma: no cover
|
||||
|
||||
def _validate_message(self, message) -> None:
|
||||
# TODO this is not precise. It should count the characters according to Twitter's logic but
|
||||
|
@ -76,7 +60,7 @@ class TwitterPlatform(AbstractPlatform):
|
|||
)
|
||||
|
||||
def _validate_response(self, res: Status) -> dict:
|
||||
pass
|
||||
pass # pragma: no cover
|
||||
|
||||
|
||||
class TwitterPublisher(TwitterPlatform):
|
||||
|
|
|
@ -1,12 +1,11 @@
|
|||
from typing import Optional
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import pkg_resources
|
||||
import requests
|
||||
from requests import Response
|
||||
from requests.auth import HTTPBasicAuth
|
||||
|
||||
from mobilizon_reshare.event.event import MobilizonEvent
|
||||
from mobilizon_reshare.dataclasses import MobilizonEvent
|
||||
from mobilizon_reshare.formatting.description import html_to_markdown
|
||||
from mobilizon_reshare.publishers.abstract import (
|
||||
AbstractPlatform,
|
||||
|
@ -23,19 +22,7 @@ from mobilizon_reshare.publishers.exceptions import (
|
|||
|
||||
|
||||
class ZulipFormatter(AbstractEventFormatter):
|
||||
|
||||
_conf = ("publisher", "zulip")
|
||||
default_template_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare.publishers.templates", "zulip.tmpl.j2"
|
||||
)
|
||||
|
||||
default_recap_template_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare.publishers.templates", "zulip_recap.tmpl.j2"
|
||||
)
|
||||
|
||||
default_recap_header_template_path = pkg_resources.resource_filename(
|
||||
"mobilizon_reshare.publishers.templates", "zulip_recap_header.tmpl.j2"
|
||||
)
|
||||
|
||||
def _validate_event(self, event: MobilizonEvent) -> None:
|
||||
text = event.description
|
||||
|
@ -61,18 +48,6 @@ class ZulipPlatform(AbstractPlatform):
|
|||
api_uri = "api/v1/"
|
||||
name = "zulip"
|
||||
|
||||
def _send_private(
|
||||
self, message: str, event: Optional[MobilizonEvent] = None
|
||||
) -> Response:
|
||||
"""
|
||||
Send private messages
|
||||
"""
|
||||
return requests.post(
|
||||
url=urljoin(self.conf.instance, self.api_uri) + "messages",
|
||||
auth=HTTPBasicAuth(self.conf.bot_email, self.conf.bot_token),
|
||||
data={"type": "private", "to": f"[{self.user_id}]", "content": message},
|
||||
)
|
||||
|
||||
def _send(self, message: str, event: Optional[MobilizonEvent] = None) -> Response:
|
||||
"""
|
||||
Send stream messages
|
||||
|
@ -110,18 +85,18 @@ class ZulipPlatform(AbstractPlatform):
|
|||
raise_error=InvalidBot,
|
||||
)
|
||||
|
||||
def _validate_response(self, res: Response) -> dict:
|
||||
def _validate_response(self, response: Response) -> dict:
|
||||
try:
|
||||
res.raise_for_status()
|
||||
response.raise_for_status()
|
||||
except requests.exceptions.HTTPError as e:
|
||||
self._log_debug(str(res))
|
||||
self._log_debug(str(response.text))
|
||||
self._log_error(
|
||||
str(e), raise_error=HTTPResponseError,
|
||||
)
|
||||
|
||||
# See https://zulip.com/api/rest-error-handling
|
||||
try:
|
||||
data = res.json()
|
||||
data = response.json()
|
||||
except Exception as e:
|
||||
self._log_error(
|
||||
f"Server returned invalid json data: {str(e)}",
|
||||
|
@ -130,7 +105,7 @@ class ZulipPlatform(AbstractPlatform):
|
|||
|
||||
if data["result"] == "error":
|
||||
self._log_error(
|
||||
f"{res.status_code} Error - {data['msg']}", raise_error=ZulipError,
|
||||
f"{response.status_code} Error - {data['msg']}", raise_error=ZulipError,
|
||||
)
|
||||
|
||||
return data
|
||||
|
|
|
@ -1,9 +1,11 @@
|
|||
# {{ name }}
|
||||
{{ name }}
|
||||
|
||||
🕒 {{ begin_datetime.format('DD MMMM, HH:mm') }} - {{ end_datetime.format('DD MMMM, HH:mm') }}
|
||||
🕒 {{ begin_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }} - {{ end_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }}
|
||||
|
||||
{% if location %}
|
||||
📍 {{ location }}
|
||||
|
||||
{% endif %}
|
||||
{{ description }}
|
||||
|
||||
🔗 Link: {{mobilizon_link}}
|
|
@ -1,9 +1,9 @@
|
|||
# {{ name }}
|
||||
{{ name }}
|
||||
|
||||
🕒 {{ begin_datetime.format('DD MMMM, HH:mm') }} - {{ end_datetime.format('DD MMMM, HH:mm') }}
|
||||
🕒 {{ begin_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }} - {{ end_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }}
|
||||
|
||||
{% if location %}
|
||||
📍 {{ location }}
|
||||
|
||||
{% endif %}
|
||||
🔗 {{mobilizon_link}}
|
||||
🔗 Link: {{mobilizon_link}}
|
|
@ -1 +1 @@
|
|||
Upcoming events
|
||||
📅 Upcoming events
|
|
@ -1,9 +1,9 @@
|
|||
{{ name }}
|
||||
|
||||
🕒 {{ begin_datetime.format('DD MMMM, HH:mm') }} - {{ end_datetime.format('DD MMMM, HH:mm') }}
|
||||
🕒 {{ begin_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }} - {{ end_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }}
|
||||
|
||||
{% if location %}
|
||||
📍 {{ location }}
|
||||
|
||||
{% endif %}
|
||||
{{mobilizon_link}}
|
||||
🔗 {{mobilizon_link}}
|
|
@ -1,5 +1,5 @@
|
|||
*{{ name }}*
|
||||
{{ name }}
|
||||
|
||||
🕒 {{ begin_datetime.format('DD MMMM, HH:mm') }} - {{ end_datetime.format('DD MMMM, HH:mm') }}
|
||||
🕒 {{ begin_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }} - {{ end_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }}
|
||||
{% if location %}📍 {{ location }}{% endif %}
|
||||
🔗 {{mobilizon_link}}
|
|
@ -1,8 +1,8 @@
|
|||
<strong>{{ name }}</strong>
|
||||
|
||||
🕒 {{ begin_datetime.format('DD MMMM, HH:mm') }} - {{ end_datetime.format('DD MMMM, HH:mm') }}
|
||||
🕒 {{ begin_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }} - {{ end_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }}
|
||||
{% if location %}📍 {{ location }}{% endif %}
|
||||
|
||||
{{ description }}
|
||||
|
||||
<a href="{{mobilizon_link}}">Link</a>
|
||||
🔗 <a href="{{mobilizon_link}}">Link</a>
|
|
@ -1,5 +1,5 @@
|
|||
*{{ name }}*
|
||||
<strong>{{ name }}</strong>
|
||||
|
||||
🕒 {{ begin_datetime.format('DD MMMM, HH:mm') }} - {{ end_datetime.format('DD MMMM, HH:mm') }}
|
||||
🕒 {{ begin_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }} - {{ end_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }}
|
||||
{% if location %}📍 {{ location }}{% endif %}
|
||||
🔗 [Link]({{mobilizon_link}})
|
||||
🔗 <a href="{{mobilizon_link}}">Link</a>
|
|
@ -1 +1 @@
|
|||
Upcoming events
|
||||
📅 Upcoming events
|
|
@ -1,9 +1,9 @@
|
|||
# {{ name }}
|
||||
{{ name }}
|
||||
|
||||
🕒 {{ begin_datetime.format('DD MMMM, HH:mm') }} - {{ end_datetime.format('DD MMMM, HH:mm') }}
|
||||
🕒 {{ begin_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }} - {{ end_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }}
|
||||
|
||||
{% if location %}
|
||||
📍 {{ location }}
|
||||
|
||||
{% endif %}
|
||||
{{mobilizon_link}}
|
||||
🔗 {{mobilizon_link}}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
# {{ name }}
|
||||
🕒 {{ begin_datetime.format('DD MMMM, HH:mm') }} - {{ end_datetime.format('DD MMMM, HH:mm') }}
|
||||
{{ name }}
|
||||
🕒 {{ begin_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }} - {{ end_datetime.to('local').format('DD MMMM, HH:mm', locale=locale) }}
|
||||
{% if location %}
|
||||
📍 {{ location }}
|
||||
{% endif %}
|
||||
🔗 {{mobilizon_link}}
|
||||
🔗 {{mobilizon_link}}
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue