Compare commits

..

No commits in common. "legacy_python" and "master" have entirely different histories.

26 changed files with 5148 additions and 1080 deletions

View file

@ -1,6 +1,17 @@
HELLOASSO_API_CLIENT_ID=XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX HELLOASSO_EMAIL=user@example.org
HELLOASSO_API_CLIENT_SECRET=XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX HELLOASSO_PASSWORD=example
PAHEKO_API_CLIENT_ID=XXXXXXXXXX HELLOASSO_ORGANIZATION_SLUG="my-organization"
PAHEKO_API_CLIENT_SECRET=XXXXXXXXXXXXXXXXXXXXXX HELLOASSO_FORM_NAME="2023-2024"
PAHEKO_BASE_URL=http://paheko.example.org/api/
PAHEKO_CLIENT_ID=XXX
PAHEKO_CLIENT_SECRET=XXX
PAHEKO_TARGET_SUBSCRIPTION_NAME="Cotisation 2023-2024"
PAHEKO_ACCOUNTING_YEAR_ID=1
PAHEKO_PROXY=http://paheko.example.org
HELLOASSO_PROXY=https://localhost:8802

181
.gitignore vendored
View file

@ -1,179 +1,4 @@
# Created by https://www.toptal.com/developers/gitignore/api/python /target
# Edit at https://www.toptal.com/developers/gitignore?templates=python
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env .env
.venv log.log
env/ tmp
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
poetry.toml
# ruff
.ruff_cache/
# LSP config files
pyrightconfig.json
# End of https://www.toptal.com/developers/gitignore/api/python
tmp/
dev_inputs/

2945
Cargo.lock generated Normal file

File diff suppressed because it is too large Load diff

30
Cargo.toml Normal file
View file

@ -0,0 +1,30 @@
[package]
name = "paheko_helloasso_adapter_rs"
version = "0.1.0"
edition = "2021"
[dependencies]
clap = { version = "4.0.32", features = ["derive"] }
serde = { version = "1.0", features = ["derive"]}
serde_json = "1.0"
reqwest = { version = "0.11", features = ["json", "multipart", "stream"] }
tokio = { version = "1", features = ["full"] }
url = "2.4.1"
xdg = "2.5.2"
anyhow = "1.0.75"
thiserror = "1.0" # macro to derive Error
surf = "2.3.2"
chrono = { version = "0.4.31", features = ["serde"] }
envy = "0.4.2"
strum = { version = "0.25", features = ["derive"] }
dotenvy = "0.15.7"
rand = "0.8.5"
phonenumber = "0.3.3"
email_address = "0.2"
fully_pub = "0.1.4"
base64_light = "0.1.4"
csv = "1.3.0"
argh = "0.1.12"
strsim = "0.11.0"
async-recursion = "1.0.5"
serde_variant = "0.1.3"

View file

@ -1,10 +1,74 @@
# Adapteur Helloasso vers paheko # Paheko Adapter
## Design de l'algo pour MAJ [HelloAsso](https://www.helloasso.com/) <-> [Paheko](https://paheko.cloud/)
CSV <-> [Paheko](https://paheko.cloud/)
- on regarde quelle est la période d'adhesion en cours The goal is to provide a way for organizations to import members and memberships data into the Paheko software from Helloasso or from CSV.
- on regarde si le member existe déjà à partir de son email
- si non, on ajoute le member **WARNING: This is a very custom script adapted for my environment and requirements**, but I plan to make it more flexible in the future.
- ensuite on regarde si le membre a déjà adhéré pour cette période
- on assert également si il n'y a pas un id existant helloasso (sanity check) And with some specifics features:
- sinon, on ajoute la cotisation pour cette année - clean-up and normalization of data
- handle subscriptions and activities
- handle linked users
- handle additional donations
- implement additional custom fields
- WIP: update the data without duplication.
IO:
- read from stdin for the CSV
- use the Helloasso unofficial API to read data
- use the [Paheko API](https://paheko.cloud/api)
Written in the Rust programming language.
## Getting started
Create a `.env` file from `.env.example` and fill in some secrets.
Run the program `dotenv cargo run`
## Fonctionnement
Arbre de décision au moment de l'import :
- On va déjà récupérer la liste des utilisateurs en mode "summary" UserSummary` cad avec juste l'id, l'email, le prénom et le nom.
- Ensuite on va récupérer la liste des transactions avec helloasso pour cette période comptable
- ce qui compte c'est d'avoir la référence comptable
- on peut faire une req sql qui filtre sur les id ou alors sur un compte
- using accounting/years/{ID_YEAR}/account/journal (GET)
- ou alors req SQL
- On va créer une liste de réponse helloasso à traiter en filtrant les "réponses" d'helloasso. Pour chaque réponse, si l'id helloasso de cette réponse est trouvé dans la liste récupéré avant sur Paheko, alors on l'ignore. Sinon on garde
- Pour chaque réponse à traiter
- On va regarder si l'id de la réponse est trouvé dans une écriture comptable, si oui on ignore
- cela veut dire que la personne est déjà inscrite pour ajd
- On va regarder si l'email est déjà présent dans la liste des adhérents ou ancien adhérents (à N-1)
- Si non, on créé l'utilisateur
- Si oui, on ne créé pas d'utilisateur, on récupère l'id
- On va créer une activité pour cet utilisateur
- On va créer une écriture comptable, le numéro de la pièce comptable étant "HA/{ID_ANSWER}" (à la base je voulais le faire en JSON pour plus de détails, mais c'est trop gros)
PB: l'API permet d'importer des activités, mais pas de lier une écriture comptable
ya une table `acc_transactions_users` qui permet de lier une transaction avec une activité d'un utilisateur
## Import from CSV
```
xlsx2csv -n "Adhérents PAPIER" ./wip_matthieu_b.xlsx > ./to_import.csv
```
```
cat ./tmp/adhesions_papier_nov2023.csv | cargo run -- --source csv
```
```
cat ~/.mnt/etoiledebethleem/copyparty/adhesions/to_import.csv | head -n 25 | sed ':a;N;$!ba;s/\(Champ complémentaire [0-9]\)\n/\1 /g' | sed 's/Champ complémentaire \([0-9]\)/CC \1/g'
```
## Run in prod
```
cargo run -- --source helloasso | tee ./.out/2024-01-21_2_ha.log
cargo run -- --source helloasso | tee ./.out/2024-01-21_2_ha.log
```

64
TODO.md Normal file
View file

@ -0,0 +1,64 @@
Following the dev
like rossman said, you need to split up things
# todos
- Normalize cities
- Verify postal code
- Normalize first name and last name
- Check if a user already exists by quering the first and last name with string distance
- use `strsim` lib
- Lint, format code
- Remove uneeded deps
- find a way to export excel sheet from CSV
- `libreoffice --headless --convert-to 'csv' --outdir csv_exports ./ADH\ 02\ 01\ 2024.xlsx`
# schedule
## 2023-12-23
- have a rust client to fetch list of users from sql
## 2023-12-27
- Normalize address, cities
all of the normalization work should not have been done here,
we should have created our own platform so people can register and pay later (either in cash, or using 3rd party payment like stripe)
## 2023-12-28
TODO:
- more configuration options
- summary of the operations at the end of run
- how many users were added, muted?
- conjoined user: add attached member to paheko
- "Membre lié"
- is this kind of thing even accessible on the API-level ?
- better error handling & report to the user
- handle import error
- handle name of the service or service fee not found
- BUG: quand l'utilisateur est déjà créé, ya un problème d'ID, le user summary n'a pas le bon id, il faut le populer depuis ce qu'on a déjà fetch
## 2024-01-11
- automatically find the tresorerie exercice based on the date of the transaction
query all subscriptions of user byu service label
curl -u $PAHEKO_CLIENT_ID:$PAHEKO_CLIENT_SECRET http://localhost:8082/api/sql -d "SELECT su.id_user,su.date FROM services_users AS su JOIN services AS s ON su.id_service = s.id WHERE s.label = 'Cotisation 2023-2024';"
## 2024-01-20
- handle additional donation amount
## 2024-01-21
- to sort members by date, process older answers first

View file

@ -1,142 +0,0 @@
import uuid
import csv
import fileinput
from datetime import datetime
from pprint import pprint
from dataclasses import dataclass
import json
INPUT_FIELDS_MAPPING = {
"last_name": "Nom",
"first_name_1": "Prénom",
"date": "\ufeffDate",
"email_1": "Email acheteur",
"first_name_2": "Champ complémentaire 1 Prénom conjoint",
"address": "Champ complémentaire 2 ADRESSE",
"postal_code": "Champ complémentaire 3 CODE POSTAL",
"city": "Champ complémentaire 4 VILLE",
"birth_date": "Champ complémentaire 9 DATE DE NAISSANCE",
"email_2": "Champ complémentaire 6 EMAIL",
"job": "Champ complémentaire 7 PROFESSION",
"skills": "Champ complémentaire 8 CENTRE D'INTÉRÊTS / COMPÉTENCES",
"payment_method": "Moyen de paiement",
"status": "Status",
"tarif": "Tarif",
}
def get_matching_keys(src_dict, value):
return [k for k,v in src_dict.items() if v == value]
def load_csv_row_to_dict(field_mapping, first_row, row):
final_dict = {}
for i, cell in enumerate(row):
keys = get_matching_keys(field_mapping, first_row[i])
if len(keys) == 0: continue
final_dict[keys[0]] = cell
return final_dict
def get_id():
return str(uuid.uuid4()).split("-")[0]
@dataclass
class PahekoMember:
name: str
email: str
phone: str
status: str
def import_data():
reader = csv.reader([i for i in fileinput.input()], delimiter=";")
column_line = []
paheko_users = []
for i, line in enumerate(reader):
print(i)
if i == 0:
column_line = line
continue
ha_membership: dict = load_csv_row_to_dict(INPUT_FIELDS_MAPPING, column_line, line)
def get_email(ha_ms):
if ha_ms["email_1"] == None: return ha_ms["email_2"]
if ha_ms["email_2"] == None: return ha_ms["email_1"]
if ha_ms['email_2'] != ha_ms['email_2']:
return ha_ms["email_2"]
return ha_ms["email_1"]
def format_date_time_french(raw_date):
return datetime.strptime(raw_date, "%d/%m/%Y %H:%M:%S")
def format_date_french(raw_date):
return datetime.strptime(raw_date, "%d/%m/%Y")
def format_string(subj):
return subj.strip()
def format_name(subj):
return subj[0:1].upper() + subj[1:].lower()
def format_mode(subj):
return subj
if format_string(ha_membership['status']) != "Validé":
continue
# then normalize dict
paheko_user: dict = {
'id': get_id(),
'first_name': format_name(format_string(ha_membership['first_name_1'])),
'last_name': format_name(format_string(ha_membership['last_name'])),
'mode_adhesion': format_mode(ha_membership['tarif']),
'email': format_string(get_email(ha_membership)),
'date': format_date_time_french(ha_membership['date']),
'birth_date': format_date_french(ha_membership['birth_date']) if ha_membership['birth_date'] and ha_membership['birth_date'].strip() != '' else None,
'linked_users': []
}
keys_to_copy = ['job', 'skills', 'address', 'postal_code', 'city']
for key in keys_to_copy:
if ha_membership[key].strip() == '':
paheko_user[key] = None
continue
paheko_user[key] = format_string(ha_membership[key])
linked_user = None
if ha_membership["first_name_2"].strip() != '':
# we count as two membership
linked_user = {
'id': get_id(),
'first_name': format_name(format_string(ha_membership['first_name_2'])),
'linked_users': [paheko_user['id']]
}
copy_from_parent_user = ['last_name', 'address', 'postal_code', 'city', 'date']
for k in copy_from_parent_user:
linked_user[k] = paheko_user[k]
paheko_user["linked_users"].append(linked_user['id'])
paheko_users.append(paheko_user)
if linked_user:
paheko_users.append(linked_user)
# pprint(paheko_users, sort_dicts=False)
print(json.dumps(paheko_users, sort_keys=True, default=str))
"""
Une fois qu'on est bon au niveau de la liste d'adhesion qu'on veut ajouter
on regarde si on a pas déjà les adhesions dans helloasso
on télécharge tout les adherents et leurs participations à des activités et cotisations
on regarde les adherents sur leur addresse emails
on regarde si l'id de payment n'a pas déjà été traité
on regarde quelle est la période d'adhesion en cours
première étape:
on regarde si le member existe déjà à partir de son email
si non, on ajoute le member
ensuite on regarde si le membre a déjà adhéré pour cette période
on regarde également si il n'y a pas un id existant helloasso (sanity check)
si oui, on discard
sinon, on ajoute la cotisation pour cette année
"""
import_data()

View file

@ -1,141 +0,0 @@
import uuid
import csv
import fileinput
from datetime import datetime
from dataclasses import dataclass
import json
INPUT_FIELDS_MAPPING = {
"last_name": "Nom",
"first_name_1": "Prénom",
"date": "\ufeffDate",
"email_1": "Email acheteur",
"first_name_2": "Champ complémentaire 1 Prénom conjoint",
"address": "Champ complémentaire 2 ADRESSE",
"postal_code": "Champ complémentaire 3 CODE POSTAL",
"city": "Champ complémentaire 4 VILLE",
"birth_date": "Champ complémentaire 9 DATE DE NAISSANCE",
"email_2": "Champ complémentaire 6 EMAIL",
"job": "Champ complémentaire 7 PROFESSION",
"skills": "Champ complémentaire 8 CENTRE D'INTÉRÊTS / COMPÉTENCES",
"payment_method": "Moyen de paiement",
"status": "Status",
"tarif": "Tarif",
}
def get_matching_keys(src_dict, value):
return [k for k,v in src_dict.items() if v == value]
def load_csv_row_to_dict(field_mapping, first_row, row):
final_dict = {}
for i, cell in enumerate(row):
keys = get_matching_keys(field_mapping, first_row[i])
if len(keys) == 0: continue
final_dict[keys[0]] = cell
return final_dict
def get_id():
return str(uuid.uuid4()).split("-")[0]
@dataclass
class PahekoMember:
name: str
email: str
phone: str
status: str
def from_helloasso_members_csv_to_paheko_normalized():
reader = csv.reader([i for i in fileinput.input()], delimiter=";")
column_line = []
paheko_users = []
for i, line in enumerate(reader):
if i == 0:
column_line = line
continue
ha_membership: dict = load_csv_row_to_dict(INPUT_FIELDS_MAPPING, column_line, line)
def get_email(ha_ms):
if ha_ms["email_1"] == None: return ha_ms["email_2"]
if ha_ms["email_2"] == None: return ha_ms["email_1"]
if ha_ms['email_2'] != ha_ms['email_2']:
return ha_ms["email_2"]
return ha_ms["email_1"]
def format_date_time_french(raw_date):
return datetime.strptime(raw_date, "%d/%m/%Y %H:%M:%S")
def format_date_french(raw_date):
return datetime.strptime(raw_date, "%d/%m/%Y")
def format_string(subj):
return subj.strip()
def format_name(subj):
return subj[0:1].upper() + subj[1:].lower()
def format_mode(subj):
return subj
if format_string(ha_membership['status']) != "Validé":
continue
# then normalize dict
paheko_user: dict = {
'id': get_id(),
'first_name': format_name(format_string(ha_membership['first_name_1'])),
'last_name': format_name(format_string(ha_membership['last_name'])),
'mode_adhesion': format_mode(ha_membership['tarif']),
'email': format_string(get_email(ha_membership)),
'date': format_date_time_french(ha_membership['date']),
'birth_date': format_date_french(ha_membership['birth_date']) if ha_membership['birth_date'] and ha_membership['birth_date'].strip() != '' else None,
'linked_users': []
}
keys_to_copy = ['job', 'skills', 'address', 'postal_code', 'city']
for key in keys_to_copy:
if ha_membership[key].strip() == '':
paheko_user[key] = None
continue
paheko_user[key] = format_string(ha_membership[key])
linked_user = None
if ha_membership["first_name_2"].strip() != '':
# we count as two membership
linked_user = {
'id': get_id(),
'first_name': format_name(format_string(ha_membership['first_name_2'])),
'linked_users': [paheko_user['id']]
}
copy_from_parent_user = ['last_name', 'address', 'postal_code', 'city', 'date']
for k in copy_from_parent_user:
linked_user[k] = paheko_user[k]
paheko_user["linked_users"].append(linked_user['id'])
paheko_users.append(paheko_user)
if linked_user:
paheko_users.append(linked_user)
# pprint(paheko_users, sort_dicts=False)
print(json.dumps(paheko_users, sort_keys=True, default=str, indent=4))
from pprint import pprint
from helloasso_paheko_adapter.helloasso import HelloassoClient, Organization
from helloasso_paheko_adapter.paheko import PahekoClient
import vcr
def from_helloasso_payments_api_to_paheko(env, org_slug):
ha_client = HelloassoClient(env.HELLOASSO_API_CLIENT_ID, env.HELLOASSO_API_CLIENT_SECRET)
pk_client = PahekoClient(env.PAHEKO_API_CLIENT_ID, env.PAHEKO_API_CLIENT_SECRET)
ha_org = Organization(ha_client, org_slug)
# 1. get latest adhesion periode
period_id = "1" # or "fee_id"
print(pk_client.get_current_membership_period())
# 2. list payments
with vcr.use_cassette('tmp/vcr_cassettes/list_payments.yaml'):
payments = ha_org.list_payments()['data']
pprint(payments)

View file

@ -1,9 +0,0 @@
from helloasso_paheko_adapter.adapter import from_helloasso_members_csv_to_paheko_normalized
from helloasso_paheko_adapter.adapter import from_helloasso_payments_api_to_paheko
from helloasso_paheko_adapter.env import Env
def main():
# from_helloasso_members_csv_to_paheko_normalized()
env = Env()
org_slug = "l-etoile-de-bethleem-association-des-amis-de-la-chapelle-de-bethleem-d-aubevoye"
from_helloasso_payments_api_to_paheko(env, org_slug)

View file

@ -1,19 +0,0 @@
import os
class Env:
HELLOASSO_API_CLIENT_ID: str
HELLOASSO_API_CLIENT_SECRET: str
PAHEKO_API_CLIENT_ID: str
PAHEKO_API_CLIENT_SECRET: str
def __init__(self):
self.load_from_process_env()
def load_from_process_env(self):
attr_keys = list(self.__class__.__dict__.keys()) + list(self.__annotations__.keys())
for k in attr_keys:
if k.startswith('__'): continue
from_env = None
if k in os.environ:
from_env = os.environ[k]
setattr(self, k, from_env)

View file

@ -1,43 +0,0 @@
from requests_oauth2client import OAuth2Client, OAuth2ClientCredentialsAuth, ApiClient
class HelloassoAppClient:
def __init__(self, client_id, client_secret):
self.client_id = client_id
self.client_secret = client_secret
self.oauth2client = OAuth2Client(
token_endpoint="https://api.helloasso.com/oauth2/token",
client_id=self.client_id,
client_secret=self.client_secret
)
self.auth = OAuth2ClientCredentialsAuth(self.oauth2client)
self.client = ApiClient(
"https://api.helloasso.com/v5",
auth=self.auth
)
class HelloassoUserClient:
def __init__(self, email, password):
self.email = email
self.password = password
self.client = ApiClient(
"https://api.helloasso.com/v5",
headers = {"User-Agent": "Mozilla/5.0", "accept-language": "en-US,en"}
)
def login(self):
self.client.post("/auth/login", data={})
class Organization():
client: HelloassoClient
slug: str
def __init__(self, client, slug):
self.client = client
self.slug = slug
def list_payments(self):
res = self.client.client.get(f"/organizations/{self.slug}/payments")
# FIXME : ahndle pagination , cannot test right now because of not enought ppl
return res.json()

View file

@ -1,80 +0,0 @@
import requests
from datetime import datetime
import json
from requests.auth import HTTPBasicAuth
from requests_oauth2client import ApiClient
class PahekoClient:
BASE_URL = "https://paheko.etoiledebethleem.fr/api"
def __init__(self, client_id: str, client_secret: str):
self.auth = HTTPBasicAuth(client_id, client_secret)
self.client = ApiClient(
self.BASE_URL,
auth=self.auth
)
def sql_query(self, q):
res = self.client.post("/sql", data=q)
if res.status_code != 200:
raise ValueError("Failed to request data to Paheko API")
c = res.content.decode()
# skip first 2 lines
new_json = "[" + "\n".join(c.split("\n")[3:]) + "]"
return json.loads(new_json)
def get_services(self):
return self.sql_query('SELECT * FROM services LIMIT 5;')
def get_members(self):
return self.sql_query('SELECT * FROM services LIMIT 5;')
def get_current_membership_period(self):
services = self.get_services()
for s in services:
sd = datetime.fromisoformat(s['start_date'])
ed = datetime.fromisoformat(s['end_date'])
if sd < datetime.now() < ed:
return {
**s,
'start_date': sd,
'end_date': ed
}
return None
from dataclasses import dataclass
from typing import List, Literal, Optional, Union
class Membership:
ha_payment_id: str
ha_order_id: str
pass
class IndividualMembership(Membership):
pass
class CoupleMembership(Membership):
# relationship membership
partner_id: str
@dataclass
class PahekoMember:
tmp_id: str
paheko_id: str
first_name: str
last_name: str
address: str
postal_code: str
city: str
email: str
phone: str
birth_date: str
occupation: str
skills: str
memberships: List[Membership]

View file

@ -1,43 +0,0 @@
import os
from paheko_helloasso_adapter.helloasso import HelloassoClient, Organization
from pprint import pprint
class Env:
HELLOASSO_API_CLIENT_ID: str
HELLOASSO_API_CLIENT_SECRET: str
def __init__(self):
attr_keys = list(self.__class__.__dict__.keys()) + list(self.__annotations__.keys())
for k in attr_keys:
if k.startswith('__'): continue
from_env = None
if k in os.environ:
from_env = os.environ[k]
setattr(self, k, from_env)
def main():
env = Env()
# api_client = ApiV5Client(
# api_base='api.helloasso.com',
# client_id=env.HELLOASSO_API_CLIENT_ID,
# client_secret=env.HELLOASSO_API_CLIENT_SECRET,
# timeout=60
# )
# class OrganizationApi(object):
# def __init__(self, client):
# self._client = client
# def list(self) -> dict:
# return self._client.call(f"/organizations").json()
# def get_by_slug(self, slug: str) -> dict:
# return self._client.call(f"/organizations/{slug}").json()
# org_api = OrganizationApi(api_client)
org_slug = "l-etoile-de-bethleem-association-des-amis-de-la-chapelle-de-bethleem-d-aubevoye"
client = HelloassoClient(env.HELLOASSO_API_CLIENT_ID, env.HELLOASSO_API_CLIENT_SECRET)
o = Organization(client, org_slug)
pprint(o.list_payments())

393
poetry.lock generated
View file

@ -1,393 +0,0 @@
# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
[[package]]
name = "binapy"
version = "0.7.0"
description = "Binary Data manipulation, for humans."
optional = false
python-versions = ">=3.8"
files = [
{file = "binapy-0.7.0-py3-none-any.whl", hash = "sha256:739cd5bebd52715b8c8face6ff815bf5798306cf276b392e959ada85b9a9bee6"},
{file = "binapy-0.7.0.tar.gz", hash = "sha256:e26f10ec6566a670e07dcc9de4c223be60984a7b1a2e5436b7eb6555f1d9d23b"},
]
[package.dependencies]
typing-extensions = ">=4.3.0"
[[package]]
name = "certifi"
version = "2023.7.22"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
{file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"},
{file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
]
[[package]]
name = "cffi"
version = "1.16.0"
description = "Foreign Function Interface for Python calling C code."
optional = false
python-versions = ">=3.8"
files = [
{file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"},
{file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"},
{file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"},
{file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"},
{file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"},
{file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"},
{file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"},
{file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"},
{file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"},
{file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"},
{file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"},
{file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"},
{file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"},
{file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"},
{file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"},
{file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"},
{file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"},
{file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"},
{file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"},
{file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"},
{file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"},
{file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"},
{file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"},
{file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"},
{file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"},
{file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"},
{file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"},
{file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"},
{file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"},
{file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"},
{file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"},
{file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"},
{file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"},
{file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"},
{file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"},
{file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"},
{file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"},
{file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"},
{file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"},
{file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"},
{file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"},
{file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"},
{file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"},
{file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"},
{file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"},
{file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"},
{file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"},
{file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"},
{file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"},
{file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"},
{file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"},
{file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
]
[package.dependencies]
pycparser = "*"
[[package]]
name = "charset-normalizer"
version = "3.3.0"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7.0"
files = [
{file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"},
{file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"},
{file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"},
{file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"},
{file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"},
{file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"},
{file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"},
{file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"},
{file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"},
{file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"},
{file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"},
{file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"},
{file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"},
{file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"},
{file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"},
{file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"},
{file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"},
{file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"},
{file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"},
{file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"},
{file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"},
{file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"},
{file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"},
{file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"},
{file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"},
{file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"},
{file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"},
{file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"},
{file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"},
{file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"},
{file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"},
{file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"},
{file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"},
{file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"},
{file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"},
{file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"},
{file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"},
{file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"},
{file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"},
{file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"},
{file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"},
{file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"},
{file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"},
{file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"},
{file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"},
{file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"},
{file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"},
{file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"},
{file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"},
{file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"},
{file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"},
{file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"},
{file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"},
{file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"},
{file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"},
{file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"},
{file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"},
{file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"},
{file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"},
{file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"},
{file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"},
{file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"},
{file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"},
{file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"},
{file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"},
{file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"},
{file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"},
{file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"},
{file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"},
{file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"},
{file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"},
{file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"},
{file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"},
{file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"},
{file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"},
{file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"},
{file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"},
{file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"},
{file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"},
{file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"},
{file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"},
{file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"},
{file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"},
{file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"},
{file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"},
{file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"},
{file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"},
{file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"},
{file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"},
{file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"},
]
[[package]]
name = "cryptography"
version = "41.0.4"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = ">=3.7"
files = [
{file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"},
{file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"},
{file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"},
{file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"},
{file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"},
{file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"},
{file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"},
{file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"},
{file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"},
{file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"},
{file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"},
{file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"},
{file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"},
{file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"},
{file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"},
{file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"},
{file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"},
{file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"},
{file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"},
{file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"},
{file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"},
{file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"},
{file = "cryptography-41.0.4.tar.gz", hash = "sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"},
]
[package.dependencies]
cffi = ">=1.12"
[package.extras]
docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
nox = ["nox"]
pep8test = ["black", "check-sdist", "mypy", "ruff"]
sdist = ["build"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test-randomorder = ["pytest-randomly"]
[[package]]
name = "distance"
version = "0.1.3"
description = "Utilities for comparing sequences"
optional = false
python-versions = "*"
files = [
{file = "Distance-0.1.3.tar.gz", hash = "sha256:60807584f5b6003f5c521aa73f39f51f631de3be5cccc5a1d67166fcbf0d4551"},
]
[[package]]
name = "furl"
version = "2.1.3"
description = "URL manipulation made simple."
optional = false
python-versions = "*"
files = [
{file = "furl-2.1.3-py2.py3-none-any.whl", hash = "sha256:9ab425062c4217f9802508e45feb4a83e54324273ac4b202f1850363309666c0"},
{file = "furl-2.1.3.tar.gz", hash = "sha256:5a6188fe2666c484a12159c18be97a1977a71d632ef5bb867ef15f54af39cc4e"},
]
[package.dependencies]
orderedmultidict = ">=1.0.1"
six = ">=1.8.0"
[[package]]
name = "idna"
version = "3.4"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
files = [
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]
[[package]]
name = "jwskate"
version = "0.10.0"
description = "A Pythonic implementation of the JOSE / JSON Web Crypto related RFCs (JWS, JWK, JWA, JWT, JWE)"
optional = false
python-versions = ">=3.8"
files = [
{file = "jwskate-0.10.0-py3-none-any.whl", hash = "sha256:c9ffcefe0e4bb04d2ef7251fbd26edbdf2bbe766e88447b35ed70abda7e0b319"},
{file = "jwskate-0.10.0.tar.gz", hash = "sha256:9412043092786c6e029931427ec7dd01503802c4d4867846febdc4a704a12400"},
]
[package.dependencies]
binapy = ">=0.7"
cryptography = ">=3.4"
typing-extensions = ">=4.3"
[[package]]
name = "orderedmultidict"
version = "1.0.1"
description = "Ordered Multivalue Dictionary"
optional = false
python-versions = "*"
files = [
{file = "orderedmultidict-1.0.1-py2.py3-none-any.whl", hash = "sha256:43c839a17ee3cdd62234c47deca1a8508a3f2ca1d0678a3bf791c87cf84adbf3"},
{file = "orderedmultidict-1.0.1.tar.gz", hash = "sha256:04070bbb5e87291cc9bfa51df413677faf2141c73c61d2a5f7b26bea3cd882ad"},
]
[package.dependencies]
six = ">=1.8.0"
[[package]]
name = "pycparser"
version = "2.21"
description = "C parser in Python"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
{file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
{file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
]
[[package]]
name = "requests"
version = "2.31.0"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.7"
files = [
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
]
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<3"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "requests-oauth2client"
version = "1.3.0"
description = "An OAuth2.x Client based on requests."
optional = false
python-versions = ">=3.8"
files = [
{file = "requests_oauth2client-1.3.0-py3-none-any.whl", hash = "sha256:f9324af3077503636c52e3d8aefdf2012944680f91e2534dcbe346af4cef510f"},
{file = "requests_oauth2client-1.3.0.tar.gz", hash = "sha256:36557bc20aeaf9b4bf4319c14487885622b7cfbe867415ade38aecf6f4fe6eab"},
]
[package.dependencies]
binapy = ">=0.7"
furl = ">=2.1.2"
jwskate = ">=0.9"
requests = ">=2.19.0"
[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
[[package]]
name = "typing-extensions"
version = "4.8.0"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
{file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"},
{file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"},
]
[[package]]
name = "urllib3"
version = "2.0.5"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.7"
files = [
{file = "urllib3-2.0.5-py3-none-any.whl", hash = "sha256:ef16afa8ba34a1f989db38e1dbbe0c302e4289a47856990d0682e374563ce35e"},
{file = "urllib3-2.0.5.tar.gz", hash = "sha256:13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594"},
]
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "41477473248081b8e4e320eefda836a1e74238b299afdc9431bda9d7331908c5"

View file

@ -1,20 +0,0 @@
[tool.poetry]
name = "paheko-helloasso-adapter"
version = "0.1.0"
description = ""
authors = ["Matthieu Bessat <mail@matthieubessat.fr>"]
readme = "README.md"
[tool.poetry.dependencies]
python = "^3.11"
distance = "^0.1.3"
requests = "^2.31.0"
requests-oauth2client = "^1.3.0"
[tool.poetry.scripts]
adapter = "paheko_helloasso_adapter.cli:main"
ha-sandbox = "paheko_helloasso_adapter.sandbox.sandbox1:main"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

364
src/helloasso.rs Normal file
View file

@ -0,0 +1,364 @@
use anyhow::{Context, Result, anyhow};
use url::Url;
use serde::{Serialize, Deserialize};
use fully_pub::fully_pub;
use chrono::prelude::{DateTime, Utc};
use crate::utils::deserialize_datetime;
use thiserror::Error;
/// Errors surfaced by the HelloAsso HTTP client.
#[derive(Error, Debug)]
enum APIClientError {
    #[error("Received non-normal status code from API")]
    InvalidStatusCode
}
/// An authenticated HelloAsso web session: the JWT extracted from the
/// `tm5-HelloAsso` cookie at login. Serializable so it can be cached on disk.
#[fully_pub]
#[derive(Clone, Serialize, Deserialize, Debug)]
struct WebSession {
    jwt: String
}
/// Connection settings for the HelloAsso API client.
#[derive(Debug, Clone)]
#[fully_pub]
struct ClientConfig {
    base_url: Url,
    proxy: Option<reqwest::Proxy>,
    user_agent: String
}
impl Default for ClientConfig {
    fn default() -> Self {
        ClientConfig {
            proxy: None,
            // the trailing slash is important: Url::join would otherwise
            // replace the last path segment instead of appending to it
            base_url: Url::parse("https://api.helloasso.com/v5/")
                .expect("Expected valid helloasso API base URL"),
            user_agent: "".to_string()
        }
    }
}
/// Credentials POSTed to `auth/login`.
#[derive(Serialize, Debug)]
#[fully_pub]
struct LoginPayload {
    email: String,
    password: String
}
/// Unauthenticated HelloAsso client; call `login` to obtain an
/// `AuthentifiedClient`.
#[derive(Debug)]
#[fully_pub]
struct Client {
    client: reqwest::Client,
    config: ClientConfig
}
impl Default for Client {
fn default() -> Self {
let base_config: ClientConfig = Default::default();
Client {
client: Client::get_base_client_builder(&base_config)
.build()
.expect("Expected reqwest client to be built"),
config: base_config
}
}
}
impl Client {
    /// Build a client from explicit connection settings.
    pub fn new(config: ClientConfig) -> Client {
        Client {
            client: Client::get_base_client_builder(&config)
                .build()
                .expect("Expected reqwest client to be built"),
            config
        }
    }

    /// Shared reqwest builder: JSON Accept header, configured User-Agent and
    /// the optional proxy. Also used by `AuthentifiedClient::new`.
    fn get_base_client_builder(config: &ClientConfig) -> reqwest::ClientBuilder {
        let mut default_headers = reqwest::header::HeaderMap::new();
        default_headers.insert("Accept", "application/json".parse().unwrap());
        default_headers.insert("User-Agent", config.user_agent.parse().unwrap());
        let mut builder = reqwest::Client::builder()
            .default_headers(default_headers);
        if let Some(proxy) = &config.proxy {
            builder = builder.proxy(proxy.clone());
        }
        builder
    }

    /// Log in with email/password and return an authenticated client.
    ///
    /// Flow: GET a CSRF token from `auth/antiforgerytoken`, POST the
    /// credentials to `auth/login` with that token, then extract the JWT from
    /// the `tm5-HelloAsso` Set-Cookie header of the response.
    pub async fn login(&mut self, payload: LoginPayload) -> Result<AuthentifiedClient> {
        let mut login_commons_headers = reqwest::header::HeaderMap::new();
        // the auth endpoints expect a browser-like Origin header
        login_commons_headers.insert(
            "Origin",
            "https://auth.helloasso.com".parse().expect("Header value to be OK")
        );
        let res = self.client.get(self.config.base_url.join("auth/antiforgerytoken")?)
            .headers(login_commons_headers.clone())
            .send().await?;
        let antiforgerytoken: String = res.json().await?;
        let res = self.client.post(self.config.base_url.join("auth/login")?)
            .json(&payload)
            .headers(login_commons_headers.clone())
            .header("x-csrf-token", antiforgerytoken)
            .send()
            .await?;
        if res.status() != 200 {
            return Err(anyhow!("Unexpected status code from login"));
        }
        // Scan every Set-Cookie header for `tm5-HelloAsso=<jwt>; ...`.
        fn get_jwt_from_cookies_headers(headers: &reqwest::header::HeaderMap) -> Option<String> {
            for (name_opt, value_raw) in headers {
                let name = String::from(name_opt.as_str());
                if name.to_lowercase() != "set-cookie" {
                    continue
                }
                let value = String::from(value_raw.to_str().unwrap());
                if value.starts_with("tm5-HelloAsso") {
                    let jwt = value.split("tm5-HelloAsso=").nth(1)?.split(';').next()?.trim().to_string();
                    return Some(jwt);
                }
            }
            None
        }
        let jwt = get_jwt_from_cookies_headers(res.headers())
            .context("Failed to find or parse JWT from login response")?;
        let session = WebSession { jwt };
        Ok(self.authentified_client(session))
    }

    /// Wrap an existing (e.g. cached) session into an authenticated client.
    pub fn authentified_client(&self, session: WebSession) -> AuthentifiedClient {
        AuthentifiedClient::new(self.config.clone(), session)
    }
}
/// HelloAsso client that sends `Authorization: Bearer <jwt>` on every request.
#[derive(Debug, Clone)]
#[fully_pub]
struct AuthentifiedClient {
    session: WebSession,
    client: reqwest::Client,
    config: ClientConfig
}
/// Pagination metadata returned by HelloAsso list endpoints; the
/// `continuation_token` is fed back as a query parameter to get the next page.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[fully_pub]
struct PaginationMeta {
    continuation_token: String,
    page_index: u64,
    page_size: u64,
    total_count: u64,
    total_pages: u64
}
/// Envelope of a paginated response: a raw JSON `data` array plus pagination
/// metadata.
#[derive(Debug, Serialize, Deserialize)]
struct PaginationCapsule {
    data: serde_json::Value,
    pagination: PaginationMeta
}
/// One answer to a custom field of a membership form.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[fully_pub]
struct CustomFieldAnswer {
    answer: String,
    id: u64,
    name: String
    // missing type, it's probably always TextInput, if not, serde will fail to parse
}
/// Serde fallback used when a payer record carries no country code.
fn default_country() -> String {
    String::from("FRA")
}
/// Identity of the person who paid; `country` falls back to "FRA" when the
/// API omits it (see `default_country`).
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[fully_pub]
struct PayerUserDetails {
    #[serde(default = "default_country")]
    country: String,
    email: String,
    first_name: String,
    last_name: String
}
/// Identity of the member the form answer is about (may differ from the payer).
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[fully_pub]
struct UserDetails {
    first_name: String,
    last_name: String
}
impl AuthentifiedClient {
    /// each time we need to change the token, we will need to rebuild the client
    pub fn new(config: ClientConfig, session: WebSession) -> Self {
        let mut auth_headers = reqwest::header::HeaderMap::new();
        auth_headers.insert("Authorization", format!("Bearer {}", session.jwt).parse().unwrap());
        AuthentifiedClient {
            session,
            client: Client::get_base_client_builder(&config)
                .default_headers(auth_headers)
                .build()
                .expect("reqwest client to be built"),
            config
        }
    }

    /// Cheap probe: true when the stored JWT is still accepted by the API.
    pub async fn verify_auth(&self) -> Result<bool> {
        let res = self.client
            .get(self.config.base_url.join("agg/user")?)
            .send().await?;
        Ok(res.status() == 200)
    }

    /// Fetch the raw aggregated profile of the logged-in user.
    #[allow(dead_code)]
    pub async fn get_user_details(&self) -> Result<serde_json::Value> {
        let res = self.client
            .get(self.config.base_url.join("agg/user")?)
            .send().await?;
        if res.status() != 200 {
            return Err(APIClientError::InvalidStatusCode.into());
        }
        let user_details: serde_json::Value = res.json().await?;
        Ok(user_details)
    }

    /// GET `path` (relative to the base URL) and return the JSON body.
    async fn simple_fetch(&self, path: String) -> Result<serde_json::Value> {
        let res = self.client
            .get(self.config.base_url.join(path.as_str())?)
            .send().await?;
        if res.status() != 200 {
            return Err(APIClientError::InvalidStatusCode.into());
        }
        let details: serde_json::Value = res.json().await?;
        Ok(details)
    }

    /// GET a paginated collection, following `continuationToken` until the
    /// last page (or an empty page) and concatenating the `data` arrays.
    pub async fn fetch_with_pagination(&self, path: String) -> Result<Vec<serde_json::Value>> {
        let mut data: Vec<serde_json::Value> = vec![];
        let mut continuation_token: Option<String> = None;
        loop {
            let mut url = self.config.base_url.join(path.as_str())?;
            if let Some(token) = &continuation_token {
                url.query_pairs_mut().append_pair("continuationToken", token);
            }
            let res = self.client
                .get(url)
                .send().await?;
            if res.status() != 200 {
                return Err(APIClientError::InvalidStatusCode.into());
            }
            let capsule: PaginationCapsule = res.json().await?;
            // handle pagination
            // merge into "data", "pagination" is the key that hold details
            let page_items = match capsule.data {
                serde_json::Value::Array(inner) => inner,
                _ => {
                    return Err(anyhow!("Unexpected json value in data bundle"));
                }
            };
            if page_items.is_empty() {
                return Ok(data);
            }
            data.extend(page_items);
            if capsule.pagination.page_index == capsule.pagination.total_pages {
                return Ok(data);
            }
            continuation_token = Some(capsule.pagination.continuation_token);
        }
    }

    /// Scope further calls to one organization (identified by its URL slug).
    pub fn organization(&self, slug: &str) -> Organization {
        Organization { client: self.clone(), slug: slug.to_string() }
    }
}
/// Handle on one HelloAsso organization, bound to an authenticated client.
#[derive(Debug, Clone)]
#[fully_pub]
struct Organization {
    client: AuthentifiedClient,
    slug: String
}
/// Order attached to a form answer; its `date` is mapped to `inception_time`.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[fully_pub]
struct OrderDetails {
    id: u64,
    #[serde(deserialize_with = "deserialize_datetime", rename="date")]
    inception_time: DateTime<Utc>
}
/// One payment attached to a form answer.
/// NOTE(review): `amount`/`share_amount` look like integer cents — confirm
/// against the HelloAsso API reference before using them as euros.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[fully_pub]
struct Payment {
    id: u64,
    // the JSON field is `type`, a Rust keyword, hence the rename
    #[serde(rename = "type")]
    extra: Option<String>,
    share_amount: u32,
    amount: u32
}
/// One participant answer to a membership form, with payer, payments,
/// order and custom-field answers.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[fully_pub]
struct FormAnswer {
    amount: u32,
    // the JSON `name` field holds the chosen tariff/mode label
    #[serde(rename = "name")]
    mode: String,
    #[serde(rename = "payer")]
    payer_user: PayerUserDetails,
    payments: Vec<Payment>,
    order: OrderDetails,
    #[serde(rename = "user")]
    user: UserDetails,
    id: u64,
    custom_fields: Vec<CustomFieldAnswer>
}
impl Organization {
    /// Raw organization record from the API.
    #[allow(dead_code)]
    pub async fn get_details(&self) -> Result<serde_json::Value> {
        let details = self.client.simple_fetch(format!("organizations/{}", self.slug)).await?;
        Ok(details)
    }

    /// Fetch every participant answer of a `Membership` form, oldest first.
    pub async fn get_form_answers(&self, form_slug: &str) -> Result<Vec<FormAnswer>> {
        let data = self.client.fetch_with_pagination(
            format!("organizations/{}/forms/Membership/{}/participants?withDetails=true", self.slug, form_slug)
        ).await?;
        let mut answers: Vec<FormAnswer> = vec![];
        for entry in data {
            answers.push(serde_json::from_value(entry).context("Cannot parse FormAnswer")?)
        }
        // sort by date, most older first
        answers.sort_by(|a, b| a.order.inception_time.cmp(&b.order.inception_time));
        Ok(answers)
    }
}

178
src/main.rs Normal file
View file

@ -0,0 +1,178 @@
mod utils;
mod paheko;
mod helloasso;
mod sync_helloasso;
mod sync_csv;
mod sync_paheko;
#[cfg(test)]
mod test_utils;
use thiserror::Error;
use anyhow::{Context, Result, anyhow};
use strum::Display;
use serde::{Serialize, Deserialize};
use url::Url;
use fully_pub::fully_pub;
use argh::FromArgs;
/// permanent config to store long-term config
/// used to ingest env settings
/// config loaded from env variables
///
/// NOTE(review): envy maps each field to the upper-cased env var of the same
/// name (e.g. PAHEKO_TARGET_ACTIVITY_NAME, PAHEKO_ACCOUNTING_YEARS_IDS); the
/// shipped .env.example still uses PAHEKO_TARGET_SUBSCRIPTION_NAME and
/// PAHEKO_ACCOUNTING_YEAR_ID — confirm the example file is up to date.
#[derive(Deserialize, Serialize, Debug)]
#[fully_pub]
struct Config {
    helloasso_proxy: Option<String>,
    helloasso_email: String,
    helloasso_password: String,
    helloasso_organization_slug: String,
    helloasso_form_name: String,
    paheko_proxy: Option<String>,
    // must end with a trailing slash (checked in launch_adapter)
    paheko_base_url: String,
    paheko_client_id: String,
    paheko_client_secret: String,
    paheko_target_activity_name: String,
    paheko_accounting_years_ids: Vec<u32>,
}
// start user cache management
use std::fs;
/// On-disk cache (XDG cache dir) holding the last HelloAsso session so we can
/// skip a fresh login when the JWT is still valid.
#[derive(Serialize, Deserialize, Debug)]
#[fully_pub]
struct UserCache {
    helloasso_session: Option<helloasso::WebSession>
}
/// Failure modes of the user-cache load/store helpers; variant names are
/// rendered in snake_case by strum's `Display`.
#[derive(Display, Debug, Error)]
#[strum(serialize_all = "snake_case")]
enum LoadError {
    XDG,
    Fs,
    FailedToParse,
    FailedToEncode,
    FailedToCreate,
    FailedToWrite
}
const APP_USER_AGENT: &str = "helloasso_paheko_adapter";
/// Persist the cache as JSON under `$XDG_CACHE_HOME/<pkg>/session.json`.
fn write_user_cache(cache: &UserCache) -> Result<(), LoadError> {
    let xdg_dirs = xdg::BaseDirectories::with_prefix(env!("CARGO_PKG_NAME"))
        .map_err(|_| LoadError::XDG)?;
    let cache_path = xdg_dirs
        .place_cache_file("session.json")
        .map_err(|_| LoadError::FailedToCreate)?;
    let encoded = serde_json::to_string(cache).map_err(|_| LoadError::FailedToEncode)?;
    fs::write(cache_path, encoded.as_str()).map_err(|_| LoadError::FailedToWrite)?;
    Ok(())
}
/// Load the cache from `$XDG_CACHE_HOME/<pkg>/session.json`, seeding the file
/// with an empty session on first run.
fn load_user_cache() -> Result<UserCache, LoadError> {
    let xdg_dirs = xdg::BaseDirectories::with_prefix(env!("CARGO_PKG_NAME"))
        .map_err(|_| LoadError::XDG)?;
    let cache_path = xdg_dirs.get_cache_file("session.json");

    // First run: create the file so the read below succeeds.
    if !cache_path.exists() {
        write_user_cache(&UserCache { helloasso_session: None })?;
    }

    let raw = fs::read_to_string(cache_path).map_err(|_| LoadError::Fs)?;
    serde_json::from_str(&raw).map_err(|_| LoadError::FailedToParse)
}
/// Build a reqwest proxy from an optional URL string; `None` stays `None`.
fn get_proxy_from_url(proxy_url: &Option<String>) -> Result<Option<reqwest::Proxy>> {
    proxy_url
        .as_deref()
        .map(|url| {
            reqwest::Proxy::all(url)
                .context("Expected to build Proxy from paheko_proxy config value")
        })
        .transpose()
}
/// Wire up and authenticate the Paheko client, then run the sync for the
/// chosen source (CSV on stdin, or HelloAsso).
async fn launch_adapter(source: SourceType, config: &Config, dry_run: bool) -> Result<()> {
    let mut user_cache = load_user_cache().context("Failed to load user cache")?;
    // Url::join drops the last path segment when the base has no trailing slash
    if !&config.paheko_base_url.ends_with('/') {
        return Err(anyhow!("Invalid paheko base_url, it must end with a slash"))
    }
    let mut paheko_client: paheko::Client = paheko::Client::new(paheko::ClientConfig {
        base_url: Url::parse(&config.paheko_base_url).expect("Expected paheko base url to be a valid URL"),
        proxy: get_proxy_from_url(&config.paheko_proxy)?,
        user_agent: APP_USER_AGENT.to_string()
    });
    let paheko_credentials = paheko::Credentials {
        client_id: config.paheko_client_id.clone(),
        client_secret: config.paheko_client_secret.clone()
    };
    // login() validates the credentials with a trivial API call
    let paheko_client = paheko_client.login(paheko_credentials).await.context("Paheko login")?;
    match source {
        SourceType::Csv => sync_csv::sync_csv(&paheko_client, config, &mut user_cache, dry_run).await?,
        SourceType::Helloasso => sync_helloasso::sync_helloasso(&paheko_client, config, &mut user_cache, dry_run).await?
    }
    Ok(())
}
#[derive(FromArgs)]
/// Members and Membership sync adapter for paheko (support HelloAsso and CSV)
struct App {
    /// the source of sync (CSV or helloasso)
    #[argh(option, short = 'm')]
    source: Option<String>,
    /// output debug info
    #[argh(switch, short = 'i')]
    env_info: bool,
    /// dry run
    #[argh(switch, short = 'd')]
    dry_run: bool
}
/// Which backend feeds the sync: HelloAsso's API or a CSV on stdin.
enum SourceType {
    Helloasso,
    Csv
}
/// Entry point: parse CLI args, load env config, dispatch to the adapter.
#[tokio::main]
async fn main() {
    let app: App = argh::from_env();

    // Load .env (best-effort) before reading the config from the environment.
    match dotenvy::dotenv() {
        Ok(_) => eprintln!("Loaded env vars from .env"),
        Err(err) => eprintln!("WARNING: loading .env failed {:?}", err),
    }
    let config: Config = envy::from_env().expect("Failed to load env vars");

    if app.env_info {
        dbg!(config);
        return;
    }

    // Previously `app.source.unwrap()` panicked when --source was omitted;
    // treat a missing option the same as an invalid one instead.
    let source = match app.source.as_deref() {
        Some("helloasso") => SourceType::Helloasso,
        Some("csv") => SourceType::Csv,
        _ => {
            eprintln!("Must provide a valid source argument.");
            return;
        }
    };

    let res = launch_adapter(source, &config, app.dry_run).await;
    match res {
        Err(err) => {
            eprintln!("Program failed, details below");
            eprintln!("{:?}", err);
        },
        Ok(()) => {
            eprintln!("Program done");
        }
    }
}

585
src/paheko.rs Normal file
View file

@ -0,0 +1,585 @@
use async_recursion::async_recursion;
use anyhow::{Context, Result, anyhow};
use url::Url;
use serde::{Serialize, Deserialize};
use fully_pub::fully_pub;
use crate::utils::Id;
use chrono::prelude::{DateTime, Utc};
use chrono::NaiveDate;
use thiserror::Error;
use crate::sync_paheko::GeneralizedAnswer;
use crate::utils::{deserialize_date, deserialize_json_list, complete_date};
/// Pointers back to the originating HelloAsso objects, stored with the Paheko
/// user for traceability and duplicate detection.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[fully_pub]
struct HelloassoReferences {
    answer_id: u64,
    order_id: u64
    // payment_id: u64,
}
/// External-system references attached to a Paheko user (currently only
/// HelloAsso).
#[derive(Debug, Serialize, Deserialize, Clone)]
#[fully_pub]
struct ExternalReferences {
    helloasso_refs: HelloassoReferences
}
/// for now we include the custom fields into the paheko user
/// we don't have time to implement user settings to change the custom fields mapping
/// for now, manual mapping
#[derive(Debug, Serialize, Deserialize, Clone)]
#[fully_pub]
struct User {
    id: Id,
    first_name: Option<String>,
    last_name: String,
    email: Option<String>,
    phone: Option<String>,
    address: String,
    city: String,
    postal_code: String,
    country: String,
    skills: Option<String>,
    job: Option<String>,
    birth_year: Option<u32>,
    register_time: DateTime<Utc>
}
/// Lightweight projection of a Paheko member, as returned by `get_users`.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[fully_pub]
struct UserSummary {
    id: Id,
    first_name: Option<String>,
    last_name: String,
    email: Option<String>,
    phone: Option<String>
}
/// A subscription of one or more users to a Paheko service (activity),
/// with its validity window and the amount paid.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[fully_pub]
struct Membership {
    id: Id,
    users_ids: Vec<Id>,
    service_name: String,
    mode_name: String,
    start_time: DateTime<Utc>,
    end_time: DateTime<Utc>,
    payed_amount: f64
}
/// Direction of an accounting transaction; serialized to the API's
/// "EXPENSE"/"REVENUE" strings via `From<TransactionKind> for String`.
#[derive(Debug, Clone)]
#[fully_pub]
enum TransactionKind {
    Expense,
    Revenue
}
impl From<TransactionKind> for String {
    /// Map the kind onto the string constants the Paheko API expects.
    fn from(val: TransactionKind) -> Self {
        String::from(match val {
            TransactionKind::Expense => "EXPENSE",
            TransactionKind::Revenue => "REVENUE",
        })
    }
}
/// A flat accounting transaction, used both when reading existing entries and
/// when registering new ones through the API.
#[derive(Debug, Clone)]
#[fully_pub]
struct SimpleTransaction {
    label: String,
    kind: TransactionKind,
    inception_time: DateTime<Utc>,
    amount: f64,
    credit_account_code: String,
    debit_account_code: String,
    reference: Option<String>,
    linked_users: Vec<Id>,
    linked_subscriptions: Vec<Id>,
    accounting_year: Id,
    payment_reference: Option<String>,
    notes: Option<String>
}
/// A Paheko accounting year (exercice), as returned by `accounting/years`.
/// `closed` is the API's integer flag (0 = open).
#[derive(Debug, Clone, Deserialize)]
#[fully_pub]
struct AccountingYear {
    id: Id,
    label: String,
    closed: u32,
    #[serde(deserialize_with = "deserialize_date", rename="start_date")]
    start_date: NaiveDate,
    #[serde(deserialize_with = "deserialize_date", rename="end_date")]
    end_date: NaiveDate
}
/// Errors surfaced by the Paheko HTTP client.
#[derive(Error, Debug)]
enum APIClientError {
    #[error("Received non-normal status code from API")]
    InvalidStatusCode
}
/// Paheko API credentials, sent as HTTP Basic auth on every request.
#[derive(Debug, Clone)]
#[fully_pub]
struct Credentials {
    client_id: String,
    client_secret: String
}
/// Connection settings for the Paheko API client.
#[derive(Debug, Clone)]
#[fully_pub]
struct ClientConfig {
    base_url: Url,
    proxy: Option<reqwest::Proxy>,
    user_agent: String
}
impl Default for ClientConfig {
    fn default() -> Self {
        ClientConfig {
            proxy: None,
            // the trailing slash is important: Url::join would otherwise
            // replace the last path segment instead of appending to it
            base_url: Url::parse("https://paheko.example.org/api/")
                .expect("Expected valid paheko API base URL"),
            user_agent: "".to_string()
        }
    }
}
/// Unauthenticated Paheko client; call `login` to obtain an
/// `AuthentifiedClient`.
#[derive(Debug)]
#[fully_pub]
struct Client {
    client: reqwest::Client,
    config: ClientConfig
}
impl Default for Client {
fn default() -> Self {
let base_config: ClientConfig = Default::default();
Client {
client: Client::get_base_client_builder(&base_config)
.build()
.expect("Expected reqwest client to be built"),
config: base_config
}
}
}
use base64_light::base64_encode;
/// Build the HTTP Basic auth header map: base64("client_id:client_secret").
fn build_auth_headers(credentials: &Credentials) -> reqwest::header::HeaderMap {
    let raw_pair = format!("{}:{}", &credentials.client_id, &credentials.client_secret);
    let header_value = format!("Basic {}", &base64_encode(&raw_pair));
    let mut headers = reqwest::header::HeaderMap::new();
    headers.insert(
        "Authorization",
        header_value.parse().expect("Header value to be OK")
    );
    headers
}
impl Client {
    /// Build a client from explicit connection settings.
    pub fn new(config: ClientConfig) -> Client {
        Client {
            client: Client::get_base_client_builder(&config)
                .build()
                .expect("Expected reqwest client to be built"),
            config
        }
    }

    /// Shared reqwest builder: JSON Accept header, configured User-Agent and
    /// the optional proxy. Also used by `AuthentifiedClient::new`.
    fn get_base_client_builder(config: &ClientConfig) -> reqwest::ClientBuilder {
        let mut default_headers = reqwest::header::HeaderMap::new();
        default_headers.insert("Accept", "application/json".parse().unwrap());
        default_headers.insert("User-Agent", config.user_agent.parse().unwrap());
        let mut builder = reqwest::Client::builder()
            .default_headers(default_headers);
        if let Some(proxy) = &config.proxy {
            builder = builder.proxy(proxy.clone());
        }
        builder
    }

    /// Validate the credentials by running a harmless SQL query against the
    /// config table; returns the authenticated client on success.
    /// NOTE(review): SQLite treats double-quoted "org_name" as an identifier
    /// first and only falls back to a string literal — it works, but single
    /// quotes would be unambiguous.
    pub async fn login(&mut self, credentials: Credentials) -> Result<AuthentifiedClient> {
        let hypothetic_client = self.authentified_client(credentials);
        let query: String = r#"
        SELECT key,value FROM config WHERE key="org_name"
        "#.to_string();
        match hypothetic_client.sql_query(query).await {
            Ok(_value) => {
                Ok(hypothetic_client)
            },
            Err(err) => {
                Err(anyhow!("Failed to authenticate: Credentials provided are invalids, {:?}", err))
            }
        }
    }

    /// Wrap credentials into an authenticated client without validating them.
    pub fn authentified_client(&self, credentials: Credentials) -> AuthentifiedClient {
        AuthentifiedClient::new(self.config.clone(), credentials)
    }
}
/// Paheko client whose requests carry HTTP Basic auth (set once as default
/// headers at construction; the credentials themselves are kept but unused).
#[derive(Debug, Clone)]
pub struct AuthentifiedClient {
    _credentials: Credentials,
    client: reqwest::Client,
    config: ClientConfig
}
// SELECT id,nom AS first_name,last_name,email,external_custom_data FROM users LIMIT 5;
/// Row shape for ad-hoc user queries like the SELECT above.
/// NOTE(review): `nom` is aliased to first_name — in Paheko `nom` usually
/// holds the member's name; confirm the column mapping.
#[derive(Debug, Deserialize)]
#[fully_pub]
struct SimpleUser {
    id: u32,
    first_name: String,
    last_name: String,
    email: Option<String>,
    external_custom_data: Option<String>
}
/// Response envelope of the `/sql` endpoint: row count plus raw JSON rows.
#[derive(Debug, Deserialize)]
#[fully_pub]
struct SqlQueryOutput {
    count: u64,
    results: serde_json::Value
}
/// Handle on a service subscription created via the CSV import endpoint;
/// the id is the caller-predicted one, not echoed back by the server.
#[derive(Debug)]
#[fully_pub]
struct UserServiceRegistration {
    id: Id
}
impl AuthentifiedClient {
    /// Build a client whose every request carries the Basic auth headers.
    pub fn new(config: ClientConfig, credentials: Credentials) -> Self {
        AuthentifiedClient {
            client: Client::get_base_client_builder(&config)
                .default_headers(build_auth_headers(&credentials))
                .build()
                .expect("Expect client to be built"),
            _credentials: credentials,
            config
        }
    }

    /// POST an arbitrary SQL query to Paheko's `/sql` endpoint.
    pub async fn sql_query(&self, query: String) -> Result<SqlQueryOutput> {
        #[derive(Serialize)]
        struct Payload {
            sql: String
        }
        let payload = Payload { sql: query };
        let path = self.config.base_url.join("sql")?;
        let res = self.client
            .post(path)
            .json(&payload)
            .send().await?;
        if res.status() != 200 {
            self.show_paheko_err(res).await;
            return Err(APIClientError::InvalidStatusCode.into());
        }
        res.json().await.context("Sql query")
    }

    /// Fetch all members as lightweight summaries.
    /// NOTE(review): `nom` is selected AS first_name — confirm the mapping.
    pub async fn get_users(&self) -> Result<Vec<UserSummary>> {
        let query: String = r#"
        SELECT id,nom AS first_name,last_name,email,telephone AS phone FROM users;
        "#.to_string();
        let users_val = self.sql_query(query).await.context("Fetching users")?;
        Ok(serde_json::from_value(users_val.results)?)
    }

    /// Predict the next auto-increment id of `table_name` (current max + 1).
    /// NOTE(review): an empty table yields 2 (`unwrap_or(1)+1`), and the value
    /// is racy under concurrent writers — confirm this is acceptable.
    pub async fn get_next_id(&self, table_name: &str) -> Result<u64> {
        let query: String = format!(r#"
        SELECT id FROM {} ORDER BY id DESC LIMIT 1
        "#, table_name).to_string();
        let data = self.sql_query(query).await.context("Fetching next id from table")?;
        #[derive(Deserialize)]
        struct Entry {
            id: u64
        }
        let ids: Vec<Entry> = serde_json::from_value(data.results)?;
        Ok(ids.get(0).map(|x| x.id).unwrap_or(1)+1)
    }

    /// List transactions of the given accounting years.
    /// Only label, reference, account codes, date and year carry real values;
    /// `amount`, `kind` and the linked ids are filled with placeholders.
    pub async fn get_transactions(&self, id_years: &[u32])
        -> Result<Vec<SimpleTransaction>>
    {
        #[derive(Debug, Deserialize)]
        struct Row {
            #[allow(dead_code)]
            id: u64,
            label: String,
            reference: Option<String>,
            #[serde(deserialize_with = "deserialize_json_list")]
            accounts_codes: Vec<String>,
            year_id: u64,
            #[serde(deserialize_with = "deserialize_date")]
            inception_date: NaiveDate
        }
        let id_years_joined = id_years.iter()
            .map(|x| x.to_string())
            .collect::<Vec<String>>()
            .join(",");
        // one row per transaction; the lines' account codes are aggregated
        // into a JSON array by the GROUP BY
        let query: String = format!(r#"
        SELECT act.id, act.date AS inception_date, act.id_year AS year_id, act.label, act.reference, acc.code, JSON_GROUP_ARRAY(acc.code) AS accounts_codes
        FROM acc_transactions AS act
        INNER JOIN acc_transactions_lines AS actl ON actl.id_transaction = act.id
        INNER JOIN acc_accounts AS acc ON acc.id = actl.id_account
        WHERE act.id_year IN ({})
        GROUP BY act.id;
        "#, id_years_joined).to_string();
        let val = self.sql_query(query).await.context("Fetching transactions")?;
        let raw_vals: Vec<Row> = serde_json::from_value(val.results)
            .context("Cannot deserialize SQL transactions rows")?;
        // we will assume that the first acc code is always the credit, and second the debit
        Ok(raw_vals.iter().map(|x| SimpleTransaction {
            label: x.label.clone(),
            reference: x.reference.clone(),
            credit_account_code: x.accounts_codes.get(0).unwrap().to_string(),
            debit_account_code: x.accounts_codes.get(1).unwrap().to_string(),
            inception_time: complete_date(x.inception_date),
            accounting_year: Id(x.year_id),
            amount: 0.0,
            kind: TransactionKind::Expense,
            linked_subscriptions: vec![],
            linked_users: vec![],
            payment_reference: None,
            notes: None
        }).collect())
    }

    /// Fetch all accounting years via the REST endpoint.
    pub async fn get_accounting_years(&self)
        -> Result<Vec<AccountingYear>>
    {
        let path = self.config.base_url.join("accounting/years")?;
        let res = self.client
            .get(path)
            .send().await?;
        if res.status() != 200 {
            self.show_paheko_err(res).await;
            return Err(APIClientError::InvalidStatusCode.into());
        }
        res.json().await.context("Get accounting years")
    }

    /// get a list of membership
    /// NOTE(review): `service_name` is interpolated straight into the SQL —
    /// injection risk if it ever comes from untrusted input.
    /// NOTE(review): the result sets `mode_name` to the service label and
    /// `service_name` to "" — this looks swapped, confirm.
    pub async fn get_service_subscriptions(&self, service_name: &str)
        -> Result<Vec<Membership>>
    {
        let query: String = format!(r#"
        SELECT su.id,su.id_user,su.date,su.expiry_date FROM services_users AS su JOIN services AS s ON su.id_service = s.id WHERE s.label = '{}';
        "#, service_name);
        let val = self.sql_query(query).await.context("Fetching service subscriptions")?;
        #[derive(Deserialize)]
        struct Row {
            id: u64,
            id_user: u64,
            #[serde(deserialize_with = "deserialize_date")]
            date: NaiveDate,
            #[serde(deserialize_with = "deserialize_date")]
            expiry_date: NaiveDate
        }
        let intermidiate: Vec<Row> = serde_json::from_value(val.results)?;
        // group the rows with the same id
        Ok(intermidiate
            .chunk_by(|a,b| a.id == b.id)
            .map(|rows| {
                let base = rows.first().unwrap();
                Membership {
                    id: Id(base.id),
                    mode_name: service_name.to_string(),
                    service_name: "".to_string(),
                    start_time: DateTime::<Utc>::from_naive_utc_and_offset(
                        base.date.and_hms_opt(0, 0, 0).unwrap(),
                        Utc
                    ),
                    end_time: DateTime::<Utc>::from_naive_utc_and_offset(
                        base.expiry_date.and_hms_opt(0, 0, 0).unwrap(),
                        Utc
                    ),
                    users_ids: rows.iter().map(|x| Id(x.id_user)).collect(),
                    payed_amount: 0.0
                }
            }).collect()
        )
    }

    /// Create a member through the CSV import endpoint (single-row file).
    /// Field values are written with `{:?}` so quotes/commas are escaped
    /// Debug-style; `lettre_infos` (newsletter) is hard-coded to 1.
    /// On a duplicate-email rejection, retries once recursively without the
    /// email (hence the `#[async_recursion]`).
    #[async_recursion]
    pub async fn create_user(&self, user: &GeneralizedAnswer, next_id: u64)
        -> Result<UserSummary>
    {
        // single-user import
        // create virtual file
        let u = user.clone();
        let mut csv_content: String = String::new();
        csv_content.push_str("numero,nom,last_name,adresse,code_postal,ville,pays,telephone,email,annee_naissance,profession,interets,lettre_infos,date_inscription\n");
        csv_content.push_str(
            format!("{},{:?},{:?},{:?},{:?},{:?},{:?},{:?},{:?},{},{:?},{:?},{},{}\n",
                next_id.to_string(),
                u.first_name.clone().unwrap_or("".to_string()),
                u.last_name.clone(),
                u.address,
                u.postal_code,
                u.city,
                u.country,
                u.phone.clone().unwrap_or("".to_string()),
                u.email.clone().unwrap_or("".to_string()),
                u.birth_year.map(|x| format!("{}", x)).unwrap_or("".to_string()),
                u.job.clone().unwrap_or("".to_string()),
                u.skills.clone().unwrap_or("".to_string()),
                1,
                user.inception_time.format("%d/%m/%Y")
            ).as_str());
        use reqwest::multipart::Form;
        use reqwest::multipart::Part;
        let part = Part::text(csv_content).file_name("file");
        let form = Form::new()
            .part("file", part);
        let res = self.client
            .post(self.config.base_url.join("user/import/")?)
            .multipart(form)
            .send().await?;
        if res.status() != 200 {
            let res_text = res.text().await.unwrap();
            if res_text.contains("E-Mail") && res_text.contains("unique") {
                eprintln!("WARN: Detected duplicated email, will retry without email");
                // email detected as duplicated by paheko server
                let mut new_data = user.clone();
                new_data.email = None;
                return self.create_user(&new_data, next_id).await;
            }
            // self.show_paheko_err(res).await;
            return Err(APIClientError::InvalidStatusCode.into());
        }
        Ok(
            UserSummary {
                id: Id(next_id),
                first_name: u.first_name,
                last_name: u.last_name,
                email: u.email,
                phone: u.phone
            }
        )
    }

    /// Subscribe an existing member to a service via the subscriptions CSV
    /// import endpoint; the returned id is the caller-predicted `next_id`.
    pub async fn register_user_to_service(&self, user: &UserSummary, user_membership: &Membership, next_id: u64)
        -> Result<UserServiceRegistration>
    {
        // single-user import
        // create virtual file
        let u = user.clone();
        let mut csv_content: String = String::new();
        csv_content.push_str(
            r#""Numéro de membre","Activité","Tarif","Date d'inscription","Date d'expiration","Montant à régler","Payé ?""#);
        csv_content.push('\n');
        csv_content.push_str(
            format!("{},{:?},{:?},{:?},{:?},{:?},{:?}\n",
                u.id,
                user_membership.service_name,
                user_membership.mode_name,
                user_membership.start_time.format("%d/%m/%Y").to_string(),
                user_membership.end_time.format("%d/%m/%Y").to_string(),
                format!("{}", user_membership.payed_amount),
                "Oui"
            ).as_str());
        use reqwest::multipart::Form;
        use reqwest::multipart::Part;
        let part = Part::text(csv_content).file_name("file");
        let form = Form::new()
            .part("file", part);
        let res = self.client
            .post(self.config.base_url.join("services/subscriptions/import")?)
            .multipart(form)
            .send().await?;
        if res.status() != 200 {
            self.show_paheko_err(res).await;
            return Err(APIClientError::InvalidStatusCode.into());
        }
        Ok(UserServiceRegistration {
            id: Id(next_id)
        })
    }

    /// Create an accounting transaction through the REST endpoint.
    /// NOTE(review): both `reference` and `payment_reference` are written to
    /// the same `reference` form field; if both are Some the field is sent
    /// twice — confirm which value the API keeps.
    pub async fn register_transaction(&self, transaction: SimpleTransaction)
        -> Result<()>
    {
        use reqwest::multipart::Form;
        let mut form = Form::new()
            .text("id_year", transaction.accounting_year.to_string())
            .text("label", transaction.label)
            .text("date", transaction.inception_time.format("%d/%m/%Y").to_string())
            .text("type", Into::<String>::into(transaction.kind))
            .text("amount", format!("{}", transaction.amount))
            .text("debit", transaction.debit_account_code)
            .text("credit", transaction.credit_account_code)
            // "Numéro pièce comptable" enregistré au niveau de la transaction
            ;
        if let Some(val) = transaction.reference {
            form = form.text("reference", val);
        }
        if let Some(val) = transaction.payment_reference {
            form = form.text("reference", val);
        }
        if let Some(val) = transaction.notes {
            form = form.text("notes", val);
        }
        for linked_id in transaction.linked_users {
            form = form.text("linked_users[]", format!("{}", linked_id.0));
        }
        for linked_id in transaction.linked_subscriptions {
            form = form.text("linked_subscriptions[]", format!("{}", linked_id.0));
        }
        let res = self.client
            .post(self.config.base_url.join("accounting/transaction")?)
            .multipart(form)
            .send().await?;
        if res.status() != 200 {
            self.show_paheko_err(res).await;
            return Err(APIClientError::InvalidStatusCode.into());
        }
        Ok(())
    }

    /// Dump the status and body of an error response to stderr for debugging.
    async fn show_paheko_err(&self, err_response: reqwest::Response) {
        eprintln!("Paheko error details: {:?} {:?}", err_response.status(), err_response.text().await.unwrap())
    }
}

170
src/sync_csv.rs Normal file
View file

@ -0,0 +1,170 @@
use crate::paheko;
use crate::{
Config, UserCache,
};
use anyhow::Result;
use crate::utils::{normalize_str, parse_date_iso, parse_normalize_phone};
use crate::sync_paheko::{sync_paheko, GeneralizedAnswer, PaymentMode};
use email_address::EmailAddress;
use chrono::prelude::Datelike;
use std::io::BufRead;
use csv::ReaderBuilder;
use std::io;
const CAISSE_ACCOUNT_CODE: &str = "530"; // 530 - Caisse

/// Normalize a raw CSV cell, mapping empty results to `None`.
fn process_csv_value(value: String) -> Option<String> {
    let cleaned = normalize_str(value);
    if cleaned.is_empty() {
        None
    } else {
        Some(cleaned)
    }
}
/// Parse a price cell like `"12.50"`, `" 30 "` or `"12,50"` into a float.
///
/// Keeps only digits and decimal separators, accepting both `.` and the
/// French-locale `,` (previously `"12,50"` lost its separator entirely and
/// parsed as `1250`). Unparseable or empty input falls back to `0.0`.
fn process_price(value: String) -> f64 {
    value
        .trim()
        .chars()
        // treat a comma as a decimal point before filtering
        .map(|c| if c == ',' { '.' } else { c })
        .filter(|c| c.is_numeric() || *c == '.')
        .collect::<String>()
        .parse()
        .unwrap_or(0.0)
}
// read csv from stdin
/// Sync memberships from a CSV piped on stdin into Paheko.
///
/// Stdin is pre-filtered (decoration rows starting with `,` are dropped,
/// reading stops at the `\FIN_DES_DONNES` marker), parsed with headers,
/// mapped to `GeneralizedAnswer`s sorted oldest-first, then handed to
/// `sync_paheko` with the cash-register account code.
pub async fn sync_csv(
    paheko_client: &paheko::AuthentifiedClient,
    config: &Config,
    _user_cache: &mut UserCache,
    dry_run: bool
) -> Result<()> {
    // raw row record directly from CSV
    #[derive(Debug, serde::Deserialize)]
    struct AnswerRecord {
        // Ref BP
        reference: String,
        inception_date: String,
        email: String,
        first_name: String,
        last_name: String,
        // Mode / Tarif "Individuel" "Couple"
        membership_mode: String,
        // CC 1 Prénom conjoint
        linked_user_first_name: String,
        // CC 2 ADRESSE
        address: String,
        // CC 3 CODE POSTAL
        postal_code: String,
        // CC 4 VILLE
        city: String,
        // CC 5 TÉLÉPHONE
        phone: String,
        // CC 7 PROFESSION
        job: String,
        // CC 8 CENTRE D'INTÉRÊTS / COMPÉTENCES
        skills: String,
        // CC 9 DATE DE NAISSANCE
        birth_date: String,
        // Cotisation (€)
        subscription_amount: String,
        // Don (€)
        donation_amount: String,
        // Mode de paiement (Espèce or Cheque, ESP or CHQ)
        payment_mode: String
    }
    let stdin = io::stdin();
    let mut intermediate_inp = "".to_string();
    // pre-filter stdin into an in-memory buffer before CSV parsing
    for line_res in stdin.lock().lines() {
        let line = line_res.unwrap();
        eprintln!("{:?}",&line);
        if line.starts_with(',') {
            continue;
        }
        if line.contains("\\FIN_DES_DONNES") {
            break;
        }
        intermediate_inp.push_str(&line);
        intermediate_inp.push('\n');
    }
    let mut rdr = ReaderBuilder::new()
        .from_reader(intermediate_inp.as_bytes());
    let mut generalized_answers: Vec<GeneralizedAnswer> = vec![];
    eprintln!("Reading from stdin");
    for parsed_record_res in rdr.deserialize() {
        let parsed_record: AnswerRecord = parsed_record_res?;
        eprintln!("Parsed_record: {:?}", parsed_record);
        let generalized_answer = GeneralizedAnswer {
            first_name: Some(normalize_str(parsed_record.first_name)),
            last_name: normalize_str(parsed_record.last_name),
            // keep the email only when it is syntactically valid
            email: process_csv_value(parsed_record.email).and_then(|s| EmailAddress::is_valid(&s).then_some(s)),
            phone: process_csv_value(parsed_record.phone).and_then(parse_normalize_phone),
            skills: process_csv_value(parsed_record.skills),
            address: process_csv_value(parsed_record.address)
                .expect("Expected answer to have address"),
            postal_code: process_csv_value(parsed_record.postal_code)
                .expect("Expected answer to have postalcode"),
            city: process_csv_value(parsed_record.city)
                .expect("Expected answer answer to have city"),
            country: "fr".to_string(),
            job: process_csv_value(parsed_record.job),
            // only the year of birth is kept
            birth_year: process_csv_value(parsed_record.birth_date)
                .and_then(|raw_date| parse_date_iso(&raw_date))
                .map(|d| d.year() as u32),
            inception_time: process_csv_value(parsed_record.inception_date)
                .map(|s|
                    parse_date_iso(&s).expect("Record must have a valid date")
                )
                .expect("Record must have a date"),
            reference: format!("BP/{}", process_csv_value(parsed_record.reference).expect("Row must have reference")), // BP as Bulletin Papier
            donation_amount: process_price(parsed_record.donation_amount),
            subscription_amount: process_price(parsed_record.subscription_amount), // FIXME: get subscription from mode
            // round-trip through serde_json to reuse the enum's Deserialize impl
            membership_mode: serde_json::from_value(serde_json::Value::String(parsed_record.membership_mode.clone()))
                .expect("Expected a membership mode to be valid"),
            linked_user_first_name: process_csv_value(parsed_record.linked_user_first_name),
            // same serde round-trip trick, after upper-casing (ESP/CHQ)
            payment_mode: match process_csv_value(parsed_record.payment_mode) {
                Some(payment_mode_name) => serde_json::from_str(
                    &format!(
                        "\"{}\"",
                        payment_mode_name.to_ascii_uppercase()
                    )
                ).expect("Could not parse payment mode"),
                None => PaymentMode::Cheque
            }
        };
        generalized_answers.push(generalized_answer);
    }
    // sort by date, most older first
    generalized_answers.sort_by(|a, b| a.inception_time.cmp(&b.inception_time));
    eprintln!("Generated GeneralizedAnswers");
    if dry_run {
        dbg!(generalized_answers);
        eprintln!("Stopping here, dry run");
        return Ok(());
    }
    sync_paheko(
        paheko_client,
        config,
        generalized_answers,
        CAISSE_ACCOUNT_CODE,
        "Papier"
    ).await?;
    eprintln!("CSV sync done.");
    Ok(())
}

219
src/sync_helloasso.rs Normal file
View file

@ -0,0 +1,219 @@
use crate::helloasso;
use crate::paheko;
use crate::sync_paheko::PaymentMode;
use crate::{
Config, UserCache,
get_proxy_from_url, write_user_cache
};
use crate::utils::{parse_and_get_birthday_year, parse_normalize_phone, normalize_str};
use crate::sync_paheko::{GeneralizedAnswer, sync_paheko};
use anyhow::{Context, Result};
use url::Url;
use email_address::EmailAddress;
/// Get authentified HelloAsso client
///
/// Reuses the session token stored in `user_cache` when the API still accepts
/// it; otherwise performs a fresh login and persists the new session back to
/// the cache via `write_user_cache`.
async fn get_auth_client_from_cache(
    user_cache: &mut UserCache,
    ha_client: &mut helloasso::Client,
    login_payload: helloasso::LoginPayload
) -> Result<helloasso::AuthentifiedClient> {
    // TODO: find a better way to have the logic implemented
    // Fresh login path: authenticate, then store the session in the user
    // cache so the next run can skip the login round-trip.
    async fn login(
        user_cache: &mut UserCache,
        ha_client: &mut helloasso::Client,
        login_payload: helloasso::LoginPayload
    ) -> Result<helloasso::AuthentifiedClient> {
        let auth_client = ha_client.login(login_payload).await.context("Failed to login to HelloAsso")?;
        user_cache.helloasso_session = Some(auth_client.session.clone());
        // Panics (rather than erroring) when the cache file cannot be written.
        write_user_cache(user_cache).expect("Unable to write user cache");
        eprintln!("HelloAsso: Logged in and wrote token to cache");
        Ok(auth_client)
    }
    eprintln!("Initializing HA client…");
    match &user_cache.helloasso_session {
        Some(cached_session) => {
            // A session is cached: verify it is still valid before trusting it.
            let auth_client = ha_client.authentified_client(cached_session.clone());
            if !auth_client.verify_auth().await? {
                println!("HelloAsso: Need to relog, token invalid");
                return login(user_cache, ha_client, login_payload).await
            }
            eprintln!("HelloAsso: Used anterior token");
            Ok(auth_client)
        },
        None => {
            // No cached session yet: first run on this machine.
            eprintln!("HelloAsso: First time login");
            login(user_cache, ha_client, login_payload).await
        }
    }
}
/// Semantic meaning of a HelloAsso custom form field, resolved from the
/// field's label as configured on the membership form.
#[derive(Debug, PartialEq, Clone, Copy)]
enum HelloassoCustomFieldType {
    Email,
    Address,
    PostalCode,
    City,
    Phone,
    Job,
    Skills,
    Birthday,
    LinkedUserFirstName
}

impl TryFrom<&str> for HelloassoCustomFieldType {
    type Error = ();

    /// Map a form field label to its semantic type.
    /// Returns `Err(())` for labels this tool does not recognize.
    fn try_from(subject: &str) -> Result<Self, Self::Error> {
        match subject {
            "EMAIL" => Ok(Self::Email),
            "ADRESSE" => Ok(Self::Address),
            "CODE POSTAL" => Ok(Self::PostalCode),
            "VILLE" => Ok(Self::City),
            "TÉLÉPHONE" => Ok(Self::Phone),
            "PROFESSION" => Ok(Self::Job),
            "CENTRE D'INTÉRÊTS / COMPÉTENCES" => Ok(Self::Skills),
            "DATE DE NAISSANCE" => Ok(Self::Birthday),
            "Prénom conjoint" => Ok(Self::LinkedUserFirstName),
            _ => Err(())
        }
    }
}
/// Return the answer text of the first custom field on `form_answer` whose
/// label maps to the requested semantic `custom_field` type.
// FIXME: compute the type directly at deserialization with serde
fn read_custom_field(form_answer: &helloasso::FormAnswer, custom_field: HelloassoCustomFieldType) -> Option<String> {
    for field in &form_answer.custom_fields {
        if HelloassoCustomFieldType::try_from(field.name.as_str()) == Ok(custom_field) {
            return Some(field.answer.clone());
        }
    }
    None
}
/// Pull membership form answers from HelloAsso, convert each one to a
/// `GeneralizedAnswer`, and push the result to Paheko via `sync_paheko`.
///
/// Answers whose first payment is marked "Offline" were imported into
/// HelloAsso from another source and are skipped here.
/// With `dry_run`, the generalized answers are listed and nothing is synced.
pub async fn sync_helloasso(
    paheko_client: &paheko::AuthentifiedClient,
    config: &Config,
    user_cache: &mut UserCache,
    dry_run: bool
) -> Result<()> {
    let mut ha_client: helloasso::Client = helloasso::Client::new(helloasso::ClientConfig {
        base_url: Url::parse("https://api.helloasso.com/v5/")
            .expect("Expected valid helloasso API base URL"),
        proxy: get_proxy_from_url(&config.helloasso_proxy)?,
        user_agent: "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/112.0".to_string()
    });

    let login_payload = helloasso::LoginPayload {
        email: config.helloasso_email.clone(),
        password: config.helloasso_password.clone()
    };
    let auth_client: helloasso::AuthentifiedClient =
        get_auth_client_from_cache(user_cache, &mut ha_client, login_payload).await?;

    let org = auth_client.organization(&config.helloasso_organization_slug);
    let answers = org.get_form_answers(&config.helloasso_form_name).await?;
    println!("Got {} answers to the membership form. Processing...", &answers.len());

    // Prefer the email typed into the custom field when it is valid,
    // falling back to the payer account's email.
    fn choose_email(answer: &helloasso::FormAnswer) -> Option<String> {
        read_custom_field(answer, HelloassoCustomFieldType::Email)
            .and_then(|x| {
                if !EmailAddress::is_valid(&x) {
                    None
                } else {
                    Some(x)
                }
            })
            .or(Some(answer.payer_user.email.clone()))
    }

    let mut kept_original_answers: Vec<helloasso::FormAnswer> = vec![];
    let mut generalized_answers: Vec<GeneralizedAnswer> = vec![];
    for answer in answers {
        let email = choose_email(&answer);

        // skip answers that were imported later and are stranger from helloasso
        let payment = answer.payments.first().expect("Expected payment to exists");
        // as_deref avoids allocating a String just for the comparison
        if payment.extra.as_deref() == Some("Offline") {
            continue;
        }

        // HelloAsso amounts are integers in cents; split the share that is the
        // membership fee from the remainder, which is a donation
        let subscription_amount = f64::from(payment.share_amount)/100.0;
        let donation_amount = f64::from(payment.amount - payment.share_amount)/100.0;

        let mut generalized_answer = GeneralizedAnswer {
            first_name: Some(normalize_str(answer.user.first_name.clone())),
            last_name: normalize_str(answer.user.last_name.clone()),
            email,
            phone: read_custom_field(&answer, HelloassoCustomFieldType::Phone)
                .map(normalize_str)
                .and_then(parse_normalize_phone),
            skills: read_custom_field(&answer, HelloassoCustomFieldType::Skills)
                .map(normalize_str),
            address: read_custom_field(&answer, HelloassoCustomFieldType::Address)
                .map(normalize_str)
                .expect("Expected ha answer to have address"),
            postal_code: read_custom_field(&answer, HelloassoCustomFieldType::PostalCode)
                .expect("Expected ha answer to have postalcode"),
            city: read_custom_field(&answer, HelloassoCustomFieldType::City)
                .map(normalize_str)
                .expect("Expected ha answer to have city"),
            // we expect country code ISO 3166-1 alpha-2; take the first two
            // *chars* — the previous byte slice `[..=1]` panicked on inputs
            // shorter than 2 bytes or starting with a multi-byte character
            country: answer.payer_user.country.trim().chars().take(2).collect(),
            job: read_custom_field(&answer, HelloassoCustomFieldType::Job).map(normalize_str),
            birth_year: read_custom_field(&answer, HelloassoCustomFieldType::Birthday).and_then(parse_and_get_birthday_year),
            inception_time: answer.order.inception_time,
            reference: format!("HA/{}", answer.id),
            donation_amount,
            subscription_amount,
            membership_mode: serde_json::from_value(serde_json::Value::String(answer.mode.clone()))
                .expect("Expected a membership mode to be valid"),
            linked_user_first_name: read_custom_field(&answer, HelloassoCustomFieldType::LinkedUserFirstName),
            payment_mode: PaymentMode::CreditCard
        };

        // apply custom user override
        // this particular answer had duplicate phone and email from another answer
        if answer.id == 64756582 {
            generalized_answer.email = None;
            generalized_answer.phone = None;
        }
        // this answer packed both members of a couple into the first name field
        if answer.id == 64756581 {
            if let Some(wrong_first_name) = generalized_answer.first_name {
                let compos: Vec<&str> = wrong_first_name.split(" et ").collect();
                generalized_answer.first_name = Some(compos.first().unwrap().to_string());
                generalized_answer.linked_user_first_name = Some(compos.get(1).unwrap().to_string());
            }
        }

        generalized_answers.push(generalized_answer);
        kept_original_answers.push(answer);
    }
    println!("Generated GeneralizedAnswers");

    if dry_run {
        eprintln!("Will stop because dry run mode is enabled.");
        list_answers(kept_original_answers, generalized_answers);
        return Ok(());
    }

    sync_paheko(
        paheko_client,
        config,
        generalized_answers,
        "512",
        "HelloAsso"
    ).await
}
/// Print a one-line summary of each kept HelloAsso answer next to its
/// generalized form (used by the dry-run mode).
fn list_answers(original_answers: Vec<helloasso::FormAnswer>, generalized_answers: Vec<GeneralizedAnswer>) {
    for (ha_answer, generalized) in original_answers.into_iter().zip(generalized_answers) {
        println!(
            "{} {:?} {:?} {:?} {:?}",
            ha_answer.id, generalized.first_name, generalized.last_name,
            generalized.email, generalized.linked_user_first_name
        );
    }
}

316
src/sync_paheko.rs Normal file
View file

@ -0,0 +1,316 @@
use crate::paheko;
use crate::paheko::{AccountingYear, SimpleTransaction};
use crate::Config;
use crate::utils::{generate_id, normalize_first_name, normalize_last_name};
use anyhow::{Context, Result};
use chrono::prelude::{NaiveDate, DateTime, Utc};
use fully_pub::fully_pub;
use serde::{Serialize, Deserialize};
const DONATION_ACCOUNT_CODE: &str = "754";
const SUBSCRIPTION_ACCOUNT_CODE: &str = "756";
/// Membership formula chosen by the member.
///
/// The serde rename strings are the exact labels coming from the sources
/// (HelloAsso `answer.mode` / paper CSV `membership_mode`); do not change
/// them without migrating both inputs.
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
#[fully_pub]
enum MembershipMode {
    #[serde(rename = "Individuel")]
    Individual,
    #[serde(rename = "Couple")]
    Couple,
    #[serde(rename = "Individuel bienfaiteur")]
    BenefactorIndividual,
    #[serde(rename = "Couple bienfaiteur")]
    BenefactorCouple,
}
/// How a membership was paid.
///
/// Serialized as short French codes (ESP = espèces, CB = carte bancaire,
/// CHQ = chèque); these strings end up in Paheko payment references/notes.
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
#[fully_pub]
enum PaymentMode {
    /// Cash ("espèces")
    #[serde(rename = "ESP")]
    Cash,
    /// Credit/debit card ("carte bancaire")
    #[serde(rename = "CB")]
    CreditCard,
    /// Cheque
    #[serde(rename = "CHQ")]
    Cheque
}
/// Source-agnostic representation of one membership form answer
/// (from HelloAsso or from the paper CSV), ready to be pushed to Paheko.
#[derive(Debug, Clone)]
#[fully_pub]
struct GeneralizedAnswer {
    // TODO: users are unique via their first and last name, instead of emails
    /// First name; None only when a source override removed it
    first_name: Option<String>,
    last_name: String,
    /// Email already validated by the source-specific code, when available
    email: Option<String>,
    phone: Option<String>,
    address: String,
    city: String,
    postal_code: String,
    /// Country code — ISO 3166-1 alpha-2 expected (e.g. "fr")
    country: String,
    skills: Option<String>,
    job: Option<String>,
    /// Birth year only — day/month are dropped for GDPR data minimization
    birth_year: Option<u32>,
    membership_mode: MembershipMode,
    /// When the subscription/payment was made
    inception_time: DateTime<Utc>,
    /// Membership-fee part of the payment (source amounts divided by 100,
    /// presumably euros — confirm currency with the sources)
    subscription_amount: f64,
    /// Donation part of the payment, same unit as `subscription_amount`
    donation_amount: f64,
    /// Unique per-answer reference, e.g. "HA/<id>" or "BP/<ref>"
    reference: String,
    payment_mode: PaymentMode,
    /// Partner's first name for Couple memberships
    linked_user_first_name: Option<String>
}
/// Find the accounting year whose `[start_date, end_date]` interval
/// (inclusive on both ends) contains the day of `time`.
fn get_accounting_year_for_time<'a>(
    accounting_years: &'a [AccountingYear], time: &'a DateTime<Utc>
) -> Option<&'a AccountingYear> {
    let day = time.date_naive();
    accounting_years
        .iter()
        .find(|year| (year.start_date..=year.end_date).contains(&day))
}
pub async fn sync_paheko(
paheko_client: &paheko::AuthentifiedClient,
config: &Config,
answers: Vec<GeneralizedAnswer>,
debit_account_code: &str,
via_name: &str
) -> Result<()> {
// FIXME: search existing paheko users using the first name and last name, some ppl don't have
// emails
struct Stats {
subscriptions_created: u32,
transaction_created: u32,
users_created: u32
}
let mut stats = Stats { transaction_created: 0, subscriptions_created: 0, users_created: 0 };
let mut pk_memberships: Vec<paheko::Membership> = vec![];
let accounting_years = paheko_client.get_accounting_years().await.context("Get acc years")?;
// 1. get summary of existing paheko users
let mut existing_users = paheko_client.get_users().await.context("Get users")?;
// 2. get summary of transactions for the years we will use
let existing_transactions = paheko_client.get_transactions(&config.paheko_accounting_years_ids)
.await.context("Get transactions")?;
// 3. get summary of services_users for that year
let existing_subscriptions = paheko_client.get_service_subscriptions(&config.paheko_target_activity_name)
.await.context("Get existing paheko subscriptions to the target activity")?;
// query paheko to get top ids
// IMPORTANT: this mean that while the script is running, there must be NO mutations to the
// users and services_users table on the paheko side
let mut pk_next_user_id = paheko_client.get_next_id("users")
.await.context("Get paheko users next id")?;
let mut pk_next_user_service_id = paheko_client.get_next_id("services_users")
.await.context("Get paheko services_users next id")?;
for answer_inp in &answers {
let mut answer = answer_inp.clone();
answer.first_name = answer.first_name.map(normalize_first_name);
answer.last_name = normalize_last_name(answer.last_name);
eprintln!("Processing answer:");
eprintln!(" name: {} {}", &answer.last_name, answer.first_name.clone().unwrap_or("".to_string()));
eprintln!(" email: {:?}", answer.email);
// list of users involved in this answer
let mut pk_users_summaries: Vec<paheko::UserSummary> = vec![];
let mut pk_user_service_registrations: Vec<paheko::UserServiceRegistration> = vec![];
let existing_matching_transactions: Vec<&SimpleTransaction> = existing_transactions
.iter()
.filter(|t| t.reference == Some(answer.reference.clone()))
.collect();
// check for existing user in paheko by email
// TODO: check user with fuzzing first name and last name
let existing_user_opt = existing_users
.iter()
.find(|user|
user.first_name.clone().map(|x| x.to_lowercase()) == answer.first_name.clone().map(|x| x.to_lowercase()) &&
user.last_name.to_lowercase() == answer.last_name.to_lowercase()
)
.cloned();
let pk_user_summary = match existing_user_opt.clone() {
Some(user) => {
eprintln!(" Found existing paheko user by matching name.");
// TODO: if existing user is different, update the details of user
user
},
None => {
// create paheko user
let c = paheko_client.create_user(
&answer, pk_next_user_id
).await.context("Expected to create paheko user")?;
eprintln!(" Created paheko user");
pk_next_user_id += 1;
existing_users.push(c.clone());
stats.users_created += 1;
c
}
};
pk_users_summaries.push(pk_user_summary.clone());
let mut pk_membership = paheko::Membership {
id: generate_id(),
service_name: config.paheko_target_activity_name.clone(),
// FIXME: handle errors when mode is invalid
mode_name: serde_json::to_value(answer.membership_mode.clone())
.unwrap().as_str().unwrap().to_string(),
start_time: answer.inception_time,
end_time:
DateTime::<Utc>::from_naive_utc_and_offset(
NaiveDate::from_ymd_opt(2024, 12, 31).unwrap().and_hms_opt(23, 59, 59).unwrap(),
Utc
),
payed_amount: answer.subscription_amount,
users_ids: vec![pk_user_summary.id.clone()]
};
// check if the user is already subscribed to the target activity
if
existing_user_opt.is_some() &&
existing_subscriptions.iter()
.any(|membership| membership.users_ids.iter().any(|i| *i == pk_user_summary.id))
{
eprintln!(" User is already subscribed to this activity");
} else {
// add activity for first member
let user_registration = paheko_client.register_user_to_service(
pk_users_summaries.get(0).unwrap(),
&pk_membership,
pk_next_user_service_id
).await.context("Expected to register user activity to paheko")?;
pk_user_service_registrations.push(user_registration);
pk_next_user_service_id += 1;
stats.subscriptions_created += 1;
eprintln!(" Created paheko activity registration");
// then create optional linked user
// FIXME: reuse a previous user
// TODO: get existing linked user from previous year
if answer.membership_mode == MembershipMode::Couple {
let mut second_answer = answer.clone();
second_answer.email = None;
second_answer.phone = None;
second_answer.skills = None;
second_answer.job = None;
second_answer.birth_year = None;
// add first_name
match answer.linked_user_first_name {
Some(name) => {
second_answer.first_name = Some(normalize_first_name(name));
},
None => {
second_answer.first_name = None;
eprintln!("Warn: Got a user with Couple mode but no additional name given!")
}
}
if existing_user_opt.is_none() {
// only create the linked user in paheko, if the first user was also created
let second_pk_user_summary = paheko_client.create_user(&second_answer, pk_next_user_id)
.await.context("Expected to create second paheko user")?;
eprintln!(" Created conjoint paheko user");
pk_users_summaries.push(second_pk_user_summary.clone());
pk_next_user_id += 1;
// create activity of second user
let user_registration = paheko_client.register_user_to_service(
pk_users_summaries.get(1).unwrap(), // pass user, for the id
&pk_membership,
pk_next_user_service_id
).await.context("Registering service to second paheko server")?;
pk_user_service_registrations.push(user_registration);
pk_next_user_service_id += 1;
stats.subscriptions_created += 1;
eprintln!(" Created paheko activity registration for conjoint user");
pk_membership.users_ids.push(second_pk_user_summary.id)
}
}
}
if !existing_matching_transactions.iter().any(|t| t.credit_account_code == SUBSCRIPTION_ACCOUNT_CODE) {
// add transaction for subscription
let transaction = paheko::SimpleTransaction {
accounting_year: match get_accounting_year_for_time(&accounting_years, &answer.inception_time) {
None => {
eprintln!("Cannot find an accounting year on paheko that include the inception date {:?} given", &answer.inception_time);
panic!();
},
Some(s) => s
}.id.clone(),
// TODO: make the label template configurable
label: format!("{} {:?} via {}", pk_membership.service_name, pk_membership.mode_name, via_name),
amount: pk_membership.payed_amount,
reference: Some(answer.reference.clone()),
credit_account_code: "756".to_string(), // cotisations account
debit_account_code: debit_account_code.to_string(), // helloasso account
inception_time: answer.inception_time,
kind: paheko::TransactionKind::Revenue,
linked_users: pk_users_summaries.iter().map(|x| x.id.clone()).collect(),
linked_subscriptions: pk_user_service_registrations.iter().map(|x| x.id.clone()).collect(),
payment_reference: serde_variant::to_variant_name(&answer.payment_mode)
.map(|x| x.to_string())
.ok(),
notes: serde_json::to_string(&answer.payment_mode)
.map(|x| format!("Mode de paiement: {}", x)).ok()
};
paheko_client.register_transaction(transaction)
.await.context("Expected to create new paheko transaction")?;
stats.transaction_created += 1;
eprintln!(" Created paheko transaction for subscription");
} else {
eprintln!(" Skipped creation of paheko transaction for subscription");
}
// check if donation is already reference
// look for an existing donation regisered in DONATION_ACCOUNT_CODE with the same ref
if answer.donation_amount > 0.0 {
if !existing_matching_transactions.iter().any(|t| t.credit_account_code == DONATION_ACCOUNT_CODE) {
// add transaction for donation
let transaction = paheko::SimpleTransaction {
accounting_year: match get_accounting_year_for_time(&accounting_years, &answer.inception_time) {
None => {
eprintln!("Cannot find an accounting year on paheko that include the inception date {:?} given", &answer.inception_time);
panic!();
},
Some(s) => s
}.id.clone(),
label: format!("Don lié à une adhésion via {}", via_name),
amount: answer.donation_amount,
reference: Some(answer.reference.clone()),
credit_account_code: DONATION_ACCOUNT_CODE.to_string(), // account 754 - Ressources liées à la générosité du public
debit_account_code: debit_account_code.to_string(), // compte d'encaissement
inception_time: answer.inception_time,
kind: paheko::TransactionKind::Revenue,
linked_users: pk_users_summaries.iter().map(|x| x.id.clone()).collect(),
linked_subscriptions: vec![],
payment_reference: serde_variant::to_variant_name(&answer.payment_mode)
.map(|x| x.to_string())
.ok(),
notes: serde_json::to_string(&answer.payment_mode)
.map(|x| format!("Mode de paiement: {}", x)).ok()
};
paheko_client.register_transaction(transaction)
.await.context("Expected to create new paheko transaction for donation")?;
stats.transaction_created += 1;
eprintln!(" Created paheko transaction for donation");
} else {
eprintln!(" Skipped creation of transaction donation");
}
}
pk_memberships.push(pk_membership);
}
eprintln!("{via_name} sync done.");
eprintln!("{} subs created; {} users created; {} transactions created", stats.subscriptions_created, stats.users_created, stats.transaction_created);
Ok(())
}

46
src/test_utils.rs Normal file
View file

@ -0,0 +1,46 @@
use crate::utils::{normalize_str, normalize_first_name};
use crate::sync_paheko::PaymentMode;
#[test]
fn test_normalize_str() {
    // surrounding whitespace is trimmed
    assert_eq!(normalize_str(" hello world ".to_string()), "hello world");
}
#[test]
fn test_normalize_first_name() {
    // table-driven: (input, expected normalized form)
    let cases = [
        ("jpp jpp", "Jpp-Jpp"),
        ("jean pierre", "Jean-Pierre"),
        ("JEAN-PIERRE", "Jean-Pierre"),
        ("JEAN PIERRE", "Jean-Pierre"),
        ("jeffrey", "Jeffrey"),
    ];
    for (input, expected) in cases {
        assert_eq!(normalize_first_name(input.to_string()), expected);
    }
}
#[test]
fn test_parse_payment_mode() {
    // "ESP" is the serde rename of PaymentMode::Cash.
    // (removed a leftover `dbg!` and a redundant
    // `.to_string().to_ascii_uppercase()` on an already-uppercase literal)
    assert_eq!(
        serde_json::from_str::<PaymentMode>("\"ESP\"").unwrap(),
        PaymentMode::Cash
    )
}
#[test]
fn test_payment_mode_to_string() {
    // serialization must emit the short code, quoted as JSON
    let encoded = serde_json::to_string(&PaymentMode::Cash).unwrap();
    assert_eq!(encoded, "\"ESP\"");
}

134
src/utils.rs Normal file
View file

@ -0,0 +1,134 @@
use serde::{Serialize, Deserialize, Deserializer};
use std::fmt;
use rand::{thread_rng, Rng};
use chrono::prelude::{DateTime, Utc, NaiveDate, Datelike};
/// Opaque 64-bit identifier for locally-generated records.
#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq, Hash)]
pub struct Id(pub u64);
impl From<Id> for String {
    // NOTE(review): this converts to lowercase *hexadecimal*, while `Display`
    // below prints *decimal* — the two string forms of the same Id differ.
    // Confirm which representation consumers expect before unifying.
    fn from(val: Id) -> Self {
        format!("{:x}", val.0)
    }
}
impl fmt::Display for Id {
    // Formats the raw id in decimal (not hex — see note on `From` above).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
/// Generate a random `Id` from the thread-local RNG.
/// Purely random (not sequential) — u64 collisions are possible but unlikely.
pub fn generate_id() -> Id {
    Id(thread_rng().gen())
}
/// Parse an ISO `YYYY-MM-DD` date into a UTC datetime at midnight.
/// Returns `None` when the input does not match the format.
pub fn parse_date_iso(inp: &str) -> Option<DateTime<Utc>> {
    let day = NaiveDate::parse_from_str(inp, "%Y-%m-%d").ok()?;
    let midnight = day.and_hms_opt(0, 0, 0).unwrap();
    Some(DateTime::<Utc>::from_naive_utc_and_offset(midnight, Utc))
}
/// Parse an American `MM/DD/YYYY` date into a UTC datetime at midnight.
/// Returns `None` when the input does not match the format.
pub fn parse_datetime_american(inp: &str) -> Option<DateTime<Utc>> {
    let day = NaiveDate::parse_from_str(inp, "%m/%d/%Y").ok()?;
    let midnight = day.and_hms_opt(0, 0, 0).unwrap();
    Some(DateTime::<Utc>::from_naive_utc_and_offset(midnight, Utc))
}
/// Parse a French `DD/MM/YYYY` date into a UTC datetime at midnight.
/// Returns `None` when the input does not match the format.
pub fn parse_datetime_french(inp: &str) -> Option<DateTime<Utc>> {
    let day = NaiveDate::parse_from_str(inp, "%d/%m/%Y").ok()?;
    let midnight = day.and_hms_opt(0, 0, 0).unwrap();
    Some(DateTime::<Utc>::from_naive_utc_and_offset(midnight, Utc))
}
/// Parse a phone number (defaulting to the FR region) and render it in the
/// library's normalized string form. Returns `None` on parse failure.
pub fn parse_normalize_phone(inp: String) -> Option<String> {
    phonenumber::parse(Some(phonenumber::country::Id::FR), inp)
        .ok()
        .map(|parsed| parsed.to_string())
}
/// Promote a calendar date to a UTC datetime at 00:00:00.
pub fn complete_date(inp: NaiveDate) -> DateTime<Utc> {
    let start_of_day = inp.and_hms_opt(0, 0, 0).unwrap();
    DateTime::<Utc>::from_naive_utc_and_offset(start_of_day, Utc)
}
/// Trim surrounding whitespace and collapse embedded newlines to `;`
/// so the value fits on a single line.
pub fn normalize_str(subject: String) -> String {
    // `str::replace` already returns an owned String; the previous trailing
    // `.to_string()` was a redundant extra allocation.
    subject.trim().replace('\n', ";")
}
/// Parse a French-format birth date (`DD/MM/YYYY`) and keep only the year:
/// day/month precision is dropped to comply with EU GDPR data-minimization
/// rules. Returns `None` on parse failure or a negative year.
pub fn parse_and_get_birthday_year(raw_date: String) -> Option<u32> {
    let d_res = NaiveDate::parse_from_str(raw_date.trim(), "%d/%m/%Y");
    let d = d_res.ok()?;
    d.year().try_into().ok()
}
/// Uppercase the first character of `s`, leaving the rest untouched.
pub fn capitalize(s: &str) -> String {
    let mut chars = s.chars();
    match chars.next() {
        Some(first) => first.to_uppercase().collect::<String>() + chars.as_str(),
        None => String::new(),
    }
}
/// Normalize a first name: trim, lowercase, hyphenate spaces and capitalize
/// each part (e.g. "JEAN PIERRE" -> "Jean-Pierre").
pub fn normalize_first_name(subject: String) -> String {
    let hyphenated = subject.trim().to_lowercase().replace(' ', "-");
    let parts: Vec<String> = hyphenated.split('-').map(capitalize).collect();
    parts.join("-")
}
/// Normalize a last name by uppercasing every character (Unicode-aware).
pub fn normalize_last_name(subject: String) -> String {
    subject.to_uppercase()
}
/// https://serde.rs/field-attrs.html
pub fn deserialize_datetime<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
DateTime::parse_from_rfc3339(&s)
.map_err(serde::de::Error::custom)
.map(|dt| dt.with_timezone(&Utc))
}
pub fn deserialize_date<'de, D>(deserializer: D) -> Result<NaiveDate, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
NaiveDate::parse_from_str(&s, "%Y-%m-%d")
.map_err(serde::de::Error::custom)
}
pub fn deserialize_json_list<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
serde_json::from_str(&s).map_err(serde::de::Error::custom)
}

7
sync_csv.sh Executable file
View file

@ -0,0 +1,7 @@
#!/usr/bin/sh
# Export the paper membership sheet ("Adhérents PAPIER") from the shared
# spreadsheet and feed it to the adapter's CSV source.
# NOTE(review): `#!/usr/bin/sh` is not universal — `#!/bin/sh` or
# `#!/usr/bin/env sh` would be more portable; confirm the target host.
#
# Pipeline:
#  1. xlsx2csv extracts the named sheet as CSV on stdout.
#  2. first sed joins header cells that were split as
#     "Champ complémentaire N\n…" back onto one line
#     (the :a;N;$!ba loop slurps the whole stream so `\n` can be matched).
#  3. second sed shortens "Champ complémentaire N" headers to "CC N".
#  4. the release build of the adapter ingests the cleaned CSV on stdin.
xlsx2csv -n "Adhérents PAPIER" /warmd/etoiledebethleem/copyparty/adhesions/nouvelles_adhesions.xlsx \
| sed ':a;N;$!ba;s/\(Champ complémentaire [0-9]\)\n/\1 /g' \
| sed 's/Champ complémentaire \([0-9]\)/CC \1/g' \
| target/release/paheko_helloasso_adapter_rs --source csv

View file