🔖 Bump version: 0.1.0 → 1.0.0
This commit is contained in:
parent
cbf68294ca
commit
b7311d088d
21 changed files with 1993 additions and 223 deletions
2
src/adgroupsync/__init__.py
Normal file
2
src/adgroupsync/__init__.py
Normal file
|
@ -0,0 +1,2 @@
|
|||
__version__ = "1.0.0"
|
||||
__author__ = "Marc Koch"
|
289
src/adgroupsync/__main__.py
Normal file
289
src/adgroupsync/__main__.py
Normal file
|
@ -0,0 +1,289 @@
|
|||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
from ldap3 import SIMPLE
|
||||
from ms_active_directory import ADDomain, ADGroup, ADUser, ADObject
|
||||
|
||||
from .conf import (
|
||||
AD_DOMAIN,
|
||||
AD_USER_NAME,
|
||||
AD_PASSWORD,
|
||||
AD_LDAP_SERVER,
|
||||
AD_PARENT_GROUP,
|
||||
AD_TIMEZONE,
|
||||
STDOUT_LOG_LEVEL,
|
||||
FILE_LOG_LEVEL,
|
||||
LOG_DIR,
|
||||
CIVICRM_BASE_URL,
|
||||
CIVICRM_API_KEY,
|
||||
CIVICRM_BATCH_SIZE,
|
||||
CIVICRM_RETRIES,
|
||||
CIVICRM_IGNORE_SSL,
|
||||
NTFY_URL,
|
||||
NTFY_TOPIC,
|
||||
NTFY_ACCESS_TOKEN,
|
||||
)
|
||||
from .logger import setup_logging
|
||||
from .models import RecentRun, CiviCrm, Ntfy
|
||||
|
||||
logger = logging.getLogger(__package__)
|
||||
|
||||
civicrm_credentials = {
|
||||
'base_url': CIVICRM_BASE_URL,
|
||||
'api_key': CIVICRM_API_KEY,
|
||||
'batch_size': CIVICRM_BATCH_SIZE,
|
||||
'ignore_ssl': CIVICRM_IGNORE_SSL,
|
||||
}
|
||||
|
||||
|
||||
def is_user_disabled(user: ADUser):
    """
    Determine whether an AD user account is disabled.

    The ACCOUNTDISABLE flag is bit 0x2 of the userAccountControl attribute.

    :param user: ADUser object
    :return: True if the user is disabled, False otherwise
    """
    uac = user.get('userAccountControl')
    if uac is None:
        # No attribute available -> treat the account as enabled
        return False

    disabled = (uac & 0b10) != 0
    if disabled:
        logger.debug(f"User '{user.name}' is disabled",
                     extra={'user': user.__dict__,
                            'status': 'disabled',
                            'userAccountControl': uac})
    return disabled
|
||||
|
||||
|
||||
def has_changed(object_: ADObject, recent_run):
    """
    Check whether an AD object was modified after the previous run.

    :param object_: Object to check
    :param recent_run: RecentRun object
    :return: True if the object has changed, False otherwise
    :raises ValueError: if the object has no 'modifyTimestamp' attribute
    """
    stamp = object_.get('modifyTimestamp')
    if stamp is None:
        raise ValueError(
            f"Object '{object_.get('sn')}' of type '{type(object_)}' "
            "does not have 'modifyTimestamp' attribute")

    # No previous run recorded: everything counts as changed.
    if recent_run.datetime is None:
        return True

    # RecentRun implements rich comparisons against datetime-like values.
    return stamp > recent_run
|
||||
|
||||
|
||||
def check_group_changes(group, level_dict, recent_run, visited):
    """
    Check if the group or any of its child groups have changed.

    :param group: The group to check.
    :param level_dict: The dictionary containing the hierarchy.
    :param recent_run: The RecentRun object.
    :param visited: Set of visited groups to avoid infinite loops.
    :return: True if the group or any of its child groups have changed,
        False otherwise.
    """
    # Cycle guard: a group already seen contributes nothing new.
    if group in visited:
        return False
    visited.add(group)

    # The group itself counts first.
    if has_changed(group, recent_run):
        return True

    # Otherwise recurse into child groups (short-circuits on first hit).
    return any(
        isinstance(child, ADGroup)
        and check_group_changes(child, level_dict, recent_run, visited)
        for child in level_dict.get(group, []))
|
||||
|
||||
|
||||
def collect_members(group, level_dict, visited):
    """
    Recursively gather all active (non-disabled) users below a group.

    :param group: Group whose membership is collected
    :param level_dict: Mapping of group -> direct children
    :param visited: Set of already-visited groups (cycle protection)
    :return: Set of active ADUser members
    """
    if group in visited:
        return set()
    visited.add(group)

    members = set()
    for child in level_dict.get(group, []):
        if isinstance(child, ADUser):
            # Active users become members; disabled accounts are skipped.
            if not is_user_disabled(child):
                members.add(child)
        elif isinstance(child, ADGroup):
            # Nested group: merge its recursively collected members.
            members |= collect_members(child, level_dict, visited)

    return members
|
||||
|
||||
|
||||
def build_group_membership(level_dict):
    """
    Resolve the transitive (recursive) membership of every known group.

    :param level_dict: Mapping of group -> direct children
    :return: Dict mapping each group to its non-empty set of members
    """
    membership = {}
    for group in level_dict:
        members = collect_members(group, level_dict, set())
        # Groups without any active member are dropped entirely.
        if members:
            membership[group] = members
    return membership
|
||||
|
||||
|
||||
def find_changed(all_groups, level_dict, recent_run):
    """
    Determine which groups and users changed since the previous run.

    :param all_groups: Mapping of group -> resolved member set
    :param level_dict: Mapping of group -> direct children
    :param recent_run: RecentRun object
    :return: Tuple (changed_groups, changed_users), both sets
    """
    changed_groups = set()
    for group in level_dict:
        if not check_group_changes(group, level_dict, recent_run, set()):
            continue
        changed_groups.add(group)
        logger.debug(f"Group '{group.name}' has changed", extra={
            'group': group.__dict__,
            'modifyTimestamp': group.get('modifyTimestamp'),
        })

    changed_users = set()
    for members in all_groups.values():
        for user in members:
            # Only active ADUser objects with a newer modifyTimestamp count.
            if not (isinstance(user, ADUser)
                    and not is_user_disabled(user)
                    and has_changed(user, recent_run)):
                continue
            changed_users.add(user)
            logger.debug(f"User '{user.name}' has changed", extra={
                'user': user.__dict__,
                'modifyTimestamp': user.get('modifyTimestamp'),
            })

    return changed_groups, changed_users
|
||||
|
||||
|
||||
def sync_groups(recent_run: RecentRun):
    """
    Synchronize AD mailing-list groups and their members with CiviCRM.

    Connects read-only to the AD domain, resolves the hierarchy below the
    configured parent group, determines what changed since the previous
    run and pushes those changes to CiviCRM (with retries), optionally
    notifying via ntfy on persistent failure.

    :param recent_run: RecentRun context holding the previous run timestamp
    """
    # Setup ntfy only if both a URL and a topic are provided
    ntfy = False
    if NTFY_URL and NTFY_TOPIC:
        ntfy = Ntfy(NTFY_URL, NTFY_ACCESS_TOKEN)

    # Open a read-only LDAP session against the AD domain
    domain = ADDomain(AD_DOMAIN, ldap_servers_or_uris=AD_LDAP_SERVER)
    logger.debug(f"Connecting to Domain '{domain.domain}'",
                 extra={'domain': domain.__dict__})
    try:
        session = domain.create_session_as_user(
            AD_USER_NAME, AD_PASSWORD, authentication_mechanism=SIMPLE,
            read_only=True)
    except Exception as e:
        logger.error(f"Error creating session: {e}")
        exit(1)
    logger.debug(f"Session opened: {session.is_open()}",
                 extra={'session': session.__dict__})

    # Get parent group
    parent_group = session.find_group_by_name(
        AD_PARENT_GROUP, ['modifyTimestamp'])

    # Attributes fetched for every group/user below the parent group
    group_attrs = [
        'modifyTimestamp',
        'objectSid',
        'givenName',
        'sn',
        'mail',
        'sAMAccountName',
        'description',
        'userAccountControl',
    ]

    mailinglists_levels = session.find_members_of_group_recursive(
        parent_group, group_attrs)

    # Flatten the per-level list of dicts into one group -> children map
    level_dict = {k: v for level in mailinglists_levels for k, v in
                  level.items()}

    groups = build_group_membership(level_dict)

    # Only direct children of the parent group (level 1) are mailing lists.
    # NOTE(review): assumes the parent group has at least one child level;
    # an empty hierarchy would raise IndexError here — confirm.
    mailinglists = {group: members for group, members in groups.items()
                    if group in mailinglists_levels[1].keys()}

    changed_groups, changed_users = find_changed(mailinglists, level_dict,
                                                 recent_run)

    groups_to_update = {group: members for group, members
                        in mailinglists.items() if group in changed_groups}

    # Changed users not already covered by a full group update
    users_to_update = set(user for user in changed_users
                          if not any((user in members) for members
                                     in groups_to_update.values()))

    # Break if there are no requests to send
    if not groups_to_update and not users_to_update:
        logger.info('No changes detected. Exiting...')
        return

    # Connect to CiviCRM
    with CiviCrm(**civicrm_credentials) as civicrm:

        # Prepare request for changed users
        civicrm.update_users(users_to_update)

        # Prepare requests for changed groups
        civicrm.update_groups(groups_to_update)

        # Send requests, retrying failures up to CIVICRM_RETRIES times
        retry_count = 0
        while retry_count < CIVICRM_RETRIES \
                and (error_count := civicrm.send_requests()) != 0:
            retry_count += 1
            # BUGFIX: the retry limit is configurable (CIVICRM_RETRIES);
            # the message previously hard-coded "/3".
            logger.warning(f"A total of {error_count} requests failed."
                           f" Retrying {retry_count}/{CIVICRM_RETRIES}")

        if retry_count >= CIVICRM_RETRIES:
            logger.error(
                f"Failed to send requests after {CIVICRM_RETRIES} retries.")

            # Send notification if ntfy is set
            if ntfy:
                logger.info('Sending notification via ntfy')
                ntfy_message = (
                    f"Failed to send requests after {CIVICRM_RETRIES} retries.\n"
                    '## Errors\n```json'
                    f"{json.dumps(civicrm.error_bag, indent=2)}\n```"
                )
                ntfy.send(
                    topic=NTFY_TOPIC,
                    title='Failed to sync AD groups with CiviCRM',
                    message=ntfy_message,
                    priority=ntfy.PRIORITY.HIGH,
                    markdown=True,
                )
        else:
            logger.info('All requests were sent successfully!')
|
||||
|
||||
|
||||
def main():
    """
    Entry point: configure logging, then run the AD -> CiviCRM group sync
    inside a RecentRun context so a successful run persists its start time
    as the new "previous run" marker.
    """
    setup_logging(file_log_level=FILE_LOG_LEVEL,
                  stdout_log_level=STDOUT_LOG_LEVEL,
                  logdir=LOG_DIR)

    try:
        logger.info('Running group sync')

        # Get the recent run timestamp (persisted in the user's home dir)
        file_path = Path().home() / '.recent_run'
        with RecentRun(file_path, tz=AD_TIMEZONE) as recent_run:
            if recent_run.datetime is None:
                logger.info('No recent run found')
            else:
                rr_time = recent_run.datetime.strftime('%Y-%m-%d %H:%M:%S %Z')
                logger.info(
                    f"Recent run at: {rr_time}")

            # Synchronize groups
            sync_groups(recent_run)

            # Log the current run timestamp; leaving the context persists it
            started_at = recent_run.started_at.strftime('%Y-%m-%d %H:%M:%S %Z')
            logger.info(f"Setting previous run to: {started_at}")

    except Exception as e:
        # Top-level boundary: log with traceback and exit non-zero
        logger.error(f"An error occurred: {e}", exc_info=True)
        exit(1)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
128
src/adgroupsync/conf.py
Normal file
128
src/adgroupsync/conf.py
Normal file
|
@ -0,0 +1,128 @@
|
|||
import argparse
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import pytz
|
||||
import yaml
|
||||
|
||||
logger = logging.getLogger(__package__)
|
||||
|
||||
|
||||
def create_config_file(dest: Path):
    """
    Copy the bundled example configuration to *dest*.

    :param dest: Target file; if a directory is given, a default file name
        derived from the package name is used inside it.
    """
    if dest.is_dir():
        dest = dest / f"{__package__}_config.yml"
    template = Path(__file__).parent / 'resources' / 'example_config.yml'
    with open(template, "r") as source, open(dest, "w") as target:
        target.writelines(source)
|
||||
|
||||
|
||||
# Assign environment variables or configuration file values.
# Environment variables always take precedence over the configuration file.
AD_DOMAIN = os.getenv('AD_DOMAIN')
AD_USER_NAME = os.getenv('AD_USER')
AD_PASSWORD = os.getenv('AD_PASSWORD')
# Comma-separated list of LDAP servers
AD_LDAP_SERVER = [s.strip() for s in os.getenv('AD_LDAP_SERVER').split(',')] \
    if os.getenv('AD_LDAP_SERVER') is not None else None
AD_TIMEZONE = pytz.timezone(os.getenv('AD_TIMEZONE')) \
    if os.getenv('AD_TIMEZONE') else None
AD_PARENT_GROUP = os.getenv('AD_PARENT_GROUP')
STDOUT_LOG_LEVEL = os.getenv('STDOUT_LOG_LEVEL')
FILE_LOG_LEVEL = os.getenv('FILE_LOG_LEVEL')
LOG_DIR = os.getenv('LOG_DIR')
CIVICRM_BASE_URL = os.getenv('CIVICRM_BASE_URL')
CIVICRM_API_KEY = os.getenv('CIVICRM_API_KEY')
CIVICRM_BATCH_SIZE = int(os.getenv('CIVICRM_BATCH_SIZE')) \
    if os.getenv('CIVICRM_BATCH_SIZE') is not None else None
CIVICRM_RETRIES = int(os.getenv('CIVICRM_RETRIES')) \
    if os.getenv('CIVICRM_RETRIES') is not None else None
# BUGFIX: bool() on any non-empty string is True, so CIVICRM_IGNORE_SSL=false
# (or =0) used to *enable* the flag. Parse the common "false" spellings.
CIVICRM_IGNORE_SSL = os.getenv('CIVICRM_IGNORE_SSL').strip().lower() \
    not in ('', '0', 'false', 'no', 'off') \
    if os.getenv('CIVICRM_IGNORE_SSL') is not None else None
NTFY_URL = os.getenv('NTFY_URL')
NTFY_TOPIC = os.getenv('NTFY_TOPIC')
NTFY_ACCESS_TOKEN = os.getenv('NTFY_ACCESS_TOKEN')

try:
    argparser = argparse.ArgumentParser(
        description='This program synchronizes Active Directory groups with '
                    'CiviCRM groups.')

    argparser.add_argument(
        "--conf",
        action="store",
        type=Path,
        help="Path the configuration file",
    )

    argparser.add_argument(
        "--create-conf",
        action="store_true",
        help="Create a configuration file",
    )

    args = argparser.parse_args()

    # If a path to a config file was provided
    if args.conf:

        # Check if configuration file exists
        config_file = Path(args.conf)
        if not config_file.is_file() and not args.create_conf:
            raise FileNotFoundError(
                f"Configuration file '{config_file}' does not exist.")

        # Create configuration file if requested and exit
        if args.create_conf:
            create_config_file(config_file)
            exit(0)

        # Load configuration file
        with open(config_file, 'r') as file:
            config = yaml.safe_load(file)

        # Fill in values that were not set via the environment
        AD_DOMAIN = AD_DOMAIN or config['AD']['DOMAIN']
        AD_USER_NAME = AD_USER_NAME or config['AD']['USER']
        AD_PASSWORD = AD_PASSWORD or config['AD']['PASSWORD']
        AD_LDAP_SERVER = AD_LDAP_SERVER or config['AD'].get('LDAP_SERVER')
        AD_TIMEZONE = AD_TIMEZONE \
            or pytz.timezone(config['AD'].get('TIMEZONE', 'UTC'))
        AD_PARENT_GROUP = AD_PARENT_GROUP or config['AD']['PARENT_GROUP']
        STDOUT_LOG_LEVEL = STDOUT_LOG_LEVEL \
            or config['LOGGING'].get('STDOUT_LOG_LEVEL', 'INFO')
        FILE_LOG_LEVEL = FILE_LOG_LEVEL \
            or config['LOGGING'].get('FILE_LOG_LEVEL', 'WARNING')
        LOG_DIR = LOG_DIR or config['LOGGING'].get('LOG_DIR')
        CIVICRM_BASE_URL = CIVICRM_BASE_URL or config['CIVICRM']['BASE_URL']
        CIVICRM_API_KEY = CIVICRM_API_KEY or config['CIVICRM']['API_KEY']
        CIVICRM_BATCH_SIZE = CIVICRM_BATCH_SIZE \
            or config['CIVICRM']['BATCH_SIZE']
        CIVICRM_RETRIES = CIVICRM_RETRIES \
            or config['CIVICRM'].get('RETRIES', 3)
        CIVICRM_IGNORE_SSL = CIVICRM_IGNORE_SSL \
            or bool(config['CIVICRM'].get('IGNORE_SSL', False))
        # BUGFIX: the previous one-liners parsed as
        # "(env or config) if 'NTFY' in config else None", which discarded
        # NTFY_* environment values whenever the config file had no NTFY
        # section. Keep the environment values in that case.
        if 'NTFY' in config:
            NTFY_URL = NTFY_URL or config['NTFY'].get('URL')
            NTFY_TOPIC = NTFY_TOPIC or config['NTFY'].get('TOPIC')
            NTFY_ACCESS_TOKEN = NTFY_ACCESS_TOKEN \
                or config['NTFY'].get('ACCESS_TOKEN')

    # Check if some required values are missing
    required = {
        "AD_DOMAIN": AD_DOMAIN,
        "AD_USER_NAME": AD_USER_NAME,
        "AD_PASSWORD": AD_PASSWORD,
        "AD_LDAP_SERVER": AD_LDAP_SERVER,
        "AD_PARENT_GROUP": AD_PARENT_GROUP,
        "CIVICRM_BASE_URL": CIVICRM_BASE_URL,
        "CIVICRM_API_KEY": CIVICRM_API_KEY,
    }
    if len(missing := [k for k, v in required.items() if v is None]) > 0:
        raise ValueError('Some required values are missing. '
                         'Please use a configuration file '
                         'or provide all required environment variables. '
                         'Missing: %s'
                         % ','.join(missing))

except Exception as e:
    # Configuration is mandatory; abort on any failure during loading
    logger.error(e, exc_info=True)
    exit(1)
|
11
src/adgroupsync/enums.py
Normal file
11
src/adgroupsync/enums.py
Normal file
|
@ -0,0 +1,11 @@
|
|||
from enum import Enum
|
||||
|
||||
class Priority(Enum):
    """
    Enum for the different priority levels.

    Used by the package's ntfy notifications (models.Ntfy exposes it as
    ``PRIORITY``); ``MAX`` and ``URGENT`` are aliases of the same highest
    level.
    """
    MIN = 1
    LOW = 2
    DEFAULT = 3
    HIGH = 4
    # MAX is the canonical member; URGENT becomes an alias for it.
    MAX = URGENT = 5
|
154
src/adgroupsync/logger.py
Normal file
154
src/adgroupsync/logger.py
Normal file
|
@ -0,0 +1,154 @@
|
|||
import atexit
|
||||
import datetime as dt
|
||||
import json
|
||||
import logging
|
||||
import logging.config
|
||||
from pathlib import Path
|
||||
from typing import override
|
||||
|
||||
PROJECT_ROOT = Path(__file__).parent
|
||||
LOG_RECORD_BUILTIN_ATTRS = {
|
||||
"args",
|
||||
"asctime",
|
||||
"created",
|
||||
"exc_info",
|
||||
"exc_text",
|
||||
"filename",
|
||||
"funcName",
|
||||
"levelname",
|
||||
"levelno",
|
||||
"lineno",
|
||||
"module",
|
||||
"msecs",
|
||||
"message",
|
||||
"msg",
|
||||
"name",
|
||||
"pathname",
|
||||
"process",
|
||||
"processName",
|
||||
"relativeCreated",
|
||||
"stack_info",
|
||||
"thread",
|
||||
"threadName",
|
||||
"taskName",
|
||||
}
|
||||
|
||||
|
||||
def setup_logging(
        logdir: Path | str | None = None,
        **kwargs,
):
    """
    Setup logging configuration.

    Loads the dictConfig template from resources/logging_config.json,
    applies per-handler level overrides, points the file handler at the
    requested directory, and starts the queue handler's listener thread.

    :param logdir: Directory to store the log file
    :keyword file_log_level: Log level for the file handler
    :keyword stdout_log_level: Log level for the stdout handler
    :return:
    """
    # Handlers whose levels may be overridden via keyword arguments
    handlers = ["file", "stdout"]

    config_file = PROJECT_ROOT / "resources" / "logging_config.json"
    with open(config_file, "r") as file:
        config = json.load(file)

    # Override log level if provided
    for handler in handlers:
        if log_level := kwargs.get(f"{handler}_log_level"):
            # getLevelName maps a level name to its number (and a number
            # back to its name); the round-trip normalizes any valid
            # name/number into the canonical level-name string that the
            # dictConfig schema expects.
            level_str = logging.getLevelName(log_level.upper())
            if not isinstance(level_str, str):
                level_str = logging.getLevelName(level_str)
            config["handlers"][handler]["level"] = level_str

    # Set log file path to user log directory
    if logdir:
        logdir = Path(logdir)
        config["handlers"]["file"]["filename"] = logdir / "adGroupSync.log.jsonl"

    # Create path and file if it does not exist
    Path(config["handlers"]["file"]["filename"]).parent.mkdir(
        parents=True, exist_ok=True)
    Path(config["handlers"]["file"]["filename"]).touch()

    logging.config.dictConfig(config)
    # Start the QueueListener behind the configured queue handler
    # (logging.getHandlerByName is Python 3.12+) and ensure it is stopped
    # cleanly at interpreter exit.
    queue_handler = logging.getHandlerByName("queue_handler")
    if queue_handler is not None:
        queue_handler.listener.start()
        atexit.register(queue_handler.listener.stop)
|
||||
|
||||
|
||||
class JSONFormatter(logging.Formatter):
    """
    A custom JSON formatter for logging.

    Emits one JSON object per record: the message, a UTC timestamp, any
    configured fmt_keys, exception/stack info, and all custom attributes
    passed via ``extra=``. Values whose keys look sensitive (HIDE_KEYS)
    are masked before serialization.
    """
    # Substrings that mark a key as sensitive
    HIDE_KEYS = ["password", "token", "api_key", "site_key"]

    def __init__(
            self,
            *,
            fmt_keys: dict[str, str] | None = None,
    ):
        """
        :param fmt_keys: Mapping of output key -> record attribute name
        """
        super().__init__()
        self.fmt_keys = fmt_keys if fmt_keys is not None else {}

    # NOTE(review): the @typing.override decorator was removed — it exists
    # only on Python 3.12+ and has no runtime effect.
    def format(self, record: logging.LogRecord) -> str:
        message = self._prepare_log_dict(record)

        # BUGFIX: mask sensitive values in the dict that is actually
        # serialized. Previously the *record* was masked after the message
        # dict had been built, so top-level sensitive values (immutable
        # strings already copied into the dict) leaked unmasked.
        self._hide_passwords(message)

        return json.dumps(message, default=str)

    def _hide_passwords(self, log_record: logging.LogRecord | dict):
        """
        Recursively replace all values with keys containing 'password',
        'token', etc. with '********'. Mutates the input in place.

        :param log_record: LogRecord or plain dict
        :return: The (mutated) input object
        """
        if isinstance(log_record, dict):
            dict_obj = log_record
        else:
            dict_obj = log_record.__dict__

        for key, value in dict_obj.items():
            if isinstance(value, dict):
                # BUGFIX: recurse without rebinding dict_obj — the old code
                # overwrote dict_obj with the nested dict here, so masks for
                # later keys were written into the wrong (nested) dict.
                self._hide_passwords(value)
            elif any(hide_key in key.lower() for hide_key in self.HIDE_KEYS):
                dict_obj[key] = "********"

        return log_record

    def _prepare_log_dict(self, record: logging.LogRecord) -> dict:
        """
        Build the output dict for a record: mandatory fields first, then
        the configured fmt_keys, then all non-standard ('extra') attributes.
        """
        always_fields = {
            "message": record.getMessage(),
            "timestamp": dt.datetime.fromtimestamp(
                record.created, tz=dt.timezone.utc
            ).isoformat()
        }
        if record.exc_info is not None:
            always_fields["exc_info"] = self.formatException(record.exc_info)

        if record.stack_info is not None:
            always_fields["stack_info"] = self.formatStack(record.stack_info)

        # fmt_keys may remap mandatory fields or pull record attributes
        message = {
            key: msg_val
            if (msg_val := always_fields.pop(val, None)) is not None
            else getattr(record, val)
            for key, val in self.fmt_keys.items()
        }
        message.update(always_fields)

        # Include all other attributes (custom 'extra' values)
        for key, val in record.__dict__.items():
            if key not in LOG_RECORD_BUILTIN_ATTRS:
                message[key] = val

        return message
|
548
src/adgroupsync/models.py
Normal file
548
src/adgroupsync/models.py
Normal file
|
@ -0,0 +1,548 @@
|
|||
import json
|
||||
import logging
|
||||
from collections import deque
|
||||
from datetime import datetime as dt, timezone
|
||||
from pathlib import Path
|
||||
|
||||
import pytz
|
||||
from civifang import api
|
||||
from httpx import post
|
||||
from ms_active_directory import ADUser, ADGroup
|
||||
|
||||
from .enums import Priority
|
||||
|
||||
logger = logging.getLogger(__package__)
|
||||
|
||||
|
||||
class RecentRun:
    """
    Class to manage the last run of the script.

    The previous-run timestamp and a running flag are persisted in a small
    text file with lines ``recent-run:<unix timestamp>`` and
    ``is-running:<true|false>``. Used as a context manager: entering marks
    the run as started; leaving *without* an exception persists the start
    time as the new recent-run marker.
    """

    def __init__(self, file_path: Path, tz: pytz = pytz.utc):
        """
        Initialize the class
        :param file_path: File path to store the recent run timestamp
        :param tz: Timezone to use for the timestamp
        """
        self._datetime = None       # previous run as tz-aware datetime, or None
        self._timezone = tz
        self._file_path = file_path
        self._is_running = False    # running flag persisted in the state file
        self._started_at = None     # set on __enter__

        # Create the file if it does not exist
        self._file_path.touch(exist_ok=True)

        self._read_data_from_file()

    def _sync_file(
            self,
            recent_run: dt | None = None,
            is_running: bool = False
    ):
        """
        Write the recent run timestamp and running status to the file.
        Values that are not provided keep whatever is currently stored.

        :param recent_run: New timestamp to persist (None keeps the old one)
        :param is_running: New running flag
        :return:
        """
        # Convert the is_running boolean to its string representation
        is_running = 'true' if is_running else 'false' \
            if is_running is not None else None

        # Read the file and update the values if they are different
        with open(self._file_path, 'r+') as f:
            # Read the data from the file
            data = f.readlines()
            old_recent_run, old_is_running = self._read_data(data)

            # Update the values if they were provided
            timestamp = recent_run.timestamp() if recent_run else old_recent_run
            is_running = is_running or old_is_running
            new_data = [
                f"recent-run:{timestamp}",
                '\n',
                f"is-running:{is_running}",
            ]

            # Rewrite the file contents within the same handle
            f.seek(0)
            f.truncate()
            f.writelines(new_data)

    @staticmethod
    def _read_data(data: list):
        """
        Parse the state-file lines.

        :param data: Raw lines from the state file
        :return: Tuple (recent_run timestamp as float or None,
                 is_running string 'true'/'false' or None)
        """
        time = None
        is_running = None
        for line in data:
            line = line.strip()
            if line.startswith('recent-run:'):
                time = line.split(':', 1)[1].strip()
            elif line.startswith('is-running:'):
                is_running = line.split(':', 1)[1].strip()

        # BUGFIX: a fresh or incomplete state file has no usable timestamp;
        # float(None) / float('None') used to raise here on the first run.
        try:
            recent_run = float(time) if time is not None else None
        except ValueError:
            recent_run = None

        return recent_run, is_running

    def _read_data_from_file(self):
        """
        Read the recent run time and running flag from the file.
        :return:
        """
        with open(self._file_path, 'r') as f:
            data = f.readlines()
        recent_run, is_running = self._read_data(data)

        # Read running status
        self._is_running = is_running == 'true'

        # No stored timestamp (e.g. first ever run): nothing more to do
        if not recent_run:
            return
        try:
            self._datetime = dt.fromtimestamp(float(recent_run)) \
                .astimezone(self._timezone)
        except ValueError as e:
            raise ValueError(
                f"Invalid timestamp '{recent_run}' in {self._file_path}: {e}")

    @property
    def datetime(self) -> dt | None:
        """
        Get the recent run timestamp
        :return:
        """
        return self._datetime

    @datetime.setter
    def datetime(self, value: dt):
        """
        Set the recent run timestamp.

        BUGFIX: the annotation previously said ``datetime``, which inside
        this class body resolved to the property object, not the datetime
        class; it now uses the imported alias ``dt``.

        :param value: New timestamp; naive values are converted using the
            configured timezone
        :return:
        """
        if value.tzinfo is None:
            value = value.astimezone(self._timezone)
        self._datetime = value

    @property
    def started_at(self) -> dt | None:
        """
        Get the time the script was started (set on __enter__)
        :return:
        """
        return self._started_at

    @property
    def timestamp(self) -> float:
        """
        Get the recent run time as a Unix timestamp (raises if unset)
        :return:
        """
        return self._datetime.timestamp()

    @property
    def is_running(self):
        """
        Get the running status
        :return:
        """
        return self._is_running

    @staticmethod
    def _to_datetime(value: dt | str | float) -> dt:
        """
        Convert the value (datetime, numeric string or float timestamp)
        to a UTC datetime object.

        :param value:
        :return:
        :raises ValueError: if the value cannot be parsed
        """
        try:
            if isinstance(value, str):
                value = float(value)
            if isinstance(value, float):
                value = dt.fromtimestamp(value).astimezone(timezone.utc)
        except ValueError:
            raise ValueError(f"Invalid timestamp '{value}'")
        return value

    def __enter__(self):
        self._started_at = dt.now(self._timezone)
        self._is_running = True
        self._sync_file(is_running=self._is_running)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        new_datetime = None
        self._is_running = False

        # If an exception occurred, do not update the recent run timestamp
        if exc_type is None:
            self.datetime = new_datetime = self._started_at

        self._sync_file(new_datetime, is_running=self._is_running)

    # Rich comparisons delegate to the stored datetime; 'other' may be a
    # datetime, a numeric string or a float timestamp.
    # NOTE: defining __eq__ makes instances unhashable (inherited __hash__
    # is set to None), same as before.
    def __gt__(self, other: dt | str | float):
        return self.datetime > self._to_datetime(other)

    def __lt__(self, other: dt | str | float):
        return self.datetime < self._to_datetime(other)

    def __eq__(self, other: dt | str | float):
        return self.datetime == self._to_datetime(other)

    def __ge__(self, other: dt | str | float):
        return self.datetime >= self._to_datetime(other)

    def __le__(self, other: dt | str | float):
        return self.datetime <= self._to_datetime(other)
|
||||
|
||||
|
||||
class CiviCrm:
|
||||
"""
|
||||
Class to interact with CiviCRM via the API
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
base_url: str,
|
||||
api_key: str,
|
||||
batch_size: int,
|
||||
ignore_ssl: bool = False,
|
||||
):
|
||||
"""
|
||||
Initialize the class
|
||||
:param base_url: Base URL of the CiviCRM installation
|
||||
:param api_key: API key for CiviCRM
|
||||
:param batch_size: Number of users to send in one request
|
||||
:param ignore_ssl: Accept unencrypted connections
|
||||
"""
|
||||
self._base_url = base_url
|
||||
self._api_key = api_key
|
||||
self._auth_flow = api.AUTH_FLOWS.XHEADER
|
||||
self._batch_size = batch_size
|
||||
self._ignore_ssl = ignore_ssl
|
||||
self._requests = {'groups': deque(), 'users': deque()}
|
||||
self._failed_requests = {'groups': [], 'users': []}
|
||||
self._error_bag = []
|
||||
|
||||
def __enter__(self):
|
||||
self._setup()
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.close()
|
||||
if isinstance(exc_val, Exception):
|
||||
logger.exception(
|
||||
"The connection to CiviCRM was closed due to an exception",
|
||||
extra={'exc_type': exc_type, 'exc_val': exc_val,
|
||||
'exc_tb': exc_tb})
|
||||
exit(1)
|
||||
|
||||
def _setup(self):
|
||||
api_params = {
|
||||
"url": self._base_url,
|
||||
"api_key": self._api_key,
|
||||
"auth_flow": self._auth_flow,
|
||||
"ignore_ssl": self._ignore_ssl,
|
||||
}
|
||||
|
||||
# Check for missing parameters
|
||||
if any([v for v in api_params.values() if not v]):
|
||||
missing_params = [k for k, v in api_params.items() if not v]
|
||||
raise ValueError(
|
||||
f"Missing API parameters: {', '.join(missing_params)}")
|
||||
|
||||
# Connect to CiviCRM
|
||||
try:
|
||||
logger.debug("Connecting to CiviCRM", extra=api_params)
|
||||
api.setup(**api_params)
|
||||
except Exception as e:
|
||||
logger.exception(f"Error connecting to CiviCRM: {e}",
|
||||
extra=api_params)
|
||||
raise e
|
||||
|
||||
def update_groups(self, groups: dict[ADGroup, set]):
|
||||
"""
|
||||
Update the groups in CiviCRM via Mailingslistsync.Adgroupsync API
|
||||
:param groups:
|
||||
:return:
|
||||
"""
|
||||
groups_data = []
|
||||
|
||||
for group, users in groups.items():
|
||||
|
||||
# Filter users for missing values and wrong types
|
||||
users = self._filter_users(users)
|
||||
|
||||
group_data = {
|
||||
'sid': group.get("objectSid"),
|
||||
'email': group.get("mail"),
|
||||
'name': group.name,
|
||||
'description': group.get("description"),
|
||||
'recipients': json.dumps(users),
|
||||
}
|
||||
|
||||
# Check group for missing values
|
||||
name = group.name or 'Unknown'
|
||||
sid = group.get('objectSid') or 'Unknown'
|
||||
message = f"Missing values for group '{name}' ({sid}): %s"
|
||||
if self.check_values(group_data, message, ['description']):
|
||||
groups_data.append(self._filter_data(group_data))
|
||||
|
||||
# Add the groups to the request list
|
||||
for group in groups_data:
|
||||
self._requests['groups'].append({
|
||||
'entity': 'Mailinglistsync',
|
||||
'action': 'Adgroupsync',
|
||||
'query': group,
|
||||
'method': api.HTTP_METHODS.POST,
|
||||
})
|
||||
|
||||
def update_users(self, users: set[ADUser]):
|
||||
"""
|
||||
Update the users in CiviCRM via Mailingslistsync.Adgroupsync API
|
||||
:param users:
|
||||
:return:
|
||||
"""
|
||||
|
||||
# Filter users for missing values and wrong types
|
||||
users = self._filter_users(users)
|
||||
|
||||
# Split the users into batches
|
||||
data_batches = self._chunks(users, self._batch_size)
|
||||
|
||||
# Add the users to the request list
|
||||
for batch in data_batches:
|
||||
self._requests['users'].append({
|
||||
'entity': 'Mailinglistsync',
|
||||
'action': 'Adgroupsync',
|
||||
'query': {'recipients': json.dumps(batch)},
|
||||
'method': api.HTTP_METHODS.POST,
|
||||
})
|
||||
|
||||
def send_requests(self) -> int:
|
||||
"""
|
||||
Run the tasks in the task queue
|
||||
:return: Number of failed requests
|
||||
"""
|
||||
error_count = 0
|
||||
failed_requests = {'groups': deque(), 'users': deque()}
|
||||
|
||||
for name, requests in self._requests.items():
|
||||
logger.info(f"Sending {len(requests)} {name}")
|
||||
|
||||
while requests:
|
||||
request = requests.popleft()
|
||||
|
||||
try:
|
||||
result = api.api3(**request)
|
||||
logger.info(f"Result: {result}", extra={'result': result})
|
||||
if result.get('is_error', False):
|
||||
raise Exception(result.get('error_message'))
|
||||
|
||||
except Exception as e:
|
||||
self._error_bag.append({
|
||||
'name': name,
|
||||
'request': {
|
||||
'entity': request['entity'],
|
||||
'action': request['action'],
|
||||
'query': {
|
||||
k: (json.loads(v) if k == 'recipients' else v)
|
||||
for k, v in request['query'].items()},
|
||||
'method': str(request['method']),
|
||||
},
|
||||
'error': str(e),
|
||||
})
|
||||
logger.exception(f"Error sending request: {e}",
|
||||
extra=request)
|
||||
failed_requests[name].append(request)
|
||||
error_count += 1
|
||||
|
||||
# Append failed requests to requests again
|
||||
for name, requests in failed_requests.items():
|
||||
while requests:
|
||||
self._requests[name].append(requests.popleft())
|
||||
|
||||
return error_count
|
||||
|
||||
@staticmethod
|
||||
def _chunks(lst, n):
|
||||
"""Yield successive n-sized chunks from lst."""
|
||||
for i in range(0, len(lst), n):
|
||||
yield lst[i:i + n]
|
||||
|
||||
@classmethod
|
||||
def _filter_users(cls, users: set) -> list | None:
|
||||
"""
|
||||
Filter users for missing values and wrong types
|
||||
:param users: Set of users
|
||||
:return: List of filtered users
|
||||
"""
|
||||
result = []
|
||||
for user in users:
|
||||
if isinstance(user, ADUser):
|
||||
data = {
|
||||
'sid': user.get("objectSid"),
|
||||
'email': user.get("mail"),
|
||||
'first_name': user.get("givenName"),
|
||||
'last_name': user.get("sn"),
|
||||
}
|
||||
# Check for missing values and log them
|
||||
name = user.get('sn') or 'Unknown'
|
||||
sid = user.get('objectSid') or 'Unknown'
|
||||
message = f"Missing values for user '{name}' ({sid}): %s"
|
||||
if cls.check_values(data, message,
|
||||
['first_name', 'last_name']):
|
||||
result.append(cls._filter_data(data))
|
||||
else:
|
||||
raise ValueError(f"Invalid user type: {type(user)}")
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def _filter_data(data):
|
||||
"""
|
||||
Filter the data for missing values
|
||||
:return:
|
||||
"""
|
||||
return {k: v for k, v in data.items() if v is not None}
|
||||
|
||||
@staticmethod
|
||||
def check_values(data: dict, message: str, ignore_keys: list[str] = None):
|
||||
"""
|
||||
Check for missing values in the data and log them.
|
||||
:param data:
|
||||
:param message: Should contain an %s placeholder for the missing values
|
||||
:param ignore_keys: List of keys to ignore
|
||||
:return: True if all values are present, False otherwise
|
||||
"""
|
||||
if ignore_keys is None:
|
||||
ignore_keys = []
|
||||
|
||||
missing_values = {
|
||||
key: value for key, value in data.items() if
|
||||
not value and key not in ignore_keys
|
||||
}
|
||||
|
||||
if missing_values:
|
||||
message = message % ', '.join(missing_values.keys())
|
||||
log_data = {}
|
||||
for key, value in data.items(): # Sanitize the data
|
||||
if key in ['name']:
|
||||
log_data['name_'] = value
|
||||
else:
|
||||
log_data[key] = value
|
||||
logger.debug(
|
||||
message,
|
||||
extra={'data': log_data}
|
||||
)
|
||||
|
||||
return not bool(missing_values)
|
||||
|
||||
@staticmethod
def close():
    """
    Close the connection to CiviCRM.

    Delegates to the module-level ``api`` client defined elsewhere in
    this module — presumably the shared CiviCRM API connection; verify
    against the module top.
    :return:
    """
    api.disconnect()
|
||||
|
||||
@property
def requests(self) -> dict:
    """
    Read-only access to the queued API requests, keyed by name.

    :return: The internal request mapping
    """
    return self._requests
|
||||
|
||||
@property
def error_bag(self) -> list:
    """
    Read-only access to the accumulated request errors.

    :return: The internal error list
    """
    return self._error_bag
|
||||
|
||||
|
||||
class Ntfy:
|
||||
"""
|
||||
Class to send notifications via ntfy
|
||||
"""
|
||||
|
||||
PRIORITY = Priority
|
||||
|
||||
def __init__(self, url: str, access_token: str = None):
|
||||
"""
|
||||
Initialize the class
|
||||
:param url: nfyt URL
|
||||
:param access_token: Access token if required
|
||||
"""
|
||||
self.url = url if url.endswith('/') else f"{url}/"
|
||||
self.access_token = access_token
|
||||
|
||||
def send(
|
||||
self,
|
||||
topic,
|
||||
message: str = None,
|
||||
title: str = None,
|
||||
tags: str | list = None,
|
||||
priority: int | PRIORITY = None,
|
||||
link: str = None,
|
||||
markdown: bool = False,
|
||||
**kwargs
|
||||
):
|
||||
"""
|
||||
Send a notification via ntfy
|
||||
:param topic: Topic to send the notification to
|
||||
:param message: Message to send
|
||||
:param title: Message title
|
||||
:param tags: Tags to add to the message (see ntfy documentation)
|
||||
:param priority: See Priority enum
|
||||
:param link: A link to add to the message
|
||||
:param markdown: Whether to use markdown
|
||||
:param kwargs:
|
||||
:return:
|
||||
"""
|
||||
if self.access_token:
|
||||
headers = {
|
||||
'Authorization': f'Bearer {self.access_token}',
|
||||
} | kwargs.get('headers', {})
|
||||
else:
|
||||
headers = kwargs.get('headers', {})
|
||||
|
||||
match priority:
|
||||
case self.PRIORITY.MIN:
|
||||
headers['Priority'] = 'min'
|
||||
case self.PRIORITY.LOW:
|
||||
headers['Priority'] = 'low'
|
||||
case self.PRIORITY.DEFAULT:
|
||||
headers['Priority'] = 'default'
|
||||
case self.PRIORITY.HIGH:
|
||||
headers['Priority'] = 'high'
|
||||
case self.PRIORITY.MAX:
|
||||
headers['Priority'] = 'max'
|
||||
case _:
|
||||
headers['Priority'] = 'default'
|
||||
|
||||
if title:
|
||||
headers['Title'] = title
|
||||
if tags:
|
||||
headers['Tags'] = tags if isinstance(tags, str) else ','.join(tags)
|
||||
if link:
|
||||
headers['Click'] = link
|
||||
if markdown:
|
||||
headers['Markdown'] = 'yes'
|
||||
|
||||
try:
|
||||
post(f"{self.url}{topic}", headers=headers, data=message)
|
||||
except Exception as e:
|
||||
logger.exception(f"Error sending notification: {e}", extra={
|
||||
'url': self.url,
|
||||
'topic': topic,
|
||||
'headers': headers,
|
||||
'message': message,
|
||||
})
|
25
src/adgroupsync/resources/example_config.yml
Normal file
25
src/adgroupsync/resources/example_config.yml
Normal file
|
@ -0,0 +1,25 @@
|
|||
AD:
|
||||
DOMAIN: ad.example.com
|
||||
USER: example\username
|
||||
PASSWORD: xxxxxxxx
|
||||
LDAP_SERVER:
|
||||
- ldaps://server1.ad.example.com:636
|
||||
PARENT_GROUP: Mailinglists
|
||||
TIMEZONE: UTC
|
||||
|
||||
LOGGING:
|
||||
STDOUT_LOG_LEVEL: info
|
||||
FILE_LOG_LEVEL: info
|
||||
LOG_DIR: /var/log/adGroupSync/
|
||||
|
||||
CIVICRM:
|
||||
BASE_URL: https://civicrm.example.com
|
||||
API_KEY: xxxxxxxx
|
||||
BATCH_SIZE: 50
|
||||
RETRIES: 3
|
||||
# IGNORE_SSL: yes
|
||||
|
||||
NTFY:
|
||||
URL: https://ntfy.example.com
|
||||
TOPIC: adGroupSync
|
||||
ACCESS_TOKEN: tk_xxxxxxxxxxxxxxxxxxx
|
55
src/adgroupsync/resources/logging_config.json
Normal file
55
src/adgroupsync/resources/logging_config.json
Normal file
|
@ -0,0 +1,55 @@
|
|||
{
|
||||
"version": 1,
|
||||
"disable_existing_loggers": false,
|
||||
"formatters": {
|
||||
"simple": {
|
||||
"format": "%(levelname)s - %(message)s",
|
||||
"datefmt": "%Y-%m-%dT%H:%M:%Sz"
|
||||
},
|
||||
"json": {
|
||||
"()": "adgroupsync.logger.JSONFormatter",
|
||||
"fmt_keys": {
|
||||
"level": "levelname",
|
||||
"message": "message",
|
||||
"timestamp": "timestamp",
|
||||
"logger": "name",
|
||||
"module": "module",
|
||||
"function": "funcName",
|
||||
"line": "lineno",
|
||||
"thread_name": "threadName"
|
||||
}
|
||||
}
|
||||
},
|
||||
"handlers": {
|
||||
"stdout": {
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": "simple",
|
||||
"stream": "ext://sys.stdout",
|
||||
"level": "INFO"
|
||||
},
|
||||
"file": {
|
||||
"class": "logging.handlers.RotatingFileHandler",
|
||||
"formatter": "json",
|
||||
"filename": "adgroupsync.log.jsonl",
|
||||
"level": "INFO",
|
||||
"maxBytes": 10000000,
|
||||
"backupCount": 3
|
||||
},
|
||||
"queue_handler": {
|
||||
"class": "logging.handlers.QueueHandler",
|
||||
"handlers": [
|
||||
"stdout",
|
||||
"file"
|
||||
],
|
||||
"respect_handler_level": true
|
||||
}
|
||||
},
|
||||
"loggers": {
|
||||
"root": {
|
||||
"handlers": [
|
||||
"queue_handler"
|
||||
],
|
||||
"level": "DEBUG"
|
||||
}
|
||||
}
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue