#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2021 The Matrix.org Foundation C.I.C.
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#

import logging
from typing import Any, Dict, List
from urllib import parse as urlparse

import yaml
from netaddr import IPSet

from synapse.appservice import ApplicationService
from synapse.types import JsonDict, UserID

from ._base import Config, ConfigError

logger = logging.getLogger(__name__)


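# For reference, a homeserver config snippet read by AppServiceConfig below might
# look roughly like this (an illustrative sketch only; the path is a placeholder):
#
#   app_service_config_files:
#     - /path/to/appservice-registration.yaml
#   track_appservice_user_ips: false
#   use_appservice_legacy_authorization: false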
class AppServiceConfig(Config):
    section = "appservice"

    def read_config(self, config: JsonDict, **kwargs: Any) -> None:
        self.app_service_config_files = config.get("app_service_config_files", [])
        if not isinstance(self.app_service_config_files, list) or not all(
            isinstance(x, str) for x in self.app_service_config_files
        ):
            raise ConfigError(
                "Expected '%s' to be a list of AS config files"
                % (self.app_service_config_files,),
                ("app_service_config_files",),
            )

        self.track_appservice_user_ips = config.get("track_appservice_user_ips", False)
        self.use_appservice_legacy_authorization = config.get(
            "use_appservice_legacy_authorization", False
        )
        if self.use_appservice_legacy_authorization:
            logger.warning(
                "The use of appservice legacy authorization via query params is deprecated"
                " and should be considered insecure."
            )


def load_appservices(
    hostname: str, config_files: List[str]
) -> List[ApplicationService]:
    """Returns a list of Application Services from the config files."""

    # Dicts of value -> filename
    seen_as_tokens: Dict[str, str] = {}
    seen_ids: Dict[str, str] = {}

    appservices = []

    for config_file in config_files:
        try:
            with open(config_file) as f:
                appservice = _load_appservice(hostname, yaml.safe_load(f), config_file)
                if appservice.id in seen_ids:
                    raise ConfigError(
                        "Cannot reuse ID across application services: "
                        "%s (files: %s, %s)"
                        % (appservice.id, config_file, seen_ids[appservice.id])
                    )
                seen_ids[appservice.id] = config_file
                if appservice.token in seen_as_tokens:
                    raise ConfigError(
                        "Cannot reuse as_token across application services: "
                        "%s (files: %s, %s)"
                        % (
                            appservice.token,
                            config_file,
                            seen_as_tokens[appservice.token],
                        )
                    )
                seen_as_tokens[appservice.token] = config_file
                logger.info("Loaded application service: %s", appservice)
                appservices.append(appservice)
        except Exception as e:
            logger.error("Failed to load appservice from '%s'", config_file)
            logger.exception(e)
            raise
    return appservices
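

# For reference, a single registration file handed to _load_appservice below might
# look roughly like this (an illustrative sketch only; every value is a placeholder):
#
#   id: example_bridge
#   url: http://localhost:8000      # or null to opt out of receiving events/queries
#   as_token: <random secret>
#   hs_token: <random secret>
#   sender_localpart: example_bot
#   rate_limited: true              # optional, defaults to true
#   protocols: ["example"]          # optional
#   namespaces:
#     users:
#       - exclusive: true
#         regex: "@example_.*"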
def _load_appservice(
    hostname: str, as_info: JsonDict, config_filename: str
) -> ApplicationService:
    required_string_fields = ["id", "as_token", "hs_token", "sender_localpart"]
    for field in required_string_fields:
        if not isinstance(as_info.get(field), str):
            raise KeyError(
                "Required string field: '%s' (%s)" % (field, config_filename)
            )

    # 'url' must either be a string or explicitly null, not missing,
    # to avoid accidentally turning off push for ASes.
    if not isinstance(as_info.get("url"), str) and as_info.get("url", "") is not None:
        raise KeyError(
            "Required string field or explicit null: 'url' (%s)" % (config_filename,)
        )

    localpart = as_info["sender_localpart"]
    if urlparse.quote(localpart) != localpart:
        raise ValueError(
            "sender_localpart must only contain characters that do not get URL-encoded."
        )
    user = UserID(localpart, hostname)
    user_id = user.to_string()

    # Rate limiting for users of this AS is on by default (excludes sender)
    rate_limited = as_info.get("rate_limited")
    if not isinstance(rate_limited, bool):
        rate_limited = True

    # namespace checks
    if not isinstance(as_info.get("namespaces"), dict):
        raise KeyError("Requires 'namespaces' object.")
    for ns in ApplicationService.NS_LIST:
        # specific namespaces are optional
        if ns in as_info["namespaces"]:
            # expect a list of dicts with exclusive and regex keys
            for regex_obj in as_info["namespaces"][ns]:
                if not isinstance(regex_obj, dict):
                    raise ValueError(
                        "Expected namespace entry in %s to be an object, but got %s"
                        % (ns, regex_obj)
                    )
                if not isinstance(regex_obj.get("regex"), str):
                    raise ValueError(
                        "Missing/bad type 'regex' key in %s" % (regex_obj,)
                    )
                if not isinstance(regex_obj.get("exclusive"), bool):
                    raise ValueError(
                        "Missing/bad type 'exclusive' key in %s" % (regex_obj,)
                    )
    # protocols check
    protocols = as_info.get("protocols")
    if protocols:
        if not isinstance(protocols, list):
            raise KeyError("Optional 'protocols' must be a list if present.")
        for p in protocols:
            if not isinstance(p, str):
                raise KeyError("Bad value for 'protocols' item")

    if as_info["url"] is None:
        logger.info(
            "(%s) Explicitly empty 'url' provided. This application service"
            " will not receive events or queries.",
            config_filename,
        )

    ip_range_whitelist = None
    if as_info.get("ip_range_whitelist"):
        ip_range_whitelist = IPSet(as_info.get("ip_range_whitelist"))

    supports_ephemeral = as_info.get("de.sorunome.msc2409.push_ephemeral", False)

    # Opt-in flag for the MSC3202-specific transactional behaviour.
    # When enabled, appservice transactions contain the following information:
    # - device One-Time Key counts
    # - device unused fallback key usage states
    # - device list changes
    msc3202_transaction_extensions = as_info.get("org.matrix.msc3202", False)
    if not isinstance(msc3202_transaction_extensions, bool):
        raise ValueError(
            "The `org.matrix.msc3202` option should be true or false if specified."
        )
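    # For illustration only: a registration file opting in to both experimental
    # behaviours above might set, at its top level, something like:
    #
    #   de.sorunome.msc2409.push_ephemeral: true
    #   org.matrix.msc3202: true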

    return ApplicationService(
        token=as_info["as_token"],
        url=as_info["url"],
        namespaces=as_info["namespaces"],
        hs_token=as_info["hs_token"],
        sender=user_id,
        id=as_info["id"],
        protocols=protocols,
        rate_limited=rate_limited,
        ip_range_whitelist=ip_range_whitelist,
        supports_ephemeral=supports_ephemeral,
        msc3202_transaction_extensions=msc3202_transaction_extensions,
    )