"""KafkaBackend class with methods for supported APIs."""
import uuid
from datetime import datetime
from typing import Any, Optional
from moto.core.base_backend import BackendDict, BaseBackend
from moto.core.common_models import BaseModel
from moto.utilities.utils import get_partition
from ..utilities.tagging_service import TaggingService
class FakeKafkaCluster(BaseModel):
    """In-memory model of a single MSK cluster, either PROVISIONED or SERVERLESS."""

    def __init__(
        self,
        cluster_name: str,
        account_id: str,
        region_name: str,
        cluster_type: str,
        tags: Optional[dict[str, str]] = None,
        broker_node_group_info: Optional[dict[str, Any]] = None,
        kafka_version: Optional[str] = None,
        number_of_broker_nodes: Optional[int] = None,
        configuration_info: Optional[dict[str, Any]] = None,
        serverless_config: Optional[dict[str, Any]] = None,
        encryption_info: Optional[dict[str, Any]] = None,
        enhanced_monitoring: str = "DEFAULT",
        open_monitoring: Optional[dict[str, Any]] = None,
        logging_info: Optional[dict[str, Any]] = None,
        storage_mode: str = "LOCAL",
        current_version: str = "1.0",
        client_authentication: Optional[dict[str, Any]] = None,
        state: str = "CREATING",
        active_operation_arn: Optional[str] = None,
        zookeeper_connect_string: Optional[str] = None,
        zookeeper_connect_string_tls: Optional[str] = None,
    ):
        # Identity and bookkeeping shared by both cluster flavours.
        self.cluster_id = str(uuid.uuid4())
        self.cluster_name = cluster_name
        self.account_id = account_id
        self.region_name = region_name
        self.cluster_type = cluster_type
        self.tags = tags or {}
        self.state = state
        # NOTE(review): naive local time, not UTC — AWS reports UTC timestamps;
        # confirm downstream consumers don't depend on the timezone.
        self.creation_time = datetime.now().isoformat()
        self.current_version = current_version
        self.active_operation_arn = active_operation_arn
        # The ARN only depends on the identity fields set above.
        self.arn = self._generate_arn()
        # Settings that only apply to PROVISIONED clusters (None otherwise).
        self.broker_node_group_info = broker_node_group_info
        self.kafka_version = kafka_version
        self.number_of_broker_nodes = number_of_broker_nodes
        self.configuration_info = configuration_info
        self.encryption_info = encryption_info
        self.enhanced_monitoring = enhanced_monitoring
        self.open_monitoring = open_monitoring
        self.logging_info = logging_info
        self.storage_mode = storage_mode
        self.client_authentication = client_authentication
        self.zookeeper_connect_string = zookeeper_connect_string
        self.zookeeper_connect_string_tls = zookeeper_connect_string_tls
        # Settings that only apply to SERVERLESS clusters (None otherwise).
        self.serverless_config = serverless_config

    def _generate_arn(self) -> str:
        """Build this cluster's ARN; serverless clusters use a distinct resource type."""
        if self.cluster_type == "PROVISIONED":
            resource_type = "cluster"
        else:
            resource_type = "serverless-cluster"
        partition = get_partition(self.region_name)
        return (
            f"arn:{partition}:kafka:{self.region_name}:"
            f"{self.account_id}:{resource_type}/{self.cluster_id}"
        )
class KafkaBackend(BaseBackend):
    """Implementation of Kafka (MSK) APIs.

    Clusters are held in-memory, keyed by ARN.  Tagging is delegated to
    TaggingService so tag APIs behave consistently with other services.
    """

    def __init__(self, region_name: str, account_id: str):
        super().__init__(region_name, account_id)
        # Maps cluster ARN -> FakeKafkaCluster.
        self.clusters: dict[str, FakeKafkaCluster] = {}
        self.tagger = TaggingService()

    def create_cluster_v2(
        self,
        cluster_name: str,
        tags: Optional[dict[str, str]],
        provisioned: Optional[dict[str, Any]],
        serverless: Optional[dict[str, Any]],
    ) -> tuple[str, str, str, str]:
        """Create a PROVISIONED or SERVERLESS cluster.

        Returns (arn, cluster_name, state, cluster_type).
        Raises ValueError if neither `provisioned` nor `serverless` is given
        (previously this surfaced as an opaque UnboundLocalError).
        """
        if provisioned:
            cluster_type = "PROVISIONED"
            broker_node_group_info = provisioned.get("brokerNodeGroupInfo")
            kafka_version = provisioned.get("kafkaVersion", "default-kafka-version")
            number_of_broker_nodes = int(provisioned.get("numberOfBrokerNodes", 1))
            storage_mode = provisioned.get("storageMode", "LOCAL")
            # Pass the remaining optional settings through instead of dropping
            # them; describe_cluster_v2 surfaces all of these.
            client_authentication = provisioned.get("clientAuthentication")
            configuration_info = provisioned.get("configurationInfo")
            encryption_info = provisioned.get("encryptionInfo")
            enhanced_monitoring = provisioned.get("enhancedMonitoring", "DEFAULT")
            open_monitoring = provisioned.get("openMonitoring")
            logging_info = provisioned.get("loggingInfo")
            serverless_config = None
        elif serverless:
            cluster_type = "SERVERLESS"
            broker_node_group_info = None
            kafka_version = None
            number_of_broker_nodes = None
            storage_mode = None
            client_authentication = None
            configuration_info = None
            encryption_info = None
            enhanced_monitoring = "DEFAULT"
            open_monitoring = None
            logging_info = None
            serverless_config = serverless
        else:
            raise ValueError(
                "Either 'provisioned' or 'serverless' configuration must be provided."
            )
        new_cluster = FakeKafkaCluster(
            cluster_name=cluster_name,
            account_id=self.account_id,
            region_name=self.region_name,
            cluster_type=cluster_type,
            broker_node_group_info=broker_node_group_info,
            kafka_version=kafka_version,
            number_of_broker_nodes=number_of_broker_nodes,
            configuration_info=configuration_info,
            serverless_config=serverless_config,
            encryption_info=encryption_info,
            enhanced_monitoring=enhanced_monitoring,
            open_monitoring=open_monitoring,
            logging_info=logging_info,
            client_authentication=client_authentication,
            tags=tags,
            state="CREATING",
            storage_mode=storage_mode if storage_mode else "LOCAL",
            current_version="1.0",
        )
        self.clusters[new_cluster.arn] = new_cluster
        if tags:
            self.tag_resource(new_cluster.arn, tags)
        return (
            new_cluster.arn,
            new_cluster.cluster_name,
            new_cluster.state,
            new_cluster.cluster_type,
        )

    def describe_cluster_v2(self, cluster_arn: str) -> dict[str, Any]:
        """Return the ClusterInfo dict for one cluster (v2 response shape).

        NOTE(review): raises KeyError for unknown ARNs; the real service
        returns NotFoundException — confirm against the responses layer.
        """
        cluster = self.clusters[cluster_arn]
        cluster_info: dict[str, Any] = {
            # Fall back to the historical placeholder when no operation is set.
            "activeOperationArn": cluster.active_operation_arn
            or "arn:aws:kafka:region:account-id:operation/active-operation",
            "clusterArn": cluster.arn,
            "clusterName": cluster.cluster_name,
            "clusterType": cluster.cluster_type,
            "creationTime": cluster.creation_time,
            "currentVersion": cluster.current_version,
            "state": cluster.state,
            "stateInfo": {
                "code": "string",
                "message": "Cluster state details.",
            },
            "tags": self.list_tags_for_resource(cluster.arn),
        }
        if cluster.cluster_type == "PROVISIONED":
            cluster_info.update(
                {
                    "provisioned": {
                        "brokerNodeGroupInfo": cluster.broker_node_group_info or {},
                        "clientAuthentication": cluster.client_authentication or {},
                        "currentBrokerSoftwareInfo": {
                            "configurationArn": (cluster.configuration_info or {}).get(
                                "arn", "string"
                            ),
                            "configurationRevision": (
                                cluster.configuration_info or {}
                            ).get("revision", 1),
                            "kafkaVersion": cluster.kafka_version,
                        },
                        "encryptionInfo": cluster.encryption_info or {},
                        "enhancedMonitoring": cluster.enhanced_monitoring,
                        "openMonitoring": cluster.open_monitoring or {},
                        "loggingInfo": cluster.logging_info or {},
                        "numberOfBrokerNodes": cluster.number_of_broker_nodes or 0,
                        "zookeeperConnectString": cluster.zookeeper_connect_string
                        or "zookeeper.example.com:2181",
                        "zookeeperConnectStringTls": cluster.zookeeper_connect_string_tls
                        or "zookeeper.example.com:2181",
                        "storageMode": cluster.storage_mode,
                        "customerActionStatus": "NONE",
                    }
                }
            )
        elif cluster.cluster_type == "SERVERLESS":
            serverless_config = cluster.serverless_config or {}
            cluster_info.update(
                {
                    "serverless": {
                        "vpcConfigs": serverless_config.get("vpcConfigs", []),
                        "clientAuthentication": serverless_config.get(
                            "clientAuthentication", {}
                        ),
                    }
                }
            )
        return cluster_info

    def list_clusters_v2(
        self,
        cluster_name_filter: Optional[str],
        cluster_type_filter: Optional[str],
        max_results: Optional[int],
        next_token: Optional[str],
    ) -> tuple[list[dict[str, Any]], Optional[str]]:
        """List clusters of both types, honoring the name/type filters.

        The name filter is a prefix match and the type filter is matched
        case-insensitively, mirroring the ListClustersV2 API. Pagination is
        not implemented: everything is returned in one page, next token None.
        """
        cluster_info_list = [
            self.describe_cluster_v2(cluster_arn)
            for cluster_arn, cluster in self.clusters.items()
            if (
                not cluster_name_filter
                or cluster.cluster_name.startswith(cluster_name_filter)
            )
            and (
                not cluster_type_filter
                or cluster.cluster_type == cluster_type_filter.upper()
            )
        ]
        if max_results:
            cluster_info_list = cluster_info_list[:max_results]
        return cluster_info_list, None

    def create_cluster(
        self,
        broker_node_group_info: dict[str, Any],
        client_authentication: Optional[dict[str, Any]],
        cluster_name: str,
        configuration_info: Optional[dict[str, Any]] = None,
        encryption_info: Optional[dict[str, Any]] = None,
        enhanced_monitoring: str = "DEFAULT",
        open_monitoring: Optional[dict[str, Any]] = None,
        kafka_version: str = "2.8.1",
        logging_info: Optional[dict[str, Any]] = None,
        number_of_broker_nodes: int = 1,
        tags: Optional[dict[str, str]] = None,
        storage_mode: str = "LOCAL",
    ) -> tuple[str, str, str]:
        """Create a PROVISIONED cluster (v1 API).

        Returns (arn, cluster_name, state).
        """
        new_cluster = FakeKafkaCluster(
            cluster_name=cluster_name,
            account_id=self.account_id,
            region_name=self.region_name,
            cluster_type="PROVISIONED",
            broker_node_group_info=broker_node_group_info,
            client_authentication=client_authentication,
            kafka_version=kafka_version,
            number_of_broker_nodes=number_of_broker_nodes,
            configuration_info=configuration_info,
            encryption_info=encryption_info,
            enhanced_monitoring=enhanced_monitoring,
            open_monitoring=open_monitoring,
            logging_info=logging_info,
            storage_mode=storage_mode,
        )
        self.clusters[new_cluster.arn] = new_cluster
        if tags:
            self.tag_resource(new_cluster.arn, tags)
        return new_cluster.arn, new_cluster.cluster_name, new_cluster.state

    def describe_cluster(self, cluster_arn: str) -> dict[str, Any]:
        """Return the ClusterInfo dict for one cluster (v1 response shape).

        NOTE(review): raises KeyError for unknown ARNs; the real service
        returns NotFoundException — confirm against the responses layer.
        """
        cluster = self.clusters[cluster_arn]
        return {
            # Fall back to the historical placeholder when no operation is set.
            "activeOperationArn": cluster.active_operation_arn
            or "arn:aws:kafka:region:account-id:operation/active-operation",
            "brokerNodeGroupInfo": cluster.broker_node_group_info or {},
            "clientAuthentication": cluster.client_authentication or {},
            "clusterArn": cluster.arn,
            "clusterName": cluster.cluster_name,
            "creationTime": cluster.creation_time,
            "currentBrokerSoftwareInfo": {
                "configurationArn": (cluster.configuration_info or {}).get(
                    "arn", "string"
                ),
                "configurationRevision": (cluster.configuration_info or {}).get(
                    "revision", 1
                ),
                "kafkaVersion": cluster.kafka_version,
            },
            "currentVersion": cluster.current_version,
            "encryptionInfo": cluster.encryption_info or {},
            "enhancedMonitoring": cluster.enhanced_monitoring,
            "openMonitoring": cluster.open_monitoring or {},
            "loggingInfo": cluster.logging_info or {},
            "numberOfBrokerNodes": cluster.number_of_broker_nodes or 0,
            "state": cluster.state,
            "stateInfo": {
                "code": "string",
                "message": "Cluster state details.",
            },
            "tags": self.list_tags_for_resource(cluster.arn),
            "zookeeperConnectString": cluster.zookeeper_connect_string
            or "zookeeper.example.com:2181",
            "zookeeperConnectStringTls": cluster.zookeeper_connect_string_tls
            or "zookeeper.example.com:2181",
            "storageMode": cluster.storage_mode,
            "customerActionStatus": "NONE",
        }

    def list_clusters(
        self,
        cluster_name_filter: Optional[str],
        max_results: Optional[int],
        next_token: Optional[str],
    ) -> list[dict[str, Any]]:
        """List cluster summaries (v1 API), honoring the name prefix filter.

        Pagination tokens are not implemented; next_token is ignored.
        """
        cluster_info_list = [
            {
                "clusterArn": cluster.arn,
                "clusterName": cluster.cluster_name,
                "state": cluster.state,
                "creationTime": cluster.creation_time,
                "clusterType": cluster.cluster_type,
            }
            for cluster in self.clusters.values()
            if not cluster_name_filter
            or cluster.cluster_name.startswith(cluster_name_filter)
        ]
        if max_results:
            cluster_info_list = cluster_info_list[:max_results]
        return cluster_info_list

    def delete_cluster(self, cluster_arn: str, current_version: str) -> tuple[str, str]:
        """Remove a cluster and return (arn, last_known_state).

        `current_version` is accepted for API compatibility but not checked.
        Raises KeyError for unknown ARNs.
        """
        cluster = self.clusters.pop(cluster_arn)
        return cluster_arn, cluster.state

    def list_tags_for_resource(self, resource_arn: str) -> dict[str, str]:
        """Return all tags on `resource_arn` as a plain {key: value} dict."""
        return self.tagger.get_tag_dict_for_resource(resource_arn)

    def tag_resource(self, resource_arn: str, tags: dict[str, str]) -> None:
        """Add/overwrite tags; TaggingService expects a list of Key/Value dicts."""
        tags_list = [{"Key": k, "Value": v} for k, v in tags.items()]
        self.tagger.tag_resource(resource_arn, tags_list)

    def untag_resource(self, resource_arn: str, tag_keys: list[str]) -> None:
        """Remove the given tag keys from `resource_arn` (missing keys are ignored)."""
        self.tagger.untag_resource_using_names(resource_arn, tag_keys)
# Region/account-partitioned registry of KafkaBackend instances for the "kafka" service.
kafka_backends = BackendDict(KafkaBackend, "kafka")