From 3710b4bab2f46d5f61a05fd2d44e824731d2e8e1 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Wed, 11 Sep 2024 16:36:23 +0200 Subject: [PATCH 01/49] added enum for either vector of autentication classes or kerberos secret name --- rust/crd/src/authentication.rs | 4 +- rust/crd/src/lib.rs | 105 +++++++++++++++++++++++++++++++-- rust/crd/src/security.rs | 18 +++++- 3 files changed, 119 insertions(+), 8 deletions(-) diff --git a/rust/crd/src/authentication.rs b/rust/crd/src/authentication.rs index 04932eed..4fd15ad6 100644 --- a/rust/crd/src/authentication.rs +++ b/rust/crd/src/authentication.rs @@ -32,7 +32,7 @@ pub enum Error { #[derive(Clone, Deserialize, Debug, Eq, JsonSchema, PartialEq, Serialize)] #[serde(rename_all = "camelCase")] -pub struct KafkaAuthentication { +pub struct KafkaAuthenticationClass { /// The AuthenticationClass to use. /// /// ## TLS provider @@ -64,7 +64,7 @@ impl ResolvedAuthenticationClasses { /// - Validation failed pub async fn from_references( client: &Client, - auth_classes: &Vec, + auth_classes: &Vec, ) -> Result { let mut resolved_authentication_classes: Vec = vec![]; diff --git a/rust/crd/src/lib.rs b/rust/crd/src/lib.rs index efe2d8e7..b021b9f8 100644 --- a/rust/crd/src/lib.rs +++ b/rust/crd/src/lib.rs @@ -5,11 +5,11 @@ pub mod listener; pub mod security; pub mod tls; -use crate::authentication::KafkaAuthentication; use crate::authorization::KafkaAuthorization; use crate::tls::KafkaTls; use affinity::get_affinity; +use authentication::KafkaAuthenticationClass; use serde::{Deserialize, Serialize}; use snafu::{OptionExt, ResultExt, Snafu}; use stackable_operator::{ @@ -132,9 +132,9 @@ pub struct KafkaClusterSpec { #[derive(Clone, Deserialize, Debug, Eq, JsonSchema, PartialEq, Serialize)] #[serde(rename_all = "camelCase")] pub struct KafkaClusterConfig { - /// Authentication class settings for Kafka like mTLS authentication. 
- #[serde(default)] - pub authentication: Vec, + /// Authentication class settings for Kafka like mTLS authentication or Kerberos secret name. + #[serde(flatten)] + pub authentication: Option, /// Authorization settings for Kafka like OPA. #[serde(default)] @@ -161,6 +161,32 @@ pub struct KafkaClusterConfig { pub zookeeper_config_map_name: String, } +#[derive(Clone, Debug, Deserialize, Display, JsonSchema, Eq, PartialEq, Serialize)] +#[serde(untagged)] +pub enum KafkaAuthenticationEnum { + AuthenticationClasses { + #[serde(default)] + authentication: Vec, + }, + + KerberosAuthentication { + authentication: AuthenticationConfig, + }, +} + +#[derive(Clone, Debug, Deserialize, Eq, Hash, JsonSchema, PartialEq, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct AuthenticationConfig { + pub kerberos: KerberosConfig, +} + +#[derive(Clone, Debug, Deserialize, Eq, Hash, JsonSchema, PartialEq, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct KerberosConfig { + /// Name of the SecretClass providing the keytab for the Kafka services. + pub secret_class: String, +} + impl KafkaCluster { /// The name of the load-balanced Kubernetes Service providing the bootstrap address. Kafka clients will use this /// to get a list of broker addresses and will use those to transmit data to the correct broker. 
@@ -671,4 +697,75 @@ mod tests { tls::internal_tls_default() ); } + + #[test] + fn test_get_kerberos_config() { + let kafka_cluster = r#" + apiVersion: kafka.stackable.tech/v1alpha1 + kind: KafkaCluster + metadata: + name: simple-kafka + namespace: default + spec: + image: + productVersion: 3.7.1 + clusterConfig: + authentication: + kerberos: + secretClass: kafka-kerberos + zookeeperConfigMapName: xyz + "#; + let kafka: KafkaCluster = serde_yaml::from_str(kafka_cluster).expect("illegal test input"); + println!("{:#?}", kafka); + + assert_eq!( + Some(KafkaAuthenticationEnum::KerberosAuthentication { + authentication: AuthenticationConfig { + kerberos: KerberosConfig { + secret_class: "kafka-kerberos".to_string() + } + } + }), + kafka.spec.cluster_config.authentication + ); + } + + #[test] + fn test_get_kafka_tls_config() { + let kafka_cluster = r#" + apiVersion: kafka.stackable.tech/v1alpha1 + kind: KafkaCluster + metadata: + name: simple-kafka + namespace: default + spec: + image: + productVersion: 3.7.1 + clusterConfig: + authentication: + - authenticationClass: kafka-client-tls1 + - authenticationClass: kafka-client-tls2 + tls: + internalSecretClass: internalTls + serverSecretClass: tls + zookeeperConfigMapName: xyz + "#; + let kafka: KafkaCluster = serde_yaml::from_str(kafka_cluster).expect("illegal test input"); + + println!("{:#?}", kafka); + + assert_eq!( + Some(KafkaAuthenticationEnum::AuthenticationClasses { + authentication: vec![ + KafkaAuthenticationClass { + authentication_class: "kafka-client-tls1".to_string() + }, + KafkaAuthenticationClass { + authentication_class: "kafka-client-tls2".to_string() + } + ] + }), + kafka.spec.cluster_config.authentication + ); + } } diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index fb4dfa30..06b2c159 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -23,7 +23,7 @@ use stackable_operator::{ utils::COMMON_BASH_TRAP_FUNCTIONS, }; -use crate::STACKABLE_LOG_DIR; +use 
crate::{authentication::KafkaAuthenticationClass, KafkaAuthenticationEnum, STACKABLE_LOG_DIR}; use crate::{ authentication::{self, ResolvedAuthenticationClasses}, listener::{self, KafkaListenerConfig}, @@ -126,11 +126,25 @@ impl<'a> KafkaTlsSecurity<'a> { client: &Client, kafka: &'a KafkaCluster, ) -> Result { + let kafka_authentication_classes: Vec = + if let Some(authentication) = &kafka.spec.cluster_config.authentication { + match authentication { + KafkaAuthenticationEnum::AuthenticationClasses { authentication } => { + authentication.to_vec() + } + KafkaAuthenticationEnum::KerberosAuthentication { .. } => { + vec![] + } + } + } else { + vec![] + }; + Ok(KafkaTlsSecurity { kafka, resolved_authentication_classes: ResolvedAuthenticationClasses::from_references( client, - &kafka.spec.cluster_config.authentication, + &kafka_authentication_classes, ) .await .context(InvalidAuthenticationClassConfigurationSnafu)?, From b98c155796a6b9ace93a01927276db00dcb1082b Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Fri, 13 Sep 2024 15:01:42 +0200 Subject: [PATCH 02/49] initial kerberos impl --- deploy/helm/kafka-operator/crds/crds.yaml | 43 ++++++++++----- rust/crd/src/lib.rs | 67 ++++++++++++++--------- rust/crd/src/security.rs | 10 ++-- 3 files changed, 75 insertions(+), 45 deletions(-) diff --git a/deploy/helm/kafka-operator/crds/crds.yaml b/deploy/helm/kafka-operator/crds/crds.yaml index b8e7e981..32b17959 100644 --- a/deploy/helm/kafka-operator/crds/crds.yaml +++ b/deploy/helm/kafka-operator/crds/crds.yaml @@ -544,24 +544,37 @@ spec: description: Kafka settings that affect all roles and role groups. The settings in the `clusterConfig` are cluster wide settings that do not need to be configurable at role or role group level. properties: authentication: - default: [] - description: Authentication class settings for Kafka like mTLS authentication. - items: - properties: - authenticationClass: - description: |- - The AuthenticationClass to use. 
+ anyOf: + - items: + properties: + authenticationClass: + description: |- + The AuthenticationClass to use. - ## TLS provider + ## TLS provider - Only affects client connections. This setting controls: - If clients need to authenticate themselves against the broker via TLS - Which ca.crt to use when validating the provided client certs + Only affects client connections. This setting controls: - If clients need to authenticate themselves against the broker via TLS - Which ca.crt to use when validating the provided client certs - This will override the server TLS settings (if set) in `spec.clusterConfig.tls.serverSecretClass`. - type: string - required: - - authenticationClass - type: object - type: array + This will override the server TLS settings (if set) in `spec.clusterConfig.tls.serverSecretClass`. + type: string + required: + - authenticationClass + type: object + type: array + - required: + - kerberos + description: Authentication class settings for Kafka like mTLS authentication or Kerberos secret name. + nullable: true + properties: + kerberos: + properties: + secretClass: + description: Name of the SecretClass providing the keytab for the Kafka services. + type: string + required: + - secretClass + type: object + type: object authorization: default: opa: null diff --git a/rust/crd/src/lib.rs b/rust/crd/src/lib.rs index b021b9f8..69b90986 100644 --- a/rust/crd/src/lib.rs +++ b/rust/crd/src/lib.rs @@ -133,8 +133,7 @@ pub struct KafkaClusterSpec { #[serde(rename_all = "camelCase")] pub struct KafkaClusterConfig { /// Authentication class settings for Kafka like mTLS authentication or Kerberos secret name. - #[serde(flatten)] - pub authentication: Option, + pub authentication: Option, /// Authorization settings for Kafka like OPA. 
#[serde(default)] @@ -163,15 +162,9 @@ pub struct KafkaClusterConfig { #[derive(Clone, Debug, Deserialize, Display, JsonSchema, Eq, PartialEq, Serialize)] #[serde(untagged)] -pub enum KafkaAuthenticationEnum { - AuthenticationClasses { - #[serde(default)] - authentication: Vec, - }, - - KerberosAuthentication { - authentication: AuthenticationConfig, - }, +pub enum KafkaAuthenticationMethod { + AuthenticationClasses(Vec), + KerberosAuthentication(AuthenticationConfig), } #[derive(Clone, Debug, Deserialize, Eq, Hash, JsonSchema, PartialEq, Serialize)] @@ -699,7 +692,7 @@ mod tests { } #[test] - fn test_get_kerberos_config() { + fn test_get_auth_kerberos() { let kafka_cluster = r#" apiVersion: kafka.stackable.tech/v1alpha1 kind: KafkaCluster @@ -719,19 +712,19 @@ mod tests { println!("{:#?}", kafka); assert_eq!( - Some(KafkaAuthenticationEnum::KerberosAuthentication { - authentication: AuthenticationConfig { + Some(KafkaAuthenticationMethod::KerberosAuthentication( + AuthenticationConfig { kerberos: KerberosConfig { secret_class: "kafka-kerberos".to_string() } } - }), + )), kafka.spec.cluster_config.authentication ); } #[test] - fn test_get_kafka_tls_config() { + fn test_get_auth_tls() { let kafka_cluster = r#" apiVersion: kafka.stackable.tech/v1alpha1 kind: KafkaCluster @@ -755,17 +748,39 @@ mod tests { println!("{:#?}", kafka); assert_eq!( - Some(KafkaAuthenticationEnum::AuthenticationClasses { - authentication: vec![ - KafkaAuthenticationClass { - authentication_class: "kafka-client-tls1".to_string() - }, - KafkaAuthenticationClass { - authentication_class: "kafka-client-tls2".to_string() - } - ] - }), + Some(KafkaAuthenticationMethod::AuthenticationClasses(vec![ + KafkaAuthenticationClass { + authentication_class: "kafka-client-tls1".to_string() + }, + KafkaAuthenticationClass { + authentication_class: "kafka-client-tls2".to_string() + } + ])), kafka.spec.cluster_config.authentication ); } + + #[test] + fn test_get_auth_none() { + let kafka_cluster = r#" + 
apiVersion: kafka.stackable.tech/v1alpha1 + kind: KafkaCluster + metadata: + name: simple-kafka + namespace: default + spec: + image: + productVersion: 3.7.1 + clusterConfig: + tls: + internalSecretClass: internalTls + serverSecretClass: tls + zookeeperConfigMapName: xyz + "#; + let kafka: KafkaCluster = serde_yaml::from_str(kafka_cluster).expect("illegal test input"); + + println!("{:#?}", kafka); + + assert_eq!(None, kafka.spec.cluster_config.authentication); + } } diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index 06b2c159..04aae1e1 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -23,7 +23,9 @@ use stackable_operator::{ utils::COMMON_BASH_TRAP_FUNCTIONS, }; -use crate::{authentication::KafkaAuthenticationClass, KafkaAuthenticationEnum, STACKABLE_LOG_DIR}; +use crate::{ + authentication::KafkaAuthenticationClass, KafkaAuthenticationMethod, STACKABLE_LOG_DIR, +}; use crate::{ authentication::{self, ResolvedAuthenticationClasses}, listener::{self, KafkaListenerConfig}, @@ -129,10 +131,10 @@ impl<'a> KafkaTlsSecurity<'a> { let kafka_authentication_classes: Vec = if let Some(authentication) = &kafka.spec.cluster_config.authentication { match authentication { - KafkaAuthenticationEnum::AuthenticationClasses { authentication } => { - authentication.to_vec() + KafkaAuthenticationMethod::AuthenticationClasses(auth_classes) => { + auth_classes.to_vec() } - KafkaAuthenticationEnum::KerberosAuthentication { .. 
} => { + KafkaAuthenticationMethod::KerberosAuthentication(_) => { vec![] } } From 471403dd5be46031b386ddc89f3b4175307f0617 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Fri, 13 Sep 2024 15:20:33 +0200 Subject: [PATCH 03/49] implement kerberos specifics --- rust/crd/src/lib.rs | 38 ++++++++++++ rust/crd/src/listener.rs | 61 +++++++++++++++----- rust/crd/src/security.rs | 20 ++++++- rust/operator-binary/src/kafka_controller.rs | 46 +++++++++++---- rust/operator-binary/src/kerberos.rs | 52 +++++++++++++++++ rust/operator-binary/src/main.rs | 1 + 6 files changed, 191 insertions(+), 27 deletions(-) create mode 100644 rust/operator-binary/src/kerberos.rs diff --git a/rust/crd/src/lib.rs b/rust/crd/src/lib.rs index 69b90986..75970e6d 100644 --- a/rust/crd/src/lib.rs +++ b/rust/crd/src/lib.rs @@ -285,6 +285,23 @@ impl KafkaCluster { tracing::debug!("Merged config: {:?}", conf_role_group); fragment::validate(conf_role_group).context(FragmentValidationFailureSnafu) } + + pub fn has_kerberos_enabled(&self) -> bool { + self.kerberos_secret_class().is_some() + } + + pub fn kerberos_secret_class(&self) -> Option { + if let Some(authentication) = self.spec.cluster_config.authentication.clone() { + match authentication { + KafkaAuthenticationMethod::AuthenticationClasses(_) => None, + KafkaAuthenticationMethod::KerberosAuthentication(kerberos_authentication) => { + Some(kerberos_authentication.kerberos.secret_class) + } + } + } else { + None + } + } } /// Reference to a single `Pod` that is a component of a [`KafkaCluster`] @@ -345,6 +362,12 @@ impl KafkaRole { } roles } + + /// A Kerberos principal has three parts, with the form username/fully.qualified.domain.name@YOUR-REALM.COM. + /// We only have one role and will use "kafka" everywhere (which e.g. differs from the current hdfs implementation). 
+ pub fn kerberos_service_name(&self) -> &'static str { + "kafka" + } } #[derive(Clone, Debug, Default, PartialEq, Fragment, JsonSchema)] @@ -495,6 +518,21 @@ impl Configuration for KafkaConfigFragment { Some("true".to_string()), ); } + // Kerberos + if resource.has_kerberos_enabled() { + config.insert( + "sasl.enabled.mechanisms".to_string(), + Some("GSSAPI".to_string()), + ); + config.insert( + "sasl.kerberos.service.name".to_string(), + Some(KafkaRole::Broker.kerberos_service_name().to_string()), + ); + config.insert( + "sasl.mechanism.inter.broker.protocol".to_string(), + Some("GSSAPI".to_string()), + ); + } } Ok(config) diff --git a/rust/crd/src/listener.rs b/rust/crd/src/listener.rs index b7afe04c..771cc057 100644 --- a/rust/crd/src/listener.rs +++ b/rust/crd/src/listener.rs @@ -24,6 +24,9 @@ pub enum KafkaListenerProtocol { /// Encrypted and server-authenticated HTTPS connections #[strum(serialize = "SSL")] Ssl, + /// Kerberos authentication + #[strum(serialize = "SASL_SSL")] + SaslSsl, } #[derive(strum::Display, Debug, EnumString, Ord, Eq, PartialEq, PartialOrd)] @@ -90,13 +93,13 @@ impl Display for KafkaListener { pub fn get_kafka_listener_config( kafka: &KafkaCluster, kafka_security: &KafkaTlsSecurity, - object_name: &str, + pod_fqdn: &String, ) -> Result { - let pod_fqdn = pod_fqdn(kafka, object_name)?; let mut listeners = vec![]; let mut advertised_listeners = vec![]; let mut listener_security_protocol_map = BTreeMap::new(); + // CLIENT if kafka_security.tls_client_authentication_class().is_some() { // 1) If client authentication required, we expose only CLIENT_AUTH connection with SSL listeners.push(KafkaListener { @@ -125,8 +128,22 @@ pub fn get_kafka_listener_config( }); listener_security_protocol_map .insert(KafkaListenerName::Client, KafkaListenerProtocol::Ssl); + } else if kafka.has_kerberos_enabled() { + // 3) Kerberos and TLS authentication classes are mutually exclusive + listeners.push(KafkaListener { + name: KafkaListenerName::Client, + host: 
LISTENER_LOCAL_ADDRESS.to_string(), + port: KafkaTlsSecurity::SECURE_CLIENT_PORT.to_string(), + }); + advertised_listeners.push(KafkaListener { + name: KafkaListenerName::Client, + host: pod_fqdn.clone(), + port: KafkaTlsSecurity::SECURE_CLIENT_PORT.to_string(), + }); + listener_security_protocol_map + .insert(KafkaListenerName::Client, KafkaListenerProtocol::SaslSsl); } else { - // 3) If no client auth or tls is required we expose CLIENT with PLAINTEXT + // 4) If no client auth or tls is required we expose CLIENT with PLAINTEXT listeners.push(KafkaListener { name: KafkaListenerName::Client, host: LISTENER_LOCAL_ADDRESS.to_string(), @@ -141,8 +158,9 @@ pub fn get_kafka_listener_config( .insert(KafkaListenerName::Client, KafkaListenerProtocol::Plaintext); } + // INTERNAL if kafka_security.tls_internal_secret_class().is_some() { - // 4) If internal tls is required we expose INTERNAL as SSL + // 5) If internal tls is required we expose INTERNAL as SSL listeners.push(KafkaListener { name: KafkaListenerName::Internal, host: LISTENER_LOCAL_ADDRESS.to_string(), @@ -150,13 +168,27 @@ pub fn get_kafka_listener_config( }); advertised_listeners.push(KafkaListener { name: KafkaListenerName::Internal, - host: pod_fqdn, + host: pod_fqdn.to_string(), port: kafka_security.internal_port().to_string(), }); listener_security_protocol_map .insert(KafkaListenerName::Internal, KafkaListenerProtocol::Ssl); + } else if kafka.has_kerberos_enabled() { + // 6) Kerberos and TLS authentication classes are mutually exclusive + listeners.push(KafkaListener { + name: KafkaListenerName::Internal, + host: LISTENER_LOCAL_ADDRESS.to_string(), + port: KafkaTlsSecurity::SECURE_INTERNAL_PORT.to_string(), + }); + advertised_listeners.push(KafkaListener { + name: KafkaListenerName::Internal, + host: pod_fqdn.to_string(), + port: KafkaTlsSecurity::SECURE_INTERNAL_PORT.to_string(), + }); + listener_security_protocol_map + .insert(KafkaListenerName::Internal, KafkaListenerProtocol::Ssl); } else { - // 5) If 
no internal tls is required we expose INTERNAL as PLAINTEXT + // 7) If no internal tls is required we expose INTERNAL as PLAINTEXT listeners.push(KafkaListener { name: KafkaListenerName::Internal, host: LISTENER_LOCAL_ADDRESS.to_string(), @@ -164,7 +196,7 @@ pub fn get_kafka_listener_config( }); advertised_listeners.push(KafkaListener { name: KafkaListenerName::Internal, - host: pod_fqdn, + host: pod_fqdn.to_string(), port: kafka_security.internal_port().to_string(), }); listener_security_protocol_map.insert( @@ -184,7 +216,7 @@ fn node_port_cmd(directory: &str, port_name: &str) -> String { format!("$(cat {directory}/{port_name}_nodeport)") } -fn pod_fqdn(kafka: &KafkaCluster, object_name: &str) -> Result { +pub fn pod_fqdn(kafka: &KafkaCluster, object_name: &str) -> Result { Ok(format!( "$POD_NAME.{}.{}.svc.cluster.local", object_name, @@ -240,7 +272,8 @@ mod tests { "internalTls".to_string(), Some("tls".to_string()), ); - let config = get_kafka_listener_config(&kafka, &kafka_security, object_name).unwrap(); + let pod_fqdn = pod_fqdn(&kafka, object_name).unwrap(); + let config = get_kafka_listener_config(&kafka, &kafka_security, &pod_fqdn).unwrap(); assert_eq!( config.listeners(), @@ -263,7 +296,7 @@ mod tests { host = LISTENER_NODE_ADDRESS, port = node_port_cmd(STACKABLE_TMP_DIR, kafka_security.client_port_name()), internal_name = KafkaListenerName::Internal, - internal_host = pod_fqdn(&kafka, object_name).unwrap(), + internal_host = &pod_fqdn, internal_port = kafka_security.internal_port(), ) ); @@ -300,7 +333,7 @@ mod tests { "tls".to_string(), Some("tls".to_string()), ); - let config = get_kafka_listener_config(&kafka, &kafka_security, object_name).unwrap(); + let config = get_kafka_listener_config(&kafka, &kafka_security, &pod_fqdn).unwrap(); assert_eq!( config.listeners(), @@ -323,7 +356,7 @@ mod tests { host = LISTENER_NODE_ADDRESS, port = node_port_cmd(STACKABLE_TMP_DIR, kafka_security.client_port_name()), internal_name = KafkaListenerName::Internal, - 
internal_host = pod_fqdn(&kafka, object_name).unwrap(), + internal_host = &pod_fqdn, internal_port = kafka_security.internal_port(), ) ); @@ -362,7 +395,7 @@ mod tests { "".to_string(), None, ); - let config = get_kafka_listener_config(&kafka, &kafka_security, object_name).unwrap(); + let config = get_kafka_listener_config(&kafka, &kafka_security, &pod_fqdn).unwrap(); assert_eq!( config.listeners(), @@ -385,7 +418,7 @@ mod tests { host = LISTENER_NODE_ADDRESS, port = node_port_cmd(STACKABLE_TMP_DIR, kafka_security.client_port_name()), internal_name = KafkaListenerName::Internal, - internal_host = pod_fqdn(&kafka, object_name).unwrap(), + internal_host = &pod_fqdn, internal_port = kafka_security.internal_port(), ) ); diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index 04aae1e1..cae1ae5e 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -24,7 +24,8 @@ use stackable_operator::{ }; use crate::{ - authentication::KafkaAuthenticationClass, KafkaAuthenticationMethod, STACKABLE_LOG_DIR, + authentication::KafkaAuthenticationClass, KafkaAuthenticationMethod, KafkaRole, + STACKABLE_LOG_DIR, }; use crate::{ authentication::{self, ResolvedAuthenticationClasses}, @@ -260,12 +261,15 @@ impl<'a> KafkaTlsSecurity<'a> { &self, kafka_listeners: &KafkaListenerConfig, opa_connect_string: Option<&str>, + kerberos_enabled: bool, + pod_fqdn: &String, ) -> Vec { vec![formatdoc! 
{" {COMMON_BASH_TRAP_FUNCTIONS} {remove_vector_shutdown_file_command} prepare_signal_handlers - bin/kafka-server-start.sh {STACKABLE_CONFIG_DIR}/{SERVER_PROPERTIES_FILE} --override \"zookeeper.connect=$ZOOKEEPER\" --override \"listeners={listeners}\" --override \"advertised.listeners={advertised_listeners}\" --override \"listener.security.protocol.map={listener_security_protocol_map}\"{opa_config} & + {set_realm_env} + bin/kafka-server-start.sh {STACKABLE_CONFIG_DIR}/{SERVER_PROPERTIES_FILE} --override \"zookeeper.connect=$ZOOKEEPER\" --override \"listeners={listeners}\" --override \"advertised.listeners={advertised_listeners}\" --override \"listener.security.protocol.map={listener_security_protocol_map}\"{opa_config}{jaas_config} & wait_for_termination $! {create_vector_shutdown_file_command} ", @@ -273,13 +277,23 @@ impl<'a> KafkaTlsSecurity<'a> { remove_vector_shutdown_file_command(STACKABLE_LOG_DIR), create_vector_shutdown_file_command = create_vector_shutdown_file_command(STACKABLE_LOG_DIR), + set_realm_env = match kerberos_enabled { + true => "KERBEROS_REALM=$(grep -oP 'default_realm = \\K.*' /stackable/kerberos/krb5.conf)", + false => "", + }, listeners = kafka_listeners.listeners(), advertised_listeners = kafka_listeners.advertised_listeners(), listener_security_protocol_map = kafka_listeners.listener_security_protocol_map(), opa_config = match opa_connect_string { None => "".to_string(), Some(opa_connect_string) => format!(" --override \"opa.authorizer.url={opa_connect_string}\""), - } + }, + jaas_config = match kerberos_enabled { + true => { + let service_name = KafkaRole::Broker.kerberos_service_name(); + format!(" --override \"listener.name.client.gssapi.sasl.jaas.config=com.sun.security.auth.module.Krb5LoginModule required useKeyTab=true storeKey=true keyTab=\"/stackable/kerberos/keytab\" principal=\"{service_name}/{pod_fqdn}@$KERBEROS_REALM\";\"")}, + false => "".to_string(), + }, }] } diff --git a/rust/operator-binary/src/kafka_controller.rs 
b/rust/operator-binary/src/kafka_controller.rs index 7d629cfd..d96292fb 100644 --- a/rust/operator-binary/src/kafka_controller.rs +++ b/rust/operator-binary/src/kafka_controller.rs @@ -12,10 +12,11 @@ use product_config::{ }; use snafu::{OptionExt, ResultExt, Snafu}; use stackable_kafka_crd::{ - listener::get_kafka_listener_config, security::KafkaTlsSecurity, Container, KafkaCluster, - KafkaClusterStatus, KafkaConfig, KafkaRole, APP_NAME, DOCKER_IMAGE_BASE_NAME, - JVM_SECURITY_PROPERTIES_FILE, KAFKA_HEAP_OPTS, LOG_DIRS_VOLUME_NAME, METRICS_PORT, - METRICS_PORT_NAME, OPERATOR_NAME, SERVER_PROPERTIES_FILE, STACKABLE_CONFIG_DIR, + listener::{get_kafka_listener_config, pod_fqdn, KafkaListenerError}, + security::KafkaTlsSecurity, + Container, KafkaCluster, KafkaClusterStatus, KafkaConfig, KafkaRole, APP_NAME, + DOCKER_IMAGE_BASE_NAME, JVM_SECURITY_PROPERTIES_FILE, KAFKA_HEAP_OPTS, LOG_DIRS_VOLUME_NAME, + METRICS_PORT, METRICS_PORT_NAME, OPERATOR_NAME, SERVER_PROPERTIES_FILE, STACKABLE_CONFIG_DIR, STACKABLE_DATA_DIR, STACKABLE_LOG_CONFIG_DIR, STACKABLE_LOG_DIR, STACKABLE_TMP_DIR, }; use stackable_operator::{ @@ -71,6 +72,7 @@ use strum::{EnumDiscriminants, IntoStaticStr}; use crate::{ discovery::{self, build_discovery_configmaps}, + kerberos::{self, add_kerberos_pod_config}, operations::{ graceful_shutdown::{add_graceful_shutdown_config, graceful_shutdown_config_properties}, pdb::add_pdbs, @@ -308,6 +310,12 @@ pub enum Error { AddVolumesAndVolumeMounts { source: stackable_kafka_crd::security::Error, }, + + #[snafu(display("failed to resolve the fully-qualified pod name"))] + ResolveNamespace { source: KafkaListenerError }, + + #[snafu(display("failed to add kerberos config"))] + AddKerberosConfig { source: kerberos::Error }, } type Result = std::result::Result; @@ -365,6 +373,8 @@ impl ReconcilerError for Error { Error::MetadataBuild { .. } => None, Error::LabelBuild { .. } => None, Error::AddVolumesAndVolumeMounts { .. } => None, + Error::ResolveNamespace { .. 
} => None, + Error::AddKerberosConfig { .. } => None, } } } @@ -749,7 +759,7 @@ fn build_broker_rolegroup_service( #[allow(clippy::too_many_arguments)] fn build_broker_rolegroup_statefulset( kafka: &KafkaCluster, - role: &KafkaRole, + kafka_role: &KafkaRole, resolved_product_image: &ResolvedProductImage, rolegroup_ref: &RoleGroupRef, broker_config: &HashMap>, @@ -758,7 +768,7 @@ fn build_broker_rolegroup_statefulset( merged_config: &KafkaConfig, sa_name: &str, ) -> Result { - let role = kafka.role(role).context(InternalOperatorSnafu)?; + let role = kafka.role(kafka_role).context(InternalOperatorSnafu)?; let rolegroup = kafka .rolegroup(rolegroup_ref) .context(InternalOperatorSnafu)?; @@ -788,6 +798,17 @@ fn build_broker_rolegroup_statefulset( .add_volume_and_volume_mounts(&mut pod_builder, &mut cb_kcat_prober, &mut cb_kafka) .context(AddVolumesAndVolumeMountsSnafu)?; + if kafka.has_kerberos_enabled() { + add_kerberos_pod_config( + kafka, + kafka_role, + &mut cb_kcat_prober, + &mut cb_kafka, + &mut pod_builder, + ) + .context(AddKerberosConfigSnafu)?; + } + cb_get_svc .image_from_product_image(resolved_product_image) .command(vec![ @@ -882,9 +903,9 @@ fn build_broker_rolegroup_statefulset( let jvm_args = format!( "-Djava.security.properties={STACKABLE_CONFIG_DIR}/{JVM_SECURITY_PROPERTIES_FILE} -javaagent:/stackable/jmx/jmx_prometheus_javaagent.jar={METRICS_PORT}:/stackable/jmx/broker.yaml", ); - let kafka_listeners = - get_kafka_listener_config(kafka, kafka_security, &rolegroup_ref.object_name()) - .context(InvalidKafkaListenersSnafu)?; + let pod_fqdn = pod_fqdn(kafka, &rolegroup_ref.object_name()).context(ResolveNamespaceSnafu)?; + let kafka_listeners = get_kafka_listener_config(kafka, kafka_security, &pod_fqdn) + .context(InvalidKafkaListenersSnafu)?; cb_kafka .image_from_product_image(resolved_product_image) @@ -896,7 +917,12 @@ fn build_broker_rolegroup_statefulset( "-c".to_string(), ]) .args(vec![kafka_security - .kafka_container_commands(&kafka_listeners, 
opa_connect_string) + .kafka_container_commands( + &kafka_listeners, + opa_connect_string, + kafka.has_kerberos_enabled(), + &pod_fqdn, + ) .join("\n")]) .add_env_var("EXTRA_ARGS", jvm_args) .add_env_var( diff --git a/rust/operator-binary/src/kerberos.rs b/rust/operator-binary/src/kerberos.rs new file mode 100644 index 00000000..323d43b5 --- /dev/null +++ b/rust/operator-binary/src/kerberos.rs @@ -0,0 +1,52 @@ +use snafu::{ResultExt, Snafu}; +use stackable_kafka_crd::{KafkaCluster, KafkaRole}; +use stackable_operator::{ + builder::pod::{ + container::ContainerBuilder, + volume::{ + SecretOperatorVolumeSourceBuilder, SecretOperatorVolumeSourceBuilderError, + VolumeBuilder, + }, + PodBuilder, + }, + kube::ResourceExt, +}; + +#[derive(Snafu, Debug)] +pub enum Error { + #[snafu(display("failed to add Kerberos secret volume"))] + AddKerberosSecretVolume { + source: SecretOperatorVolumeSourceBuilderError, + }, +} + +pub fn add_kerberos_pod_config( + kafka: &KafkaCluster, + role: &KafkaRole, + cb_kcat_prober: &mut ContainerBuilder, + cb_kafka: &mut ContainerBuilder, + pb: &mut PodBuilder, +) -> Result<(), Error> { + if let Some(kerberos_secret_class) = kafka.kerberos_secret_class() { + // Mount keytab + let kerberos_secret_operator_volume = + SecretOperatorVolumeSourceBuilder::new(kerberos_secret_class) + .with_service_scope(kafka.name_any()) + .with_pod_scope() + .with_kerberos_service_name(role.kerberos_service_name()) + .build() + .context(AddKerberosSecretVolumeSnafu)?; + pb.add_volume( + VolumeBuilder::new("kerberos") + .ephemeral(kerberos_secret_operator_volume) + .build(), + ); + cb_kcat_prober.add_volume_mount("kerberos", "/stackable/kerberos"); + cb_kcat_prober.add_env_var("KRB5_CONFIG", "/stackable/kerberos/krb5.conf"); + + cb_kafka.add_volume_mount("kerberos", "/stackable/kerberos"); + cb_kafka.add_env_var("KRB5_CONFIG", "/stackable/kerberos/krb5.conf"); + } + + Ok(()) +} diff --git a/rust/operator-binary/src/main.rs b/rust/operator-binary/src/main.rs index 
a846ff0e..496c3dca 100644 --- a/rust/operator-binary/src/main.rs +++ b/rust/operator-binary/src/main.rs @@ -24,6 +24,7 @@ use crate::{ mod discovery; mod kafka_controller; +mod kerberos; mod operations; mod pod_svc_controller; mod product_logging; From d90564b7dd357c72d4db7c3e22c304c737d90c6d Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Thu, 19 Sep 2024 18:12:45 +0200 Subject: [PATCH 04/49] wip: integration test --- rust/crd/src/listener.rs | 40 ++--- rust/crd/src/security.rs | 38 ++++- rust/operator-binary/src/kafka_controller.rs | 16 +- rust/operator-binary/src/kerberos.rs | 2 + .../kuttl/kerberos/00-assert.yaml.j2 | 10 ++ ...tor-aggregator-discovery-configmap.yaml.j2 | 9 ++ .../kuttl/kerberos/00-patch-ns.yaml.j2 | 9 ++ .../templates/kuttl/kerberos/00-rbac.yaml.j2 | 29 ++++ .../kuttl/kerberos/01-assert.yaml.j2 | 14 ++ .../kerberos/01-install-krb5-kdc.yaml.j2 | 146 ++++++++++++++++++ .../02-create-kerberos-secretclass.yaml.j2 | 72 +++++++++ tests/templates/kuttl/kerberos/10-assert.yaml | 12 ++ .../kuttl/kerberos/10-install-zk.yaml.j2 | 20 +++ tests/templates/kuttl/kerberos/20-assert.yaml | 12 ++ .../kuttl/kerberos/20-install-kafka.yaml.j2 | 69 +++++++++ tests/test-definition.yaml | 21 +++ 16 files changed, 494 insertions(+), 25 deletions(-) create mode 100644 tests/templates/kuttl/kerberos/00-assert.yaml.j2 create mode 100644 tests/templates/kuttl/kerberos/00-install-vector-aggregator-discovery-configmap.yaml.j2 create mode 100644 tests/templates/kuttl/kerberos/00-patch-ns.yaml.j2 create mode 100644 tests/templates/kuttl/kerberos/00-rbac.yaml.j2 create mode 100644 tests/templates/kuttl/kerberos/01-assert.yaml.j2 create mode 100644 tests/templates/kuttl/kerberos/01-install-krb5-kdc.yaml.j2 create mode 100644 tests/templates/kuttl/kerberos/02-create-kerberos-secretclass.yaml.j2 create mode 100644 tests/templates/kuttl/kerberos/10-assert.yaml create mode 100644 tests/templates/kuttl/kerberos/10-install-zk.yaml.j2 create mode 100644 
tests/templates/kuttl/kerberos/20-assert.yaml create mode 100644 tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 diff --git a/rust/crd/src/listener.rs b/rust/crd/src/listener.rs index 771cc057..3cefee4d 100644 --- a/rust/crd/src/listener.rs +++ b/rust/crd/src/listener.rs @@ -114,34 +114,34 @@ pub fn get_kafka_listener_config( }); listener_security_protocol_map .insert(KafkaListenerName::ClientAuth, KafkaListenerProtocol::Ssl); - } else if kafka_security.tls_server_secret_class().is_some() { - // 2) If no client authentication but tls is required we expose CLIENT with SSL + } else if kafka.has_kerberos_enabled() { + // 2) Kerberos and TLS authentication classes are mutually exclusive and Kerberos takes preference listeners.push(KafkaListener { name: KafkaListenerName::Client, host: LISTENER_LOCAL_ADDRESS.to_string(), - port: kafka_security.client_port().to_string(), + port: KafkaTlsSecurity::SECURE_CLIENT_PORT.to_string(), }); advertised_listeners.push(KafkaListener { name: KafkaListenerName::Client, - host: LISTENER_NODE_ADDRESS.to_string(), - port: node_port_cmd(STACKABLE_TMP_DIR, kafka_security.client_port_name()), + host: pod_fqdn.clone(), + port: KafkaTlsSecurity::SECURE_CLIENT_PORT.to_string(), }); listener_security_protocol_map - .insert(KafkaListenerName::Client, KafkaListenerProtocol::Ssl); - } else if kafka.has_kerberos_enabled() { - // 3) Kerberos and TLS authentication classes are mutually exclusive + .insert(KafkaListenerName::Client, KafkaListenerProtocol::SaslSsl); + } else if kafka_security.tls_server_secret_class().is_some() { + // 3) If no client authentication but tls is required we expose CLIENT with SSL listeners.push(KafkaListener { name: KafkaListenerName::Client, host: LISTENER_LOCAL_ADDRESS.to_string(), - port: KafkaTlsSecurity::SECURE_CLIENT_PORT.to_string(), + port: kafka_security.client_port().to_string(), }); advertised_listeners.push(KafkaListener { name: KafkaListenerName::Client, - host: pod_fqdn.clone(), - port: 
KafkaTlsSecurity::SECURE_CLIENT_PORT.to_string(), + host: LISTENER_NODE_ADDRESS.to_string(), + port: node_port_cmd(STACKABLE_TMP_DIR, kafka_security.client_port_name()), }); listener_security_protocol_map - .insert(KafkaListenerName::Client, KafkaListenerProtocol::SaslSsl); + .insert(KafkaListenerName::Client, KafkaListenerProtocol::Ssl); } else { // 4) If no client auth or tls is required we expose CLIENT with PLAINTEXT listeners.push(KafkaListener { @@ -159,31 +159,31 @@ pub fn get_kafka_listener_config( } // INTERNAL - if kafka_security.tls_internal_secret_class().is_some() { - // 5) If internal tls is required we expose INTERNAL as SSL + if kafka.has_kerberos_enabled() { + // 5) Kerberos and TLS authentication classes are mutually exclusive and Kerberos takes preference listeners.push(KafkaListener { name: KafkaListenerName::Internal, host: LISTENER_LOCAL_ADDRESS.to_string(), - port: kafka_security.internal_port().to_string(), + port: KafkaTlsSecurity::SECURE_INTERNAL_PORT.to_string(), }); advertised_listeners.push(KafkaListener { name: KafkaListenerName::Internal, host: pod_fqdn.to_string(), - port: kafka_security.internal_port().to_string(), + port: KafkaTlsSecurity::SECURE_INTERNAL_PORT.to_string(), }); listener_security_protocol_map .insert(KafkaListenerName::Internal, KafkaListenerProtocol::Ssl); - } else if kafka.has_kerberos_enabled() { - // 6) Kerberos and TLS authentication classes are mutually exclusive + } else if kafka_security.tls_internal_secret_class().is_some() { + // 6) If internal tls is required we expose INTERNAL as SSL listeners.push(KafkaListener { name: KafkaListenerName::Internal, host: LISTENER_LOCAL_ADDRESS.to_string(), - port: KafkaTlsSecurity::SECURE_INTERNAL_PORT.to_string(), + port: kafka_security.internal_port().to_string(), }); advertised_listeners.push(KafkaListener { name: KafkaListenerName::Internal, host: pod_fqdn.to_string(), - port: KafkaTlsSecurity::SECURE_INTERNAL_PORT.to_string(), + port: 
kafka_security.internal_port().to_string(), }); listener_security_protocol_map .insert(KafkaListenerName::Internal, KafkaListenerProtocol::Ssl); diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index cae1ae5e..2d97145f 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -233,21 +233,31 @@ impl<'a> KafkaTlsSecurity<'a> { } /// Returns the commands for the kcat readiness probe. - pub fn kcat_prober_container_commands(&self) -> Vec { - let mut args = vec!["/stackable/kcat".to_string()]; + pub fn kcat_prober_container_commands(&self, pod_fqdn: &String) -> Vec { + let mut args = vec![]; let port = self.client_port(); if self.tls_client_authentication_class().is_some() { + args.push("/stackable/kcat".to_string()); args.push("-b".to_string()); args.push(format!("localhost:{}", port)); args.extend(Self::kcat_client_auth_ssl( Self::STACKABLE_TLS_CERT_SERVER_DIR, )); + } else if self.kafka.has_kerberos_enabled() { + let service_name = KafkaRole::Broker.kerberos_service_name(); + args.push("export KERBEROS_REALM=$(grep -oP 'default_realm = \\K.*' /stackable/kerberos/krb5.conf);".to_string()); + args.push("/stackable/kcat".to_string()); + args.push("-b".to_string()); + args.push(format!("{pod_fqdn}:{port}")); + args.extend(Self::kcat_client_sasl_ssl(Self::STACKABLE_TLS_CERT_SERVER_DIR, service_name, pod_fqdn)); } else if self.tls_server_secret_class().is_some() { + args.push("/stackable/kcat".to_string()); args.push("-b".to_string()); args.push(format!("localhost:{}", port)); args.extend(Self::kcat_client_ssl(Self::STACKABLE_TLS_CERT_SERVER_DIR)); } else { + args.push("/stackable/kcat".to_string()); args.push("-b".to_string()); args.push(format!("localhost:{}", port)); } @@ -278,7 +288,7 @@ impl<'a> KafkaTlsSecurity<'a> { create_vector_shutdown_file_command = create_vector_shutdown_file_command(STACKABLE_LOG_DIR), set_realm_env = match kerberos_enabled { - true => "KERBEROS_REALM=$(grep -oP 'default_realm = \\K.*' 
/stackable/kerberos/krb5.conf)", + true => "export KERBEROS_REALM=$(grep -oP 'default_realm = \\K.*' /stackable/kerberos/krb5.conf)", false => "", }, listeners = kafka_listeners.listeners(), @@ -291,7 +301,7 @@ impl<'a> KafkaTlsSecurity<'a> { jaas_config = match kerberos_enabled { true => { let service_name = KafkaRole::Broker.kerberos_service_name(); - format!(" --override \"listener.name.client.gssapi.sasl.jaas.config=com.sun.security.auth.module.Krb5LoginModule required useKeyTab=true storeKey=true keyTab=\"/stackable/kerberos/keytab\" principal=\"{service_name}/{pod_fqdn}@$KERBEROS_REALM\";\"")}, + format!(" --override \"listener.name.client.gssapi.sasl.jaas.config=com.sun.security.auth.module.Krb5LoginModule required useKeyTab=true storeKey=true keyTab=\\\"/stackable/kerberos/keytab\\\" principal=\\\"{service_name}/{pod_fqdn}@$KERBEROS_REALM\\\";\"")}, false => "".to_string(), }, }] @@ -522,4 +532,24 @@ impl<'a> KafkaTlsSecurity<'a> { format!("ssl.ca.location={cert_directory}/ca.crt"), ] } + + fn kcat_client_sasl_ssl( + cert_directory: &str, + service_name: &str, + pod_fqdn: &String) -> Vec { + vec![ + "-X".to_string(), + "security.protocol=SASL_SSL".to_string(), + "-X".to_string(), + format!("ssl.ca.location={cert_directory}/ca.crt"), + "-X".to_string(), + "sasl.kerberos.keytab=/stackable/kerberos/keytab".to_string(), + "-X".to_string(), + "sasl.mechanism=GSSAPI".to_string(), + "-X".to_string(), + format!("sasl.kerberos.service.name={service_name}"), + "-X".to_string(), + format!("sasl.kerberos.principal={service_name}/{pod_fqdn}@$KERBEROS_REALM"), + ] + } } diff --git a/rust/operator-binary/src/kafka_controller.rs b/rust/operator-binary/src/kafka_controller.rs index d96292fb..1ada8824 100644 --- a/rust/operator-binary/src/kafka_controller.rs +++ b/rust/operator-binary/src/kafka_controller.rs @@ -691,6 +691,9 @@ fn build_broker_rolegroup_config_map( })?, ); + tracing::debug!("Applied server config: [{:#?}]", server_cfg); + tracing::debug!("Applied JVM config: 
[{:#?}]", jvm_sec_props); + extend_role_group_config_map( rolegroup, vector_aggregator_address, @@ -943,6 +946,17 @@ fn build_broker_rolegroup_statefulset( cb_kcat_prober .image_from_product_image(resolved_product_image) .command(vec!["sleep".to_string(), "infinity".to_string()]) + .add_env_vars(vec![EnvVar { + name: "POD_NAME".to_string(), + value_from: Some(EnvVarSource { + field_ref: Some(ObjectFieldSelector { + api_version: Some("v1".to_string()), + field_path: "metadata.name".to_string(), + }), + ..EnvVarSource::default() + }), + ..EnvVar::default() + }]) .resources( ResourceRequirementsBuilder::new() .with_cpu_request("100m") @@ -956,7 +970,7 @@ fn build_broker_rolegroup_statefulset( .readiness_probe(Probe { exec: Some(ExecAction { // If the broker is able to get its fellow cluster members then it has at least completed basic registration at some point - command: Some(kafka_security.kcat_prober_container_commands()), + command: Some(kafka_security.kcat_prober_container_commands(&pod_fqdn)), }), timeout_seconds: Some(5), period_seconds: Some(2), diff --git a/rust/operator-binary/src/kerberos.rs b/rust/operator-binary/src/kerberos.rs index 323d43b5..e35704ba 100644 --- a/rust/operator-binary/src/kerberos.rs +++ b/rust/operator-binary/src/kerberos.rs @@ -43,9 +43,11 @@ pub fn add_kerberos_pod_config( ); cb_kcat_prober.add_volume_mount("kerberos", "/stackable/kerberos"); cb_kcat_prober.add_env_var("KRB5_CONFIG", "/stackable/kerberos/krb5.conf"); + cb_kcat_prober.add_env_var("KAFKA_OPTS", "-Djava.security.krb5.conf=/stackable/kerberos/krb5.conf"); cb_kafka.add_volume_mount("kerberos", "/stackable/kerberos"); cb_kafka.add_env_var("KRB5_CONFIG", "/stackable/kerberos/krb5.conf"); + cb_kafka.add_env_var("KAFKA_OPTS", "-Djava.security.krb5.conf=/stackable/kerberos/krb5.conf"); } Ok(()) diff --git a/tests/templates/kuttl/kerberos/00-assert.yaml.j2 b/tests/templates/kuttl/kerberos/00-assert.yaml.j2 new file mode 100644 index 00000000..50b1d4c3 --- /dev/null +++ 
b/tests/templates/kuttl/kerberos/00-assert.yaml.j2 @@ -0,0 +1,10 @@ +--- +apiVersion: kuttl.dev/v1beta1 +kind: TestAssert +{% if lookup('env', 'VECTOR_AGGREGATOR') %} +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: vector-aggregator-discovery +{% endif %} diff --git a/tests/templates/kuttl/kerberos/00-install-vector-aggregator-discovery-configmap.yaml.j2 b/tests/templates/kuttl/kerberos/00-install-vector-aggregator-discovery-configmap.yaml.j2 new file mode 100644 index 00000000..2d6a0df5 --- /dev/null +++ b/tests/templates/kuttl/kerberos/00-install-vector-aggregator-discovery-configmap.yaml.j2 @@ -0,0 +1,9 @@ +{% if lookup('env', 'VECTOR_AGGREGATOR') %} +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: vector-aggregator-discovery +data: + ADDRESS: {{ lookup('env', 'VECTOR_AGGREGATOR') }} +{% endif %} diff --git a/tests/templates/kuttl/kerberos/00-patch-ns.yaml.j2 b/tests/templates/kuttl/kerberos/00-patch-ns.yaml.j2 new file mode 100644 index 00000000..67185acf --- /dev/null +++ b/tests/templates/kuttl/kerberos/00-patch-ns.yaml.j2 @@ -0,0 +1,9 @@ +{% if test_scenario['values']['openshift'] == 'true' %} +# see https://github.com/stackabletech/issues/issues/566 +--- +apiVersion: kuttl.dev/v1beta1 +kind: TestStep +commands: + - script: kubectl patch namespace $NAMESPACE -p '{"metadata":{"labels":{"pod-security.kubernetes.io/enforce":"privileged"}}}' + timeout: 120 +{% endif %} diff --git a/tests/templates/kuttl/kerberos/00-rbac.yaml.j2 b/tests/templates/kuttl/kerberos/00-rbac.yaml.j2 new file mode 100644 index 00000000..7ee61d23 --- /dev/null +++ b/tests/templates/kuttl/kerberos/00-rbac.yaml.j2 @@ -0,0 +1,29 @@ +--- +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: test-role +rules: +{% if test_scenario['values']['openshift'] == "true" %} + - apiGroups: ["security.openshift.io"] + resources: ["securitycontextconstraints"] + resourceNames: ["privileged"] + verbs: ["use"] +{% endif %} +--- +apiVersion: v1 +kind: ServiceAccount 
+metadata: + name: test-sa +--- +kind: RoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: test-rb +subjects: + - kind: ServiceAccount + name: test-sa +roleRef: + kind: Role + name: test-role + apiGroup: rbac.authorization.k8s.io diff --git a/tests/templates/kuttl/kerberos/01-assert.yaml.j2 b/tests/templates/kuttl/kerberos/01-assert.yaml.j2 new file mode 100644 index 00000000..d34c1c63 --- /dev/null +++ b/tests/templates/kuttl/kerberos/01-assert.yaml.j2 @@ -0,0 +1,14 @@ +--- +apiVersion: kuttl.dev/v1beta1 +kind: TestAssert +timeout: 300 +{% if test_scenario['values']['kerberos-backend'] == 'mit' %} +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: krb5-kdc +status: + readyReplicas: 1 + replicas: 1 +{% endif %} diff --git a/tests/templates/kuttl/kerberos/01-install-krb5-kdc.yaml.j2 b/tests/templates/kuttl/kerberos/01-install-krb5-kdc.yaml.j2 new file mode 100644 index 00000000..fa04cc0d --- /dev/null +++ b/tests/templates/kuttl/kerberos/01-install-krb5-kdc.yaml.j2 @@ -0,0 +1,146 @@ +{% if test_scenario['values']['kerberos-backend'] == 'mit' %} +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: krb5-kdc +spec: + selector: + matchLabels: + app: krb5-kdc + template: + metadata: + labels: + app: krb5-kdc + spec: + serviceAccountName: test-sa + initContainers: + - name: init + image: docker.stackable.tech/stackable/krb5:{{ test_scenario['values']['krb5'] }}-stackable0.0.0-dev + args: + - sh + - -euo + - pipefail + - -c + - | + test -e /var/kerberos/krb5kdc/principal || kdb5_util create -s -P asdf + kadmin.local get_principal -terse root/admin || kadmin.local add_principal -pw asdf root/admin + # stackable-secret-operator principal must match the keytab specified in the SecretClass + kadmin.local get_principal -terse stackable-secret-operator || kadmin.local add_principal -e aes256-cts-hmac-sha384-192:normal -pw asdf stackable-secret-operator + env: + - name: KRB5_CONFIG + value: /stackable/config/krb5.conf + volumeMounts: + - 
mountPath: /stackable/config + name: config + - mountPath: /var/kerberos/krb5kdc + name: data + containers: + - name: kdc + image: docker.stackable.tech/stackable/krb5:{{ test_scenario['values']['krb5'] }}-stackable0.0.0-dev + args: + - krb5kdc + - -n + env: + - name: KRB5_CONFIG + value: /stackable/config/krb5.conf + volumeMounts: + - mountPath: /stackable/config + name: config + - mountPath: /var/kerberos/krb5kdc + name: data +# Root permissions required on Openshift to bind to privileged port numbers +{% if test_scenario['values']['openshift'] == "true" %} + securityContext: + runAsUser: 0 +{% endif %} + - name: kadmind + image: docker.stackable.tech/stackable/krb5:{{ test_scenario['values']['krb5'] }}-stackable0.0.0-dev + args: + - kadmind + - -nofork + env: + - name: KRB5_CONFIG + value: /stackable/config/krb5.conf + volumeMounts: + - mountPath: /stackable/config + name: config + - mountPath: /var/kerberos/krb5kdc + name: data +# Root permissions required on Openshift to bind to privileged port numbers +{% if test_scenario['values']['openshift'] == "true" %} + securityContext: + runAsUser: 0 +{% endif %} + - name: client + image: docker.stackable.tech/stackable/krb5:{{ test_scenario['values']['krb5'] }}-stackable0.0.0-dev + tty: true + stdin: true + env: + - name: KRB5_CONFIG + value: /stackable/config/krb5.conf + volumeMounts: + - mountPath: /stackable/config + name: config + volumes: + - name: config + configMap: + name: krb5-kdc + volumeClaimTemplates: + - metadata: + name: data + spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 1Gi +--- +apiVersion: v1 +kind: Service +metadata: + name: krb5-kdc +spec: + selector: + app: krb5-kdc + ports: + - name: kadmin + port: 749 + - name: kdc + port: 88 + - name: kdc-udp + port: 88 + protocol: UDP +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: krb5-kdc +data: + krb5.conf: | + [logging] + default = STDERR + kdc = STDERR + admin_server = STDERR + # default = FILE:/var/log/krb5libs.log 
+ # kdc = FILE:/var/log/krb5kdc.log + # admin_server = FILE:/vaggr/log/kadmind.log + [libdefaults] + dns_lookup_realm = false + ticket_lifetime = 24h + renew_lifetime = 7d + forwardable = true + rdns = false + default_realm = {{ test_scenario['values']['kerberos-realm'] }} + spake_preauth_groups = edwards25519 + [realms] + {{ test_scenario['values']['kerberos-realm'] }} = { + acl_file = /stackable/config/kadm5.acl + disable_encrypted_timestamp = false + } + [domain_realm] + .cluster.local = {{ test_scenario['values']['kerberos-realm'] }} + cluster.local = {{ test_scenario['values']['kerberos-realm'] }} + kadm5.acl: | + root/admin *e + stackable-secret-operator *e +{% endif %} diff --git a/tests/templates/kuttl/kerberos/02-create-kerberos-secretclass.yaml.j2 b/tests/templates/kuttl/kerberos/02-create-kerberos-secretclass.yaml.j2 new file mode 100644 index 00000000..04ae9a63 --- /dev/null +++ b/tests/templates/kuttl/kerberos/02-create-kerberos-secretclass.yaml.j2 @@ -0,0 +1,72 @@ +--- +apiVersion: kuttl.dev/v1beta1 +kind: TestStep +commands: + - script: | + kubectl apply -n $NAMESPACE -f - < 0 }} + roleGroups: + default: + replicas: 1 diff --git a/tests/templates/kuttl/kerberos/20-assert.yaml b/tests/templates/kuttl/kerberos/20-assert.yaml new file mode 100644 index 00000000..01ba15d1 --- /dev/null +++ b/tests/templates/kuttl/kerberos/20-assert.yaml @@ -0,0 +1,12 @@ +--- +apiVersion: kuttl.dev/v1beta1 +kind: TestAssert +timeout: 600 +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: test-kafka-broker-default +status: + readyReplicas: 3 + replicas: 3 diff --git a/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 b/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 new file mode 100644 index 00000000..6f286fc0 --- /dev/null +++ b/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 @@ -0,0 +1,69 @@ +--- +apiVersion: kuttl.dev/v1beta1 +kind: TestStep +commands: + - script: | + kubectl apply -n $NAMESPACE -f - < 0 }} + roleGroups: + default: + 
replicas: 3 + EOF \ No newline at end of file diff --git a/tests/test-definition.yaml b/tests/test-definition.yaml index 4e995678..2584bb4b 100644 --- a/tests/test-definition.yaml +++ b/tests/test-definition.yaml @@ -34,6 +34,19 @@ dimensions: - name: openshift values: - "false" + - name: krb5 + values: + - 1.21.1 + - name: kerberos-realm + values: + - "CLUSTER.LOCAL" + - "PROD.MYCORP" + - name: kerberos-backend + values: + - mit + # Requires manual setup, see create-kerberos-secretclass.yaml + # This will *not* respect the kerberos-realm test attribute, but instead use a hard-coded realm + # - activeDirectory tests: - name: smoke dimensions: @@ -76,6 +89,14 @@ tests: - zookeeper-latest - kafka-latest - openshift + - name: kerberos + dimensions: + - kafka + - zookeeper-latest + - krb5 + - kerberos-realm + - kerberos-backend + - openshift suites: - name: nightly From ab47e06361bbed17e0e4a5f063d9ea4b1a4a02c4 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Fri, 20 Sep 2024 10:24:58 +0200 Subject: [PATCH 05/49] revert complex enum and use parallel struct (CRD decision pending) --- deploy/helm/kafka-operator/crds/crds.yaml | 53 +++++++++++------------ rust/crd/src/lib.rs | 50 ++++++++------------- rust/crd/src/security.rs | 21 +-------- 3 files changed, 46 insertions(+), 78 deletions(-) diff --git a/deploy/helm/kafka-operator/crds/crds.yaml b/deploy/helm/kafka-operator/crds/crds.yaml index 32b17959..68d26684 100644 --- a/deploy/helm/kafka-operator/crds/crds.yaml +++ b/deploy/helm/kafka-operator/crds/crds.yaml @@ -544,37 +544,24 @@ spec: description: Kafka settings that affect all roles and role groups. The settings in the `clusterConfig` are cluster wide settings that do not need to be configurable at role or role group level. properties: authentication: - anyOf: - - items: - properties: - authenticationClass: - description: |- - The AuthenticationClass to use. + default: [] + description: Authentication class settings for Kafka like mTLS authentication. 
+ items: + properties: + authenticationClass: + description: |- + The AuthenticationClass to use. - ## TLS provider + ## TLS provider - Only affects client connections. This setting controls: - If clients need to authenticate themselves against the broker via TLS - Which ca.crt to use when validating the provided client certs + Only affects client connections. This setting controls: - If clients need to authenticate themselves against the broker via TLS - Which ca.crt to use when validating the provided client certs - This will override the server TLS settings (if set) in `spec.clusterConfig.tls.serverSecretClass`. - type: string - required: - - authenticationClass - type: object - type: array - - required: - - kerberos - description: Authentication class settings for Kafka like mTLS authentication or Kerberos secret name. - nullable: true - properties: - kerberos: - properties: - secretClass: - description: Name of the SecretClass providing the keytab for the Kafka services. - type: string - required: - - secretClass - type: object - type: object + This will override the server TLS settings (if set) in `spec.clusterConfig.tls.serverSecretClass`. + type: string + required: + - authenticationClass + type: object + type: array authorization: default: opa: null @@ -595,6 +582,16 @@ spec: - configMapName type: object type: object + kerberos: + description: Struct containing Kerberos secret name. + nullable: true + properties: + secretClass: + description: Name of the SecretClass providing the keytab for the Kafka services. 
+ type: string + required: + - secretClass + type: object tls: default: internalSecretClass: tls diff --git a/rust/crd/src/lib.rs b/rust/crd/src/lib.rs index 75970e6d..5ea90170 100644 --- a/rust/crd/src/lib.rs +++ b/rust/crd/src/lib.rs @@ -132,8 +132,12 @@ pub struct KafkaClusterSpec { #[derive(Clone, Deserialize, Debug, Eq, JsonSchema, PartialEq, Serialize)] #[serde(rename_all = "camelCase")] pub struct KafkaClusterConfig { - /// Authentication class settings for Kafka like mTLS authentication or Kerberos secret name. - pub authentication: Option, + /// Authentication class settings for Kafka like mTLS authentication. + #[serde(default)] + pub authentication: Vec, + + /// Struct containing Kerberos secret name. + pub kerberos: Option, /// Authorization settings for Kafka like OPA. #[serde(default)] @@ -160,13 +164,6 @@ pub struct KafkaClusterConfig { pub zookeeper_config_map_name: String, } -#[derive(Clone, Debug, Deserialize, Display, JsonSchema, Eq, PartialEq, Serialize)] -#[serde(untagged)] -pub enum KafkaAuthenticationMethod { - AuthenticationClasses(Vec), - KerberosAuthentication(AuthenticationConfig), -} - #[derive(Clone, Debug, Deserialize, Eq, Hash, JsonSchema, PartialEq, Serialize)] #[serde(rename_all = "camelCase")] pub struct AuthenticationConfig { @@ -291,13 +288,8 @@ impl KafkaCluster { } pub fn kerberos_secret_class(&self) -> Option { - if let Some(authentication) = self.spec.cluster_config.authentication.clone() { - match authentication { - KafkaAuthenticationMethod::AuthenticationClasses(_) => None, - KafkaAuthenticationMethod::KerberosAuthentication(kerberos_authentication) => { - Some(kerberos_authentication.kerberos.secret_class) - } - } + if let Some(kerberos) = self.spec.cluster_config.kerberos.clone() { + Some(kerberos.secret_class) } else { None } @@ -532,6 +524,7 @@ impl Configuration for KafkaConfigFragment { "sasl.mechanism.inter.broker.protocol".to_string(), Some("GSSAPI".to_string()), ); + tracing::debug!("Kerberos configs added: 
[{:#?}]", config); } } @@ -741,23 +734,18 @@ mod tests { image: productVersion: 3.7.1 clusterConfig: - authentication: - kerberos: - secretClass: kafka-kerberos + kerberos: + secretClass: kafka-kerberos zookeeperConfigMapName: xyz "#; let kafka: KafkaCluster = serde_yaml::from_str(kafka_cluster).expect("illegal test input"); println!("{:#?}", kafka); assert_eq!( - Some(KafkaAuthenticationMethod::KerberosAuthentication( - AuthenticationConfig { - kerberos: KerberosConfig { - secret_class: "kafka-kerberos".to_string() - } - } - )), - kafka.spec.cluster_config.authentication + Some(KerberosConfig { + secret_class: "kafka-kerberos".to_string() + }), + kafka.spec.cluster_config.kerberos ); } @@ -786,14 +774,14 @@ mod tests { println!("{:#?}", kafka); assert_eq!( - Some(KafkaAuthenticationMethod::AuthenticationClasses(vec![ + vec![ KafkaAuthenticationClass { authentication_class: "kafka-client-tls1".to_string() }, KafkaAuthenticationClass { authentication_class: "kafka-client-tls2".to_string() - } - ])), + }, + ], kafka.spec.cluster_config.authentication ); } @@ -819,6 +807,6 @@ mod tests { println!("{:#?}", kafka); - assert_eq!(None, kafka.spec.cluster_config.authentication); + assert_eq!(0, kafka.spec.cluster_config.authentication.len()); } } diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index 2d97145f..42870a2a 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -23,15 +23,12 @@ use stackable_operator::{ utils::COMMON_BASH_TRAP_FUNCTIONS, }; -use crate::{ - authentication::KafkaAuthenticationClass, KafkaAuthenticationMethod, KafkaRole, - STACKABLE_LOG_DIR, -}; use crate::{ authentication::{self, ResolvedAuthenticationClasses}, listener::{self, KafkaListenerConfig}, tls, KafkaCluster, SERVER_PROPERTIES_FILE, STACKABLE_CONFIG_DIR, STACKABLE_TMP_DIR, }; +use crate::{KafkaRole, STACKABLE_LOG_DIR}; #[derive(Snafu, Debug)] pub enum Error { @@ -129,25 +126,11 @@ impl<'a> KafkaTlsSecurity<'a> { client: &Client, kafka: &'a KafkaCluster, ) 
-> Result { - let kafka_authentication_classes: Vec = - if let Some(authentication) = &kafka.spec.cluster_config.authentication { - match authentication { - KafkaAuthenticationMethod::AuthenticationClasses(auth_classes) => { - auth_classes.to_vec() - } - KafkaAuthenticationMethod::KerberosAuthentication(_) => { - vec![] - } - } - } else { - vec![] - }; - Ok(KafkaTlsSecurity { kafka, resolved_authentication_classes: ResolvedAuthenticationClasses::from_references( client, - &kafka_authentication_classes, + &kafka.spec.cluster_config.authentication, ) .await .context(InvalidAuthenticationClassConfigurationSnafu)?, From 4a855c28dc70439a8c4230a075eb771bd27832e0 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Fri, 20 Sep 2024 10:33:28 +0200 Subject: [PATCH 06/49] call shell explicitly for kerberos probe to allow variable substitution --- rust/crd/src/security.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index 42870a2a..1339ad75 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -229,6 +229,13 @@ impl<'a> KafkaTlsSecurity<'a> { )); } else if self.kafka.has_kerberos_enabled() { let service_name = KafkaRole::Broker.kerberos_service_name(); + // here we need to specify a shell so that variable substitution will work + // see e.g. 
https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1ExecAction.md + args.push("/bin/bash".to_string()); + args.push("-x".to_string()); + args.push("-euo".to_string()); + args.push("pipefail".to_string()); + args.push("-c".to_string()); args.push("export KERBEROS_REALM=$(grep -oP 'default_realm = \\K.*' /stackable/kerberos/krb5.conf);".to_string()); args.push("/stackable/kcat".to_string()); args.push("-b".to_string()); From 60489062d12f867e006fbebf4514188b892aed34 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Fri, 20 Sep 2024 13:13:46 +0200 Subject: [PATCH 07/49] working test --- .../kuttl/kerberos/30-access-kafka.txt.j2 | 117 ++++++++++++++++++ .../kuttl/kerberos/30-access-kafka.yaml.j2 | 6 + tests/templates/kuttl/kerberos/30-assert.yaml | 11 ++ 3 files changed, 134 insertions(+) create mode 100644 tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 create mode 100644 tests/templates/kuttl/kerberos/30-access-kafka.yaml.j2 create mode 100644 tests/templates/kuttl/kerberos/30-assert.yaml diff --git a/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 b/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 new file mode 100644 index 00000000..2b68af37 --- /dev/null +++ b/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 @@ -0,0 +1,117 @@ +--- +apiVersion: batch/v1 +kind: Job +metadata: + name: access-kafka +spec: + template: + spec: + serviceAccountName: test-sa + containers: + - name: access-kafka + image: docker.stackable.tech/apoc/stackable/kafka:3.7.1-stackable0.0.0-cyrus-sasl-gssapi + command: + - /bin/bash + - /tmp/script/script.sh + env: + - name: KRB5_CONFIG + value: /stackable/kerberos/krb5.conf + - name: KAFKA_OPTS + value: -Djava.security.krb5.conf=/stackable/kerberos/krb5.conf + volumeMounts: + - name: script + mountPath: /tmp/script + - mountPath: /stackable/tls_cert_server_mount + name: tls-cert-server-mount + - name: kerberos + mountPath: /stackable/kerberos + volumes: + - name: script + configMap: + name: 
access-kafka-script + - name: kerberos + ephemeral: + volumeClaimTemplate: + metadata: + annotations: + secrets.stackable.tech/class: kerberos-$NAMESPACE + secrets.stackable.tech/scope: service=access-kafka + secrets.stackable.tech/kerberos.service.names: developer + spec: + storageClassName: secrets.stackable.tech + accessModes: + - ReadWriteOnce + resources: + requests: + storage: "1" + - name: tls-cert-server-mount + ephemeral: + volumeClaimTemplate: + metadata: + annotations: + secrets.stackable.tech/class: tls + secrets.stackable.tech/scope: pod,node,service=kafka + creationTimestamp: null + spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: "1" + storageClassName: secrets.stackable.tech + volumeMode: Filesystem + securityContext: + fsGroup: 1000 + runAsGroup: 1000 + runAsUser: 1000 + restartPolicy: OnFailure +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: access-kafka-script +data: + script.sh: | + set -euxo pipefail + + export KCAT_CONFIG=/stackable/kcat.conf + BROKER=test-kafka-broker-default-0.test-kafka-broker-default.$NAMESPACE.svc.cluster.local:9093,test-kafka-broker-default-1.test-kafka-broker-default.$NAMESPACE.svc.cluster.local:9093,test-kafka-broker-default-2.test-kafka-broker-default.$NAMESPACE.svc.cluster.local:9093 + TOPIC=test-topic + CONSUMER_GROUP=test-consumer-group + + echo -e -n "\ + metadata.broker.list=$BROKER\n\ + auto.offset.reset=beginning\n\ + security.protocol=SASL_SSL\n\ + ssl.ca.location=/stackable/tls_cert_server_mount/ca.crt\n\ + sasl.kerberos.keytab=/stackable/kerberos/keytab\n\ + sasl.kerberos.service.name=kafka\n\ + sasl.kerberos.principal=developer/access-kafka.$NAMESPACE.svc.cluster.local@{{ test_scenario['values']['kerberos-realm'] }}\n\ + sasl.mechanism=GSSAPI\n\ + " > $KCAT_CONFIG + + cat $KCAT_CONFIG + + sent_message="Hello Stackable!" 
+ + echo $sent_message | kcat \ + -t $TOPIC \ + -P + + echo Sent message: \"$sent_message\" + + received_message=$(kcat \ + -G $CONSUMER_GROUP \ + -o stored \ + -e \ + $TOPIC) + + echo Received message: \"$received_message\" + + if [ "$received_message" = "$sent_message" ]; then + echo "Test passed" + exit 0 + else + echo "Test failed" + exit 1 + fi diff --git a/tests/templates/kuttl/kerberos/30-access-kafka.yaml.j2 b/tests/templates/kuttl/kerberos/30-access-kafka.yaml.j2 new file mode 100644 index 00000000..eecc0f08 --- /dev/null +++ b/tests/templates/kuttl/kerberos/30-access-kafka.yaml.j2 @@ -0,0 +1,6 @@ +--- +apiVersion: kuttl.dev/v1beta1 +kind: TestStep +commands: + # We need to replace $NAMESPACE (by KUTTL) + - script: envsubst '$NAMESPACE' < 30-access-kafka.txt | kubectl apply -n $NAMESPACE -f - diff --git a/tests/templates/kuttl/kerberos/30-assert.yaml b/tests/templates/kuttl/kerberos/30-assert.yaml new file mode 100644 index 00000000..3f5be93e --- /dev/null +++ b/tests/templates/kuttl/kerberos/30-assert.yaml @@ -0,0 +1,11 @@ +--- +apiVersion: kuttl.dev/v1beta1 +kind: TestAssert +timeout: 600 +--- +apiVersion: batch/v1 +kind: Job +metadata: + name: access-kafka +status: + succeeded: 1 \ No newline at end of file From 35183eac485fc23dd496c38a11dd396766419bcb Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Fri, 20 Sep 2024 13:34:59 +0200 Subject: [PATCH 08/49] revert class name change and formatting --- rust/crd/src/authentication.rs | 4 ++-- rust/crd/src/lib.rs | 8 ++++---- rust/crd/src/security.rs | 11 ++++++++--- rust/operator-binary/src/kafka_controller.rs | 4 ++-- rust/operator-binary/src/kerberos.rs | 10 ++++++++-- 5 files changed, 24 insertions(+), 13 deletions(-) diff --git a/rust/crd/src/authentication.rs b/rust/crd/src/authentication.rs index 4fd15ad6..04932eed 100644 --- a/rust/crd/src/authentication.rs +++ b/rust/crd/src/authentication.rs @@ -32,7 +32,7 @@ pub enum Error { #[derive(Clone, Deserialize, Debug, Eq, JsonSchema, PartialEq, 
Serialize)] #[serde(rename_all = "camelCase")] -pub struct KafkaAuthenticationClass { +pub struct KafkaAuthentication { /// The AuthenticationClass to use. /// /// ## TLS provider @@ -64,7 +64,7 @@ impl ResolvedAuthenticationClasses { /// - Validation failed pub async fn from_references( client: &Client, - auth_classes: &Vec, + auth_classes: &Vec, ) -> Result { let mut resolved_authentication_classes: Vec = vec![]; diff --git a/rust/crd/src/lib.rs b/rust/crd/src/lib.rs index 5ea90170..b38afbef 100644 --- a/rust/crd/src/lib.rs +++ b/rust/crd/src/lib.rs @@ -9,7 +9,7 @@ use crate::authorization::KafkaAuthorization; use crate::tls::KafkaTls; use affinity::get_affinity; -use authentication::KafkaAuthenticationClass; +use authentication::KafkaAuthentication; use serde::{Deserialize, Serialize}; use snafu::{OptionExt, ResultExt, Snafu}; use stackable_operator::{ @@ -134,7 +134,7 @@ pub struct KafkaClusterSpec { pub struct KafkaClusterConfig { /// Authentication class settings for Kafka like mTLS authentication. #[serde(default)] - pub authentication: Vec, + pub authentication: Vec, /// Struct containing Kerberos secret name. 
pub kerberos: Option, @@ -775,10 +775,10 @@ mod tests { assert_eq!( vec![ - KafkaAuthenticationClass { + KafkaAuthentication { authentication_class: "kafka-client-tls1".to_string() }, - KafkaAuthenticationClass { + KafkaAuthentication { authentication_class: "kafka-client-tls2".to_string() }, ], diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index 1339ad75..6893e8e2 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -240,7 +240,11 @@ impl<'a> KafkaTlsSecurity<'a> { args.push("/stackable/kcat".to_string()); args.push("-b".to_string()); args.push(format!("{pod_fqdn}:{port}")); - args.extend(Self::kcat_client_sasl_ssl(Self::STACKABLE_TLS_CERT_SERVER_DIR, service_name, pod_fqdn)); + args.extend(Self::kcat_client_sasl_ssl( + Self::STACKABLE_TLS_CERT_SERVER_DIR, + service_name, + pod_fqdn, + )); } else if self.tls_server_secret_class().is_some() { args.push("/stackable/kcat".to_string()); args.push("-b".to_string()); @@ -524,9 +528,10 @@ impl<'a> KafkaTlsSecurity<'a> { } fn kcat_client_sasl_ssl( - cert_directory: &str, + cert_directory: &str, service_name: &str, - pod_fqdn: &String) -> Vec { + pod_fqdn: &String, + ) -> Vec { vec![ "-X".to_string(), "security.protocol=SASL_SSL".to_string(), diff --git a/rust/operator-binary/src/kafka_controller.rs b/rust/operator-binary/src/kafka_controller.rs index 1ada8824..12931daf 100644 --- a/rust/operator-binary/src/kafka_controller.rs +++ b/rust/operator-binary/src/kafka_controller.rs @@ -691,8 +691,8 @@ fn build_broker_rolegroup_config_map( })?, ); - tracing::debug!("Applied server config: [{:#?}]", server_cfg); - tracing::debug!("Applied JVM config: [{:#?}]", jvm_sec_props); + tracing::debug!("Applied server config: [{:#?}]", server_cfg); + tracing::debug!("Applied JVM config: [{:#?}]", jvm_sec_props); extend_role_group_config_map( rolegroup, diff --git a/rust/operator-binary/src/kerberos.rs b/rust/operator-binary/src/kerberos.rs index e35704ba..5a792da0 100644 --- 
a/rust/operator-binary/src/kerberos.rs +++ b/rust/operator-binary/src/kerberos.rs @@ -43,11 +43,17 @@ pub fn add_kerberos_pod_config( ); cb_kcat_prober.add_volume_mount("kerberos", "/stackable/kerberos"); cb_kcat_prober.add_env_var("KRB5_CONFIG", "/stackable/kerberos/krb5.conf"); - cb_kcat_prober.add_env_var("KAFKA_OPTS", "-Djava.security.krb5.conf=/stackable/kerberos/krb5.conf"); + cb_kcat_prober.add_env_var( + "KAFKA_OPTS", + "-Djava.security.krb5.conf=/stackable/kerberos/krb5.conf", + ); cb_kafka.add_volume_mount("kerberos", "/stackable/kerberos"); cb_kafka.add_env_var("KRB5_CONFIG", "/stackable/kerberos/krb5.conf"); - cb_kafka.add_env_var("KAFKA_OPTS", "-Djava.security.krb5.conf=/stackable/kerberos/krb5.conf"); + cb_kafka.add_env_var( + "KAFKA_OPTS", + "-Djava.security.krb5.conf=/stackable/kerberos/krb5.conf", + ); } Ok(()) From f2dd95093e2b6138dabbd030caa3308c8651dc45 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Fri, 20 Sep 2024 15:08:40 +0200 Subject: [PATCH 09/49] added validation --- rust/crd/src/lib.rs | 26 ++++++++++++++------ rust/operator-binary/src/kafka_controller.rs | 8 ++++++ 2 files changed, 27 insertions(+), 7 deletions(-) diff --git a/rust/crd/src/lib.rs b/rust/crd/src/lib.rs index b38afbef..b037ce48 100644 --- a/rust/crd/src/lib.rs +++ b/rust/crd/src/lib.rs @@ -92,6 +92,9 @@ pub enum Error { #[snafu(display("fragment validation failure"))] FragmentValidationFailure { source: ValidationError }, + + #[snafu(display("only one authentication method is possible, TLS or Kerberos"))] + MultipleAuthenticationMethodsProvided, } /// A Kafka cluster stacklet. This resource is managed by the Stackable operator for Apache Kafka. 
@@ -294,6 +297,14 @@ impl KafkaCluster { None } } + + pub fn validate_authentication_methods(&self) -> Result<(), Error> { + // TLS authentication and Kerberos authentication are mutually exclusive + if !self.spec.cluster_config.authentication.is_empty() && self.has_kerberos_enabled() { + return Err(Error::MultipleAuthenticationMethodsProvided); + } + Ok(()) + } } /// Reference to a single `Pod` that is a component of a [`KafkaCluster`] @@ -739,7 +750,6 @@ mod tests { zookeeperConfigMapName: xyz "#; let kafka: KafkaCluster = serde_yaml::from_str(kafka_cluster).expect("illegal test input"); - println!("{:#?}", kafka); assert_eq!( Some(KerberosConfig { @@ -771,8 +781,6 @@ mod tests { "#; let kafka: KafkaCluster = serde_yaml::from_str(kafka_cluster).expect("illegal test input"); - println!("{:#?}", kafka); - assert_eq!( vec![ KafkaAuthentication { @@ -787,7 +795,7 @@ mod tests { } #[test] - fn test_get_auth_none() { + fn test_get_auth_multiple() { let kafka_cluster = r#" apiVersion: kafka.stackable.tech/v1alpha1 kind: KafkaCluster @@ -798,6 +806,10 @@ mod tests { image: productVersion: 3.7.1 clusterConfig: + authentication: + - authenticationClass: kafka-client-tls1 + kerberos: + secretClass: kafka-kerberos tls: internalSecretClass: internalTls serverSecretClass: tls @@ -805,8 +817,8 @@ mod tests { "#; let kafka: KafkaCluster = serde_yaml::from_str(kafka_cluster).expect("illegal test input"); - println!("{:#?}", kafka); - - assert_eq!(0, kafka.spec.cluster_config.authentication.len()); + assert_eq!(1, kafka.spec.cluster_config.authentication.len()); + let validation = &kafka.validate_authentication_methods(); + assert!(validation.is_err()); } } diff --git a/rust/operator-binary/src/kafka_controller.rs b/rust/operator-binary/src/kafka_controller.rs index 12931daf..8e8339af 100644 --- a/rust/operator-binary/src/kafka_controller.rs +++ b/rust/operator-binary/src/kafka_controller.rs @@ -316,6 +316,9 @@ pub enum Error { #[snafu(display("failed to add kerberos config"))] 
AddKerberosConfig { source: kerberos::Error }, + + #[snafu(display("failed to validate authentication method"))] + FailedToValidateAuthenticationMethod { source: stackable_kafka_crd::Error }, } type Result = std::result::Result; @@ -375,6 +378,7 @@ impl ReconcilerError for Error { Error::AddVolumesAndVolumeMounts { .. } => None, Error::ResolveNamespace { .. } => None, Error::AddKerberosConfig { .. } => None, + Error::FailedToValidateAuthenticationMethod { .. } => None, } } } @@ -426,6 +430,10 @@ pub async fn reconcile_kafka(kafka: Arc, ctx: Arc) -> Result< .map(Cow::Borrowed) .unwrap_or_default(); + kafka + .validate_authentication_methods() + .context(FailedToValidateAuthenticationMethodSnafu)?; + let kafka_security = KafkaTlsSecurity::new_from_kafka_cluster(client, &kafka) .await .context(FailedToInitializeSecurityContextSnafu)?; From 33b3ab96bb70fa7cc9ce6cd0c6d7664e65ef491f Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Fri, 20 Sep 2024 16:26:31 +0200 Subject: [PATCH 10/49] linting --- tests/templates/kuttl/kerberos/30-assert.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/templates/kuttl/kerberos/30-assert.yaml b/tests/templates/kuttl/kerberos/30-assert.yaml index 3f5be93e..edc6c317 100644 --- a/tests/templates/kuttl/kerberos/30-assert.yaml +++ b/tests/templates/kuttl/kerberos/30-assert.yaml @@ -8,4 +8,4 @@ kind: Job metadata: name: access-kafka status: - succeeded: 1 \ No newline at end of file + succeeded: 1 From 6bcff9440a1915680c9a3b7cc0b591f22e878401 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Fri, 20 Sep 2024 16:41:07 +0200 Subject: [PATCH 11/49] more linting --- tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 b/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 index 6f286fc0..79e20f61 100644 --- a/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 +++ 
b/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 @@ -66,4 +66,4 @@ commands: roleGroups: default: replicas: 3 - EOF \ No newline at end of file + EOF From e43e4fbfc1bf3a3b47f6fdb13d17a5966ed5c324 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Fri, 27 Sep 2024 09:51:10 +0200 Subject: [PATCH 12/49] refactor: add kerberos to list of authentication classes instead of dedicated struct --- Cargo.lock | 4 +- Cargo.nix | 160 +++++++++--------- Cargo.toml | 5 +- crate-hashes.json | 4 +- deploy/helm/kafka-operator/crds/crds.yaml | 12 +- rust/crd/src/authentication.rs | 25 ++- rust/crd/src/lib.rs | 109 ------------ rust/crd/src/listener.rs | 11 +- rust/crd/src/security.rs | 47 ++++- rust/operator-binary/src/kafka_controller.rs | 27 ++- rust/operator-binary/src/kerberos.rs | 5 +- .../kuttl/kerberos/20-install-kafka.yaml.j2 | 16 +- .../kuttl/kerberos/30-access-kafka.txt.j2 | 2 +- 13 files changed, 201 insertions(+), 226 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9b4a0f11..8728c045 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2161,7 +2161,7 @@ dependencies = [ [[package]] name = "stackable-operator" version = "0.76.0" -source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.76.0#a7e70f174fb043a1766e0a80de95834cb4f7513d" +source = "git+https://github.com/stackabletech//operator-rs.git?branch=chore/add-kerberos-auth-prov#61c45e176504ee3ea3a5ae9036dd73be8b68d416" dependencies = [ "chrono", "clap", @@ -2197,7 +2197,7 @@ dependencies = [ [[package]] name = "stackable-operator-derive" version = "0.3.1" -source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.76.0#a7e70f174fb043a1766e0a80de95834cb4f7513d" +source = "git+https://github.com/stackabletech//operator-rs.git?branch=chore/add-kerberos-auth-prov#61c45e176504ee3ea3a5ae9036dd73be8b68d416" dependencies = [ "darling", "proc-macro2", diff --git a/Cargo.nix b/Cargo.nix index 922fac35..001072a4 100644 --- a/Cargo.nix +++ b/Cargo.nix @@ -446,7 
+446,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; features = [ "full" "visit-mut" ]; } ]; @@ -473,7 +473,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; usesDefaultFeatures = false; features = [ "full" "visit-mut" "parsing" "printing" "proc-macro" ]; } @@ -884,10 +884,10 @@ rec { }; "clap" = rec { crateName = "clap"; - version = "4.5.16"; + version = "4.5.17"; edition = "2021"; crateBin = []; - sha256 = "068hjwbrndn4iz4fsc6d52q4ymg1kfsymjnqbxzdil23zbzijrzd"; + sha256 = "1b7nx9wsv5kc4n02ql4ca6p7pjh1n94rjk1n7hdjywsy96w22niy"; dependencies = [ { name = "clap_builder"; @@ -925,9 +925,9 @@ rec { }; "clap_builder" = rec { crateName = "clap_builder"; - version = "4.5.15"; + version = "4.5.17"; edition = "2021"; - sha256 = "1dmas5z20yqmlmfhykr38pn1hkcnr4jzxjw4cs2f6lkn2wmyqsi1"; + sha256 = "0wqsnajayxqpmqflqiqgpix2m8jrjwj6nayssvcpl13smw9dvwlc"; dependencies = [ { name = "anstream"; @@ -983,7 +983,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; features = [ "full" ]; } ]; @@ -1034,9 +1034,9 @@ rec { }; "const_format" = rec { crateName = "const_format"; - version = "0.2.32"; + version = "0.2.33"; edition = "2021"; - sha256 = "0wvns8mzqwkyciwr00p2g5g4ak7zz8m473di85srj11xmz3i98p3"; + sha256 = "0jx6ffj65prbx1w9664ddwm73k7hm5g25afyvjq4y4gi3zc5bijh"; authors = [ "rodrimati1992 " ]; @@ -1066,9 +1066,9 @@ rec { }; "const_format_proc_macros" = rec { crateName = "const_format_proc_macros"; - version = "0.2.32"; + version = "0.2.33"; edition = "2021"; - sha256 = "0015dzbjbd773nn6096dwqv11fm8m3gy4a4a56cz5x10zl4gzxn7"; + sha256 = "1c8f7xh2b2kca9hlzvjplcdkvr4iqpsk4yd236n1nyzljd5s9wgg"; procMacro = true; authors = [ "rodrimati1992 " @@ -1287,7 +1287,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; features = [ "full" "extra-traits" ]; } ]; @@ -1317,16 +1317,16 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; 
} ]; }; "delegate" = rec { crateName = "delegate"; - version = "0.12.0"; + version = "0.13.0"; edition = "2018"; - sha256 = "16zpkc6v2ss5qivwx7p7vb1bjnb6s0p7kkifaqkgyl7bpv68y0af"; + sha256 = "0mh98via297c51dq7b2yb0fq7gxlgb8px2iz4w3sjgxpxc7vnq2h"; procMacro = true; authors = [ "Godfrey Chan " @@ -1343,7 +1343,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; features = [ "full" "visit-mut" ]; } ]; @@ -1888,7 +1888,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; features = [ "full" ]; } ]; @@ -3227,10 +3227,10 @@ rec { }; "k8s-openapi" = rec { crateName = "k8s-openapi"; - version = "0.22.0"; + version = "0.23.0"; edition = "2021"; - links = "k8s-openapi-0.22.0"; - sha256 = "0mcpy1gdxpqayc74i3x3ayh3q5bl6dzvsgmw91jq1r9sjkxill0r"; + links = "k8s-openapi-0.23.0"; + sha256 = "04qv2iqwm3mgjvyp2m6n3vf6nnpjh5a60kf9ah9k1n184d04g24w"; libName = "k8s_openapi"; authors = [ "Arnav Singh " @@ -3272,17 +3272,17 @@ rec { } ]; features = { - "earliest" = [ "v1_24" ]; - "latest" = [ "v1_30" ]; + "earliest" = [ "v1_26" ]; + "latest" = [ "v1_31" ]; "schemars" = [ "dep:schemars" ]; }; - resolvedDefaultFeatures = [ "schemars" "v1_30" ]; + resolvedDefaultFeatures = [ "schemars" "v1_31" ]; }; "kube" = rec { crateName = "kube"; - version = "0.93.1"; + version = "0.95.0"; edition = "2021"; - sha256 = "05k4wwb1f1hs0izn4hnyaw0l1wljqh0sh74wc4ijs6mgfl094r83"; + sha256 = "1y04cqn7ami9rs6csjsf63c4clm5zyvyx3bzbmyaf828hly0c8gs"; authors = [ "clux " "Natalie Klestrup Röijezon " @@ -3325,34 +3325,35 @@ rec { ]; features = { "admission" = [ "kube-core/admission" ]; + "aws-lc-rs" = [ "kube-client?/aws-lc-rs" ]; "client" = [ "kube-client/client" "config" ]; "config" = [ "kube-client/config" ]; "default" = [ "client" "rustls-tls" ]; "derive" = [ "kube-derive" "kube-core/schema" ]; - "gzip" = [ "kube-client/gzip" ]; - "http-proxy" = [ "kube-client/http-proxy" ]; + "gzip" = [ "kube-client/gzip" "client" ]; + "http-proxy" = [ 
"kube-client/http-proxy" "client" ]; "jsonpatch" = [ "kube-core/jsonpatch" ]; "kube-client" = [ "dep:kube-client" ]; "kube-derive" = [ "dep:kube-derive" ]; "kube-runtime" = [ "dep:kube-runtime" ]; "kubelet-debug" = [ "kube-client/kubelet-debug" "kube-core/kubelet-debug" ]; - "oauth" = [ "kube-client/oauth" ]; - "oidc" = [ "kube-client/oidc" ]; - "openssl-tls" = [ "kube-client/openssl-tls" ]; + "oauth" = [ "kube-client/oauth" "client" ]; + "oidc" = [ "kube-client/oidc" "client" ]; + "openssl-tls" = [ "kube-client/openssl-tls" "client" ]; "runtime" = [ "kube-runtime" ]; - "rustls-tls" = [ "kube-client/rustls-tls" ]; - "socks5" = [ "kube-client/socks5" ]; - "unstable-client" = [ "kube-client/unstable-client" ]; - "unstable-runtime" = [ "kube-runtime/unstable-runtime" ]; + "rustls-tls" = [ "kube-client/rustls-tls" "client" ]; + "socks5" = [ "kube-client/socks5" "client" ]; + "unstable-client" = [ "kube-client/unstable-client" "client" ]; + "unstable-runtime" = [ "kube-runtime/unstable-runtime" "runtime" ]; "ws" = [ "kube-client/ws" "kube-core/ws" ]; }; resolvedDefaultFeatures = [ "client" "config" "derive" "jsonpatch" "kube-client" "kube-derive" "kube-runtime" "runtime" "rustls-tls" ]; }; "kube-client" = rec { crateName = "kube-client"; - version = "0.93.1"; + version = "0.95.0"; edition = "2021"; - sha256 = "00w69ym31mzdi7kmxq1cjjnjps92cvb7mad5jw6a842v7bmkc4yq"; + sha256 = "1dwvvd37psgzcaqv87lppx2yrmdb8wggx9ki1s8132lxbigkbhii"; libName = "kube_client"; authors = [ "clux " @@ -3543,6 +3544,7 @@ rec { features = { "__non_core" = [ "tracing" "serde_yaml" "base64" ]; "admission" = [ "kube-core/admission" ]; + "aws-lc-rs" = [ "rustls?/aws-lc-rs" ]; "base64" = [ "dep:base64" ]; "bytes" = [ "dep:bytes" ]; "chrono" = [ "dep:chrono" ]; @@ -3591,9 +3593,9 @@ rec { }; "kube-core" = rec { crateName = "kube-core"; - version = "0.93.1"; + version = "0.95.0"; edition = "2021"; - sha256 = "1rhg99v7w5qrj3ikh6n0pbbhp1kj6q7spvydcf846zbq9nkp7qyc"; + sha256 = 
"0rp6n4k4b1ai6ghxqci72wy67kxrgn6x9rs70ajl9dcx3kchn0zk"; libName = "kube_core"; authors = [ "clux " @@ -3635,6 +3637,10 @@ rec { packageId = "serde"; features = [ "derive" ]; } + { + name = "serde-value"; + packageId = "serde-value"; + } { name = "serde_json"; packageId = "serde_json"; @@ -3664,9 +3670,9 @@ rec { }; "kube-derive" = rec { crateName = "kube-derive"; - version = "0.93.1"; + version = "0.95.0"; edition = "2021"; - sha256 = "1wkqxfjxx4lc8gapd44kylprwgxhk29cwpdy55ri47kr8jc6r8h4"; + sha256 = "09qcgysprgrdhl838y28xfvvyrak0x367s5879vhmmyxisbvx67s"; procMacro = true; libName = "kube_derive"; authors = [ @@ -3693,7 +3699,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; features = [ "extra-traits" ]; } ]; @@ -3701,9 +3707,9 @@ rec { }; "kube-runtime" = rec { crateName = "kube-runtime"; - version = "0.93.1"; + version = "0.95.0"; edition = "2021"; - sha256 = "197zjakpach42n6s62qdh0g6qrs8cawzyhqb474qaq7d1wy7711v"; + sha256 = "053r458alv2gk2q7vb8nbycw4d1bclsvja7i10j95b21ls5cp5aq"; libName = "kube_runtime"; authors = [ "clux " @@ -4788,7 +4794,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; } ]; features = { @@ -4861,7 +4867,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; features = [ "full" "visit-mut" ]; } ]; @@ -5498,7 +5504,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; features = [ "full" "parsing" "extra-traits" "visit" "visit-mut" ]; } { @@ -5863,7 +5869,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; features = [ "extra-traits" ]; } ]; @@ -6003,9 +6009,9 @@ rec { }; "serde" = rec { crateName = "serde"; - version = "1.0.209"; + version = "1.0.210"; edition = "2018"; - sha256 = "029yqqbb3c8v3gc720fhxn49dhgvb88zbyprdg5621riwzzy1z4r"; + sha256 = "0flc0z8wgax1k4j5bf2zyq48bgzyv425jkd5w0i6wbh7f8j5kqy8"; authors = [ "Erick Tryzelaar " "David Tolnay " @@ -6058,9 +6064,9 @@ rec { }; 
"serde_derive" = rec { crateName = "serde_derive"; - version = "1.0.209"; + version = "1.0.210"; edition = "2015"; - sha256 = "0w114ksg1ymnmqdisd0g1j3g8jgz6pam45xg6yb47dfpkybip0x5"; + sha256 = "07yzy4wafk79ps0hmbqmsqh5xjna4pm4q57wc847bb8gl3nh4f94"; procMacro = true; authors = [ "Erick Tryzelaar " @@ -6081,7 +6087,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; usesDefaultFeatures = false; features = [ "clone-impls" "derive" "parsing" "printing" "proc-macro" ]; } @@ -6113,7 +6119,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; usesDefaultFeatures = false; features = [ "clone-impls" "derive" "parsing" "printing" ]; } @@ -6122,9 +6128,9 @@ rec { }; "serde_json" = rec { crateName = "serde_json"; - version = "1.0.127"; + version = "1.0.128"; edition = "2021"; - sha256 = "1b99lgg1d986gwz5fbmmzmvjmqg5bx0lzmhy6rqp5gc2kxnw0hw0"; + sha256 = "1n43nia50ybpcfmh3gcw4lcc627qsg9nyakzwgkk9pm10xklbxbg"; authors = [ "Erick Tryzelaar " "David Tolnay " @@ -6484,7 +6490,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; features = [ "full" ]; } ]; @@ -6686,13 +6692,13 @@ rec { }; "stackable-operator" = rec { crateName = "stackable-operator"; - version = "0.74.0"; + version = "0.76.0"; edition = "2021"; workspace_member = null; src = pkgs.fetchgit { - url = "https://github.com/stackabletech/operator-rs.git"; - rev = "c77a5423b66bc1667b63af7d8bec00de88a5303f"; - sha256 = "1g1a0v98wlcb36ibwv1nv75g3b3s1mjmaps443fc2w2maam94lya"; + url = "https://github.com/stackabletech//operator-rs.git"; + rev = "61c45e176504ee3ea3a5ae9036dd73be8b68d416"; + sha256 = "1s7lw82zdfw6frci5lh8rjzjhgjfa27xz11xlllb6p8dxpxhlcl0"; }; libName = "stackable_operator"; authors = [ @@ -6741,7 +6747,7 @@ rec { name = "k8s-openapi"; packageId = "k8s-openapi"; usesDefaultFeatures = false; - features = [ "schemars" "v1_30" ]; + features = [ "schemars" "v1_31" ]; } { name = "kube"; @@ -6840,9 +6846,9 @@ rec { edition = 
"2021"; workspace_member = null; src = pkgs.fetchgit { - url = "https://github.com/stackabletech/operator-rs.git"; - rev = "c77a5423b66bc1667b63af7d8bec00de88a5303f"; - sha256 = "1g1a0v98wlcb36ibwv1nv75g3b3s1mjmaps443fc2w2maam94lya"; + url = "https://github.com/stackabletech//operator-rs.git"; + rev = "61c45e176504ee3ea3a5ae9036dd73be8b68d416"; + sha256 = "1s7lw82zdfw6frci5lh8rjzjhgjfa27xz11xlllb6p8dxpxhlcl0"; }; procMacro = true; libName = "stackable_operator_derive"; @@ -6864,7 +6870,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; } ]; @@ -6937,7 +6943,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; features = [ "parsing" "extra-traits" ]; } ]; @@ -6990,11 +6996,11 @@ rec { }; resolvedDefaultFeatures = [ "clone-impls" "default" "derive" "extra-traits" "full" "parsing" "printing" "proc-macro" "quote" "visit" ]; }; - "syn 2.0.76" = rec { + "syn 2.0.77" = rec { crateName = "syn"; - version = "2.0.76"; + version = "2.0.77"; edition = "2021"; - sha256 = "09fmdkmqqkkfkg53qnldl10ppwqkqlw22ixhg4rgrkp02hd0i3jp"; + sha256 = "1vbkwfp9ymmi0fsyyjsqfvnv7gm8vjgl4pzprbk7p3pxc7gvqdcz"; authors = [ "David Tolnay " ]; @@ -7060,7 +7066,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; } ]; @@ -7299,9 +7305,9 @@ rec { }; "tokio" = rec { crateName = "tokio"; - version = "1.39.3"; + version = "1.40.0"; edition = "2021"; - sha256 = "1xgzhj7bxqqpjaabjkgsx8hi0f600bzj4iyp9f0a9gr3k6dwkawv"; + sha256 = "166rllhfkyqp0fs7sxn6crv74iizi4wzd3cvxkcpmlk52qip1c72"; authors = [ "Tokio Contributors " ]; @@ -7422,7 +7428,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; features = [ "full" ]; } ]; @@ -7956,7 +7962,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; usesDefaultFeatures = false; features = [ "full" "parsing" "printing" "visit-mut" "clone-impls" "extra-traits" "proc-macro" ]; } @@ -8594,7 +8600,7 @@ rec { } { name 
= "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; features = [ "full" ]; } { @@ -8653,7 +8659,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; features = [ "visit" "full" ]; } { @@ -9250,7 +9256,7 @@ rec { } { name = "syn"; - packageId = "syn 2.0.76"; + packageId = "syn 2.0.77"; } ]; diff --git a/Cargo.toml b/Cargo.toml index 71f327a9..4c92f17b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,5 +27,6 @@ strum = { version = "0.26", features = ["derive"] } tokio = { version = "1.40", features = ["full"] } tracing = "0.1" -# [patch."https://github.com/stackabletech/operator-rs.git"] -# stackable-operator = { git = "https://github.com/stackabletech//operator-rs.git", branch = "main" } +[patch."https://github.com/stackabletech/operator-rs.git"] +# TODO change back to tag! +stackable-operator = { git = "https://github.com/stackabletech//operator-rs.git", branch = "chore/add-kerberos-auth-prov" } diff --git a/crate-hashes.json b/crate-hashes.json index 8725a492..dc47fb72 100644 --- a/crate-hashes.json +++ b/crate-hashes.json @@ -1,5 +1,5 @@ { - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.74.0#stackable-operator-derive@0.3.1": "1g1a0v98wlcb36ibwv1nv75g3b3s1mjmaps443fc2w2maam94lya", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.74.0#stackable-operator@0.74.0": "1g1a0v98wlcb36ibwv1nv75g3b3s1mjmaps443fc2w2maam94lya", + "git+https://github.com/stackabletech//operator-rs.git?branch=chore%2Fadd-kerberos-auth-prov#stackable-operator-derive@0.3.1": "1s7lw82zdfw6frci5lh8rjzjhgjfa27xz11xlllb6p8dxpxhlcl0", + "git+https://github.com/stackabletech//operator-rs.git?branch=chore%2Fadd-kerberos-auth-prov#stackable-operator@0.76.0": "1s7lw82zdfw6frci5lh8rjzjhgjfa27xz11xlllb6p8dxpxhlcl0", "git+https://github.com/stackabletech/product-config.git?tag=0.7.0#product-config@0.7.0": "0gjsm80g6r75pm3824dcyiz4ysq1ka4c1if6k1mjm9cnd5ym0gny" } \ No newline at end of file diff --git 
a/deploy/helm/kafka-operator/crds/crds.yaml b/deploy/helm/kafka-operator/crds/crds.yaml index c0e07561..dc557069 100644 --- a/deploy/helm/kafka-operator/crds/crds.yaml +++ b/deploy/helm/kafka-operator/crds/crds.yaml @@ -549,6 +549,8 @@ spec: Only affects client connections. This setting controls: - If clients need to authenticate themselves against the broker via TLS - Which ca.crt to use when validating the provided client certs This will override the server TLS settings (if set) in `spec.clusterConfig.tls.serverSecretClass`. + + ## Kerberos provider type: string required: - authenticationClass @@ -574,16 +576,6 @@ spec: - configMapName type: object type: object - kerberos: - description: Struct containing Kerberos secret name. - nullable: true - properties: - secretClass: - description: Name of the SecretClass providing the keytab for the Kafka services. - type: string - required: - - secretClass - type: object tls: default: internalSecretClass: tls diff --git a/rust/crd/src/authentication.rs b/rust/crd/src/authentication.rs index 04932eed..cebff7d8 100644 --- a/rust/crd/src/authentication.rs +++ b/rust/crd/src/authentication.rs @@ -42,6 +42,8 @@ pub struct KafkaAuthentication { /// - Which ca.crt to use when validating the provided client certs /// /// This will override the server TLS settings (if set) in `spec.clusterConfig.tls.serverSecretClass`. + /// + /// ## Kerberos provider pub authentication_class: String, } @@ -90,6 +92,13 @@ impl ResolvedAuthenticationClasses { .find(|auth| matches!(auth.spec.provider, AuthenticationClassProvider::Tls(_))) } + /// Return the (first) Kerberos `AuthenticationClass` if available + pub fn get_kerberos_authentication_class(&self) -> Option<&AuthenticationClass> { + self.resolved_authentication_classes + .iter() + .find(|auth| matches!(auth.spec.provider, AuthenticationClassProvider::Kerberos(_))) + } + /// Validates the resolved AuthenticationClasses. 
/// Currently errors out if: /// - More than one AuthenticationClass was provided @@ -101,8 +110,22 @@ impl ResolvedAuthenticationClasses { for auth_class in &self.resolved_authentication_classes { match &auth_class.spec.provider { + // explicitly list each branch so new elements do not get overlooked AuthenticationClassProvider::Tls(_) => {} - _ => { + AuthenticationClassProvider::Kerberos(_) => {} + AuthenticationClassProvider::Static(_) => { + return Err(Error::AuthenticationProviderNotSupported { + authentication_class: ObjectRef::from_obj(auth_class), + provider: auth_class.spec.provider.to_string(), + }) + } + AuthenticationClassProvider::Ldap(_) => { + return Err(Error::AuthenticationProviderNotSupported { + authentication_class: ObjectRef::from_obj(auth_class), + provider: auth_class.spec.provider.to_string(), + }) + } + AuthenticationClassProvider::Oidc(_) => { return Err(Error::AuthenticationProviderNotSupported { authentication_class: ObjectRef::from_obj(auth_class), provider: auth_class.spec.provider.to_string(), diff --git a/rust/crd/src/lib.rs b/rust/crd/src/lib.rs index b037ce48..b40afa8c 100644 --- a/rust/crd/src/lib.rs +++ b/rust/crd/src/lib.rs @@ -92,9 +92,6 @@ pub enum Error { #[snafu(display("fragment validation failure"))] FragmentValidationFailure { source: ValidationError }, - - #[snafu(display("only one authentication method is possible, TLS or Kerberos"))] - MultipleAuthenticationMethodsProvided, } /// A Kafka cluster stacklet. This resource is managed by the Stackable operator for Apache Kafka. @@ -139,9 +136,6 @@ pub struct KafkaClusterConfig { #[serde(default)] pub authentication: Vec, - /// Struct containing Kerberos secret name. - pub kerberos: Option, - /// Authorization settings for Kafka like OPA. 
#[serde(default)] pub authorization: KafkaAuthorization, @@ -167,19 +161,6 @@ pub struct KafkaClusterConfig { pub zookeeper_config_map_name: String, } -#[derive(Clone, Debug, Deserialize, Eq, Hash, JsonSchema, PartialEq, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct AuthenticationConfig { - pub kerberos: KerberosConfig, -} - -#[derive(Clone, Debug, Deserialize, Eq, Hash, JsonSchema, PartialEq, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct KerberosConfig { - /// Name of the SecretClass providing the keytab for the Kafka services. - pub secret_class: String, -} - impl KafkaCluster { /// The name of the load-balanced Kubernetes Service providing the bootstrap address. Kafka clients will use this /// to get a list of broker addresses and will use those to transmit data to the correct broker. @@ -285,26 +266,6 @@ impl KafkaCluster { tracing::debug!("Merged config: {:?}", conf_role_group); fragment::validate(conf_role_group).context(FragmentValidationFailureSnafu) } - - pub fn has_kerberos_enabled(&self) -> bool { - self.kerberos_secret_class().is_some() - } - - pub fn kerberos_secret_class(&self) -> Option { - if let Some(kerberos) = self.spec.cluster_config.kerberos.clone() { - Some(kerberos.secret_class) - } else { - None - } - } - - pub fn validate_authentication_methods(&self) -> Result<(), Error> { - // TLS authentication and Kerberos authentication are mutually exclusive - if !self.spec.cluster_config.authentication.is_empty() && self.has_kerberos_enabled() { - return Err(Error::MultipleAuthenticationMethodsProvided); - } - Ok(()) - } } /// Reference to a single `Pod` that is a component of a [`KafkaCluster`] @@ -521,22 +482,6 @@ impl Configuration for KafkaConfigFragment { Some("true".to_string()), ); } - // Kerberos - if resource.has_kerberos_enabled() { - config.insert( - "sasl.enabled.mechanisms".to_string(), - Some("GSSAPI".to_string()), - ); - config.insert( - "sasl.kerberos.service.name".to_string(), - 
Some(KafkaRole::Broker.kerberos_service_name().to_string()), - ); - config.insert( - "sasl.mechanism.inter.broker.protocol".to_string(), - Some("GSSAPI".to_string()), - ); - tracing::debug!("Kerberos configs added: [{:#?}]", config); - } } Ok(config) @@ -733,32 +678,6 @@ mod tests { ); } - #[test] - fn test_get_auth_kerberos() { - let kafka_cluster = r#" - apiVersion: kafka.stackable.tech/v1alpha1 - kind: KafkaCluster - metadata: - name: simple-kafka - namespace: default - spec: - image: - productVersion: 3.7.1 - clusterConfig: - kerberos: - secretClass: kafka-kerberos - zookeeperConfigMapName: xyz - "#; - let kafka: KafkaCluster = serde_yaml::from_str(kafka_cluster).expect("illegal test input"); - - assert_eq!( - Some(KerberosConfig { - secret_class: "kafka-kerberos".to_string() - }), - kafka.spec.cluster_config.kerberos - ); - } - #[test] fn test_get_auth_tls() { let kafka_cluster = r#" @@ -793,32 +712,4 @@ mod tests { kafka.spec.cluster_config.authentication ); } - - #[test] - fn test_get_auth_multiple() { - let kafka_cluster = r#" - apiVersion: kafka.stackable.tech/v1alpha1 - kind: KafkaCluster - metadata: - name: simple-kafka - namespace: default - spec: - image: - productVersion: 3.7.1 - clusterConfig: - authentication: - - authenticationClass: kafka-client-tls1 - kerberos: - secretClass: kafka-kerberos - tls: - internalSecretClass: internalTls - serverSecretClass: tls - zookeeperConfigMapName: xyz - "#; - let kafka: KafkaCluster = serde_yaml::from_str(kafka_cluster).expect("illegal test input"); - - assert_eq!(1, kafka.spec.cluster_config.authentication.len()); - let validation = &kafka.validate_authentication_methods(); - assert!(validation.is_err()); - } } diff --git a/rust/crd/src/listener.rs b/rust/crd/src/listener.rs index 3cefee4d..82d9d10e 100644 --- a/rust/crd/src/listener.rs +++ b/rust/crd/src/listener.rs @@ -91,7 +91,6 @@ impl Display for KafkaListener { } pub fn get_kafka_listener_config( - kafka: &KafkaCluster, kafka_security: &KafkaTlsSecurity, 
pod_fqdn: &String, ) -> Result { @@ -114,7 +113,7 @@ pub fn get_kafka_listener_config( }); listener_security_protocol_map .insert(KafkaListenerName::ClientAuth, KafkaListenerProtocol::Ssl); - } else if kafka.has_kerberos_enabled() { + } else if kafka_security.has_kerberos_enabled() { // 2) Kerberos and TLS authentication classes are mutually exclusive and Kerberos takes preference listeners.push(KafkaListener { name: KafkaListenerName::Client, @@ -159,7 +158,7 @@ pub fn get_kafka_listener_config( } // INTERNAL - if kafka.has_kerberos_enabled() { + if kafka_security.has_kerberos_enabled() { // 5) Kerberos and TLS authentication classes are mutually exclusive and Kerberos takes preference listeners.push(KafkaListener { name: KafkaListenerName::Internal, @@ -273,7 +272,7 @@ mod tests { Some("tls".to_string()), ); let pod_fqdn = pod_fqdn(&kafka, object_name).unwrap(); - let config = get_kafka_listener_config(&kafka, &kafka_security, &pod_fqdn).unwrap(); + let config = get_kafka_listener_config(&kafka_security, &pod_fqdn).unwrap(); assert_eq!( config.listeners(), @@ -333,7 +332,7 @@ mod tests { "tls".to_string(), Some("tls".to_string()), ); - let config = get_kafka_listener_config(&kafka, &kafka_security, &pod_fqdn).unwrap(); + let config = get_kafka_listener_config(&kafka_security, &pod_fqdn).unwrap(); assert_eq!( config.listeners(), @@ -395,7 +394,7 @@ mod tests { "".to_string(), None, ); - let config = get_kafka_listener_config(&kafka, &kafka_security, &pod_fqdn).unwrap(); + let config = get_kafka_listener_config(&kafka_security, &pod_fqdn).unwrap(); assert_eq!( config.listeners(), diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index 6893e8e2..81f4611c 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -39,6 +39,9 @@ pub enum Error { SecretVolumeBuild { source: stackable_operator::builder::pod::volume::SecretOperatorVolumeSourceBuilderError, }, + + #[snafu(display("only one authentication method is possible, TLS or Kerberos"))] 
+ MultipleAuthenticationMethodsProvided, } /// Helper struct combining TLS settings for server and internal with the resolved AuthenticationClasses @@ -181,6 +184,34 @@ impl<'a> KafkaTlsSecurity<'a> { } } + pub fn has_kerberos_enabled(&self) -> bool { + self.kerberos_secret_class().is_some() + } + + pub fn kerberos_secret_class(&self) -> Option { + if let Some(kerberos) = self + .resolved_authentication_classes + .get_kerberos_authentication_class() + { + match &kerberos.spec.provider { + AuthenticationClassProvider::Kerberos(kerberos) => { + Some(kerberos.kerberos_secret_class.clone()) + } + _ => None, + } + } else { + None + } + } + + pub fn validate_authentication_methods(&self) -> Result<(), Error> { + // Client TLS authentication and Kerberos authentication are mutually exclusive + if self.tls_client_authentication_class().is_some() && self.has_kerberos_enabled() { + return Err(Error::MultipleAuthenticationMethodsProvided); + } + Ok(()) + } + /// Return the Kafka (secure) client port depending on tls or authentication settings. pub fn client_port(&self) -> u16 { if self.tls_enabled() { @@ -227,7 +258,7 @@ impl<'a> KafkaTlsSecurity<'a> { args.extend(Self::kcat_client_auth_ssl( Self::STACKABLE_TLS_CERT_SERVER_DIR, )); - } else if self.kafka.has_kerberos_enabled() { + } else if self.has_kerberos_enabled() { let service_name = KafkaRole::Broker.kerberos_service_name(); // here we need to specify a shell so that variable substitution will work // see e.g. 
https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1ExecAction.md @@ -448,6 +479,20 @@ impl<'a> KafkaTlsSecurity<'a> { ); } + // Kerberos + if self.has_kerberos_enabled() { + config.insert("sasl.enabled.mechanisms".to_string(), "GSSAPI".to_string()); + config.insert( + "sasl.kerberos.service.name".to_string(), + KafkaRole::Broker.kerberos_service_name().to_string(), + ); + config.insert( + "sasl.mechanism.inter.broker.protocol".to_string(), + "GSSAPI".to_string(), + ); + tracing::debug!("Kerberos configs added: [{:#?}]", config); + } + // common config.insert( Self::INTER_BROKER_LISTENER_NAME.to_string(), diff --git a/rust/operator-binary/src/kafka_controller.rs b/rust/operator-binary/src/kafka_controller.rs index a57d5420..5dc494ef 100644 --- a/rust/operator-binary/src/kafka_controller.rs +++ b/rust/operator-binary/src/kafka_controller.rs @@ -318,7 +318,9 @@ pub enum Error { AddKerberosConfig { source: kerberos::Error }, #[snafu(display("failed to validate authentication method"))] - FailedToValidateAuthenticationMethod { source: stackable_kafka_crd::Error }, + FailedToValidateAuthenticationMethod { + source: stackable_kafka_crd::security::Error, + }, } type Result = std::result::Result; @@ -430,14 +432,20 @@ pub async fn reconcile_kafka(kafka: Arc, ctx: Arc) -> Result< .map(Cow::Borrowed) .unwrap_or_default(); - kafka - .validate_authentication_methods() - .context(FailedToValidateAuthenticationMethodSnafu)?; - let kafka_security = KafkaTlsSecurity::new_from_kafka_cluster(client, &kafka) .await .context(FailedToInitializeSecurityContextSnafu)?; + tracing::debug!("Security settings: kerberos enabled/secret-class: {}/{:#?} tls enabled/client-auth-class: {}/{:#?}", + kafka_security.has_kerberos_enabled(), + kafka_security.kerberos_secret_class(), + kafka_security.tls_enabled(), + kafka_security.tls_client_authentication_class()); + + kafka_security + .validate_authentication_methods() + .context(FailedToValidateAuthenticationMethodSnafu)?; + // 
Assemble the OPA connection string from the discovery and the given path if provided // Will be passed as --override parameter in the cli in the state ful set let opa_connect = if let Some(opa_spec) = &kafka.spec.cluster_config.authorization.opa { @@ -809,9 +817,10 @@ fn build_broker_rolegroup_statefulset( .add_volume_and_volume_mounts(&mut pod_builder, &mut cb_kcat_prober, &mut cb_kafka) .context(AddVolumesAndVolumeMountsSnafu)?; - if kafka.has_kerberos_enabled() { + if kafka_security.has_kerberos_enabled() { add_kerberos_pod_config( kafka, + kafka_security, kafka_role, &mut cb_kcat_prober, &mut cb_kafka, @@ -915,8 +924,8 @@ fn build_broker_rolegroup_statefulset( "-Djava.security.properties={STACKABLE_CONFIG_DIR}/{JVM_SECURITY_PROPERTIES_FILE} -javaagent:/stackable/jmx/jmx_prometheus_javaagent.jar={METRICS_PORT}:/stackable/jmx/broker.yaml", ); let pod_fqdn = pod_fqdn(kafka, &rolegroup_ref.object_name()).context(ResolveNamespaceSnafu)?; - let kafka_listeners = get_kafka_listener_config(kafka, kafka_security, &pod_fqdn) - .context(InvalidKafkaListenersSnafu)?; + let kafka_listeners = + get_kafka_listener_config(kafka_security, &pod_fqdn).context(InvalidKafkaListenersSnafu)?; cb_kafka .image_from_product_image(resolved_product_image) @@ -931,7 +940,7 @@ fn build_broker_rolegroup_statefulset( .kafka_container_commands( &kafka_listeners, opa_connect_string, - kafka.has_kerberos_enabled(), + kafka_security.has_kerberos_enabled(), &pod_fqdn, ) .join("\n")]) diff --git a/rust/operator-binary/src/kerberos.rs b/rust/operator-binary/src/kerberos.rs index 5a792da0..46504a3e 100644 --- a/rust/operator-binary/src/kerberos.rs +++ b/rust/operator-binary/src/kerberos.rs @@ -1,5 +1,5 @@ use snafu::{ResultExt, Snafu}; -use stackable_kafka_crd::{KafkaCluster, KafkaRole}; +use stackable_kafka_crd::{security::KafkaTlsSecurity, KafkaCluster, KafkaRole}; use stackable_operator::{ builder::pod::{ container::ContainerBuilder, @@ -22,12 +22,13 @@ pub enum Error { pub fn 
add_kerberos_pod_config( kafka: &KafkaCluster, + kafka_security: &KafkaTlsSecurity, role: &KafkaRole, cb_kcat_prober: &mut ContainerBuilder, cb_kafka: &mut ContainerBuilder, pb: &mut PodBuilder, ) -> Result<(), Error> { - if let Some(kerberos_secret_class) = kafka.kerberos_secret_class() { + if let Some(kerberos_secret_class) = kafka_security.kerberos_secret_class() { // Mount keytab let kerberos_secret_operator_volume = SecretOperatorVolumeSourceBuilder::new(kerberos_secret_class) diff --git a/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 b/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 index 79e20f61..9241364a 100644 --- a/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 +++ b/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 @@ -37,18 +37,26 @@ commands: autoGenerate: true {% endif %} --- + apiVersion: authentication.stackable.tech/v1alpha1 + kind: AuthenticationClass + metadata: + name: kerberos-auth + spec: + provider: + kerberos: + kerberosSecretClass: kerberos-$NAMESPACE + --- apiVersion: kafka.stackable.tech/v1alpha1 kind: KafkaCluster metadata: name: test-kafka spec: image: - productVersion: 3.7.1 #"{{ test_scenario['values']['kafka'] }}" - repo: docker.stackable.tech/apoc/stackable # TODO merge images PR + productVersion: "{{ test_scenario['values']['kafka'] }}" pullPolicy: IfNotPresent clusterConfig: - kerberos: - secretClass: kerberos-$NAMESPACE + authentication: + - authenticationClass: kerberos-auth tls: {% if test_scenario['values']['use-client-tls'] == 'true' %} serverSecretClass: tls diff --git a/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 b/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 index 2b68af37..52c2542a 100644 --- a/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 +++ b/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 @@ -9,7 +9,7 @@ spec: serviceAccountName: test-sa containers: - name: access-kafka - image: 
docker.stackable.tech/apoc/stackable/kafka:3.7.1-stackable0.0.0-cyrus-sasl-gssapi + image: docker.stackable.tech/stackable/kafka:{{ test_scenario['values']['kafka'] }}-stackable0.0.0-dev command: - /bin/bash - /tmp/script/script.sh From 61f17c08448fd0fa7a2a28b542cb79f7c043ce00 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Fri, 27 Sep 2024 13:01:50 +0200 Subject: [PATCH 13/49] changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0fca81f2..e6650837 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ All notable changes to this project will be documented in this file. ### Added - Support version `3.8.0` ([#753]). +- Add support for Kerberos authentication ([#762]). ### Changed @@ -27,6 +28,7 @@ All notable changes to this project will be documented in this file. [#741]: https://github.com/stackabletech/kafka-operator/pull/741 [#750]: https://github.com/stackabletech/kafka-operator/pull/750 [#753]: https://github.com/stackabletech/kafka-operator/pull/753 +[#762]: https://github.com/stackabletech/kafka-operator/pull/762 ## [24.7.0] - 2024-07-24 From d0c9158cbf5ce1713d9055ebc718054ff1e551ab Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Mon, 30 Sep 2024 12:42:29 +0200 Subject: [PATCH 14/49] reverted operator-rs ref and corrected test --- Cargo.toml | 7 +++---- tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 | 4 ++-- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 4c92f17b..774d117c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -22,11 +22,10 @@ serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" serde_yaml = "0.9" snafu = "0.8" -stackable-operator = { git = "https://github.com/stackabletech/operator-rs.git", tag = "stackable-operator-0.76.0" } +stackable-operator = { git = "https://github.com/stackabletech/operator-rs.git", tag = "stackable-operator-0.78.0" } strum = { version = "0.26", features = ["derive"] } tokio = { 
version = "1.40", features = ["full"] } tracing = "0.1" -[patch."https://github.com/stackabletech/operator-rs.git"] -# TODO change back to tag! -stackable-operator = { git = "https://github.com/stackabletech//operator-rs.git", branch = "chore/add-kerberos-auth-prov" } +# [patch."https://github.com/stackabletech/operator-rs.git"] +# stackable-operator = { git = "https://github.com/stackabletech//operator-rs.git", branch = "main" } diff --git a/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 b/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 index 9241364a..4fbf08cb 100644 --- a/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 +++ b/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 @@ -40,7 +40,7 @@ commands: apiVersion: authentication.stackable.tech/v1alpha1 kind: AuthenticationClass metadata: - name: kerberos-auth + name: kerberos-auth-$NAMESPACE spec: provider: kerberos: @@ -56,7 +56,7 @@ commands: pullPolicy: IfNotPresent clusterConfig: authentication: - - authenticationClass: kerberos-auth + - authenticationClass: kerberos-auth-$NAMESPACE tls: {% if test_scenario['values']['use-client-tls'] == 'true' %} serverSecretClass: tls From d0ea2771734455115dfd42cc4e0b535ff1fac023 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Mon, 30 Sep 2024 13:35:05 +0200 Subject: [PATCH 15/49] fixed changes due to operator-rs to 0.78.0 --- Cargo.lock | 89 ++++++++++---------- Cargo.toml | 2 +- rust/crd/src/security.rs | 85 ++++++++++++------- rust/operator-binary/src/kafka_controller.rs | 88 +++++++++++++------ rust/operator-binary/src/kerberos.rs | 38 ++++++--- 5 files changed, 187 insertions(+), 115 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8728c045..561bf316 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -146,7 +146,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -157,7 +157,7 @@ checksum = 
"6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -329,7 +329,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -444,7 +444,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -455,7 +455,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -466,7 +466,7 @@ checksum = "5060bb0febb73fa907273f8a7ed17ab4bf831d585eac835b28ec24a1e2460956" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -673,7 +673,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -992,9 +992,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c" +checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" dependencies = [ "equivalent", "hashbrown", @@ -1193,7 +1193,7 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -1532,7 +1532,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -1563,7 +1563,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -1601,9 +1601,9 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "3.1.0" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" +checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" dependencies = [ "toml_edit", ] @@ -1748,9 +1748,9 @@ dependencies = [ [[package]] name = "rstest" -version = "0.22.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b423f0e62bdd61734b67cd21ff50871dfaeb9cc74f869dcd6af974fbcb19936" +checksum = "0a2c585be59b6b5dd66a9d2084aa1d8bd52fbdb806eafdeffb52791147862035" dependencies = [ "futures 0.3.30", "futures-timer", @@ -1760,9 +1760,9 @@ dependencies = [ [[package]] name = "rstest_macros" -version = "0.22.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5e1711e7d14f74b12a58411c542185ef7fb7f2e7f8ee6e2940a883628522b42" +checksum = "825ea780781b15345a146be27eaefb05085e337e869bff01b4306a4fd4a9ad5a" dependencies = [ "cfg-if", "glob", @@ -1772,7 +1772,7 @@ dependencies = [ "regex", "relative-path", "rustc_version", - "syn 2.0.77", + "syn 2.0.79", "unicode-ident", ] @@ -1784,9 +1784,9 @@ checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc_version" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ "semver", ] @@ -1889,7 +1889,7 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -1964,7 +1964,7 @@ checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -1975,7 +1975,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] 
@@ -2103,7 +2103,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -2160,8 +2160,8 @@ dependencies = [ [[package]] name = "stackable-operator" -version = "0.76.0" -source = "git+https://github.com/stackabletech//operator-rs.git?branch=chore/add-kerberos-auth-prov#61c45e176504ee3ea3a5ae9036dd73be8b68d416" +version = "0.78.0" +source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.78.0#8b0172ded942499845ebf647ef1b17ccfc7bdbb3" dependencies = [ "chrono", "clap", @@ -2171,6 +2171,7 @@ dependencies = [ "dockerfile-parser", "either", "futures 0.3.30", + "indexmap", "json-patch", "k8s-openapi", "kube", @@ -2197,12 +2198,12 @@ dependencies = [ [[package]] name = "stackable-operator-derive" version = "0.3.1" -source = "git+https://github.com/stackabletech//operator-rs.git?branch=chore/add-kerberos-auth-prov#61c45e176504ee3ea3a5ae9036dd73be8b68d416" +source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.78.0#8b0172ded942499845ebf647ef1b17ccfc7bdbb3" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -2230,7 +2231,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -2252,9 +2253,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.77" +version = "2.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed" +checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590" dependencies = [ "proc-macro2", "quote", @@ -2278,7 +2279,7 @@ checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -2385,7 +2386,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 
2.0.77", + "syn 2.0.79", ] [[package]] @@ -2432,9 +2433,9 @@ checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" [[package]] name = "toml_edit" -version = "0.21.1" +version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" +checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ "indexmap", "toml_datetime", @@ -2521,7 +2522,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] @@ -2620,9 +2621,9 @@ checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" [[package]] name = "unicode-normalization" @@ -2724,7 +2725,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", "wasm-bindgen-shared", ] @@ -2746,7 +2747,7 @@ checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -2873,9 +2874,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.5.40" +version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" dependencies = [ "memchr", ] @@ -2904,7 +2905,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ 
"proc-macro2", "quote", - "syn 2.0.77", + "syn 2.0.79", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 774d117c..4fc428ca 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,7 +16,7 @@ clap = "4.5" futures = { version = "0.3", features = ["compat"] } indoc = "2.0" product-config = { git = "https://github.com/stackabletech/product-config.git", tag = "0.7.0" } -rstest = "0.22" +rstest = "0.23" semver = "1.0" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index 81f4611c..c1fd1d95 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -9,10 +9,13 @@ use std::collections::BTreeMap; use indoc::formatdoc; use snafu::{ResultExt, Snafu}; use stackable_operator::{ - builder::pod::{ - container::ContainerBuilder, - volume::{SecretFormat, SecretOperatorVolumeSourceBuilder, VolumeBuilder}, - PodBuilder, + builder::{ + self, + pod::{ + container::ContainerBuilder, + volume::{SecretFormat, SecretOperatorVolumeSourceBuilder, VolumeBuilder}, + PodBuilder, + }, }, client::Client, commons::authentication::{AuthenticationClass, AuthenticationClassProvider}, @@ -42,6 +45,14 @@ pub enum Error { #[snafu(display("only one authentication method is possible, TLS or Kerberos"))] MultipleAuthenticationMethodsProvided, + + #[snafu(display("failed to add needed volume"))] + AddVolume { source: builder::pod::Error }, + + #[snafu(display("failed to add needed volumeMount"))] + AddVolumeMount { + source: builder::pod::container::Error, + }, } /// Helper struct combining TLS settings for server and internal with the resolved AuthenticationClasses @@ -343,37 +354,49 @@ impl<'a> KafkaTlsSecurity<'a> { // add tls (server or client authentication volumes) if required if let Some(tls_server_secret_class) = self.get_tls_secret_class() { // We have to mount tls pem files for kcat (the mount can be used directly) - pod_builder.add_volume(Self::create_tls_volume( - 
&self.kafka.bootstrap_service_name(), - Self::STACKABLE_TLS_CERT_SERVER_DIR_NAME, - tls_server_secret_class, - )?); - cb_kcat_prober.add_volume_mount( - Self::STACKABLE_TLS_CERT_SERVER_DIR_NAME, - Self::STACKABLE_TLS_CERT_SERVER_DIR, - ); + pod_builder + .add_volume(Self::create_tls_volume( + &self.kafka.bootstrap_service_name(), + Self::STACKABLE_TLS_CERT_SERVER_DIR_NAME, + tls_server_secret_class, + )?) + .context(AddVolumeSnafu)?; + cb_kcat_prober + .add_volume_mount( + Self::STACKABLE_TLS_CERT_SERVER_DIR_NAME, + Self::STACKABLE_TLS_CERT_SERVER_DIR, + ) + .context(AddVolumeMountSnafu)?; // Keystores fore the kafka container - pod_builder.add_volume(Self::create_tls_keystore_volume( - &self.kafka.bootstrap_service_name(), - Self::STACKABLE_TLS_KEYSTORE_SERVER_DIR_NAME, - tls_server_secret_class, - )?); - cb_kafka.add_volume_mount( - Self::STACKABLE_TLS_KEYSTORE_SERVER_DIR_NAME, - Self::STACKABLE_TLS_KEYSTORE_SERVER_DIR, - ); + pod_builder + .add_volume(Self::create_tls_keystore_volume( + &self.kafka.bootstrap_service_name(), + Self::STACKABLE_TLS_KEYSTORE_SERVER_DIR_NAME, + tls_server_secret_class, + )?) + .context(AddVolumeSnafu)?; + cb_kafka + .add_volume_mount( + Self::STACKABLE_TLS_KEYSTORE_SERVER_DIR_NAME, + Self::STACKABLE_TLS_KEYSTORE_SERVER_DIR, + ) + .context(AddVolumeMountSnafu)?; } if let Some(tls_internal_secret_class) = self.tls_internal_secret_class() { - pod_builder.add_volume(Self::create_tls_keystore_volume( - &self.kafka.bootstrap_service_name(), - Self::STACKABLE_TLS_KEYSTORE_INTERNAL_DIR_NAME, - tls_internal_secret_class, - )?); - cb_kafka.add_volume_mount( - Self::STACKABLE_TLS_KEYSTORE_INTERNAL_DIR_NAME, - Self::STACKABLE_TLS_KEYSTORE_INTERNAL_DIR, - ); + pod_builder + .add_volume(Self::create_tls_keystore_volume( + &self.kafka.bootstrap_service_name(), + Self::STACKABLE_TLS_KEYSTORE_INTERNAL_DIR_NAME, + tls_internal_secret_class, + )?) 
+ .context(AddVolumeSnafu)?; + cb_kafka + .add_volume_mount( + Self::STACKABLE_TLS_KEYSTORE_INTERNAL_DIR_NAME, + Self::STACKABLE_TLS_KEYSTORE_INTERNAL_DIR, + ) + .context(AddVolumeMountSnafu)?; } Ok(()) diff --git a/rust/operator-binary/src/kafka_controller.rs b/rust/operator-binary/src/kafka_controller.rs index 5dc494ef..c15dd80a 100644 --- a/rust/operator-binary/src/kafka_controller.rs +++ b/rust/operator-binary/src/kafka_controller.rs @@ -21,6 +21,7 @@ use stackable_kafka_crd::{ }; use stackable_operator::{ builder::{ + self, configmap::ConfigMapBuilder, meta::ObjectMetaBuilder, pod::{ @@ -56,6 +57,7 @@ use stackable_operator::{ product_config_utils::{transform_all_roles_to_config, validate_all_roles_and_groups_config}, product_logging::{ self, + framework::LoggingError, spec::{ ConfigMapLogConfig, ContainerLogConfig, ContainerLogConfigChoice, CustomContainerLogConfig, @@ -321,6 +323,17 @@ pub enum Error { FailedToValidateAuthenticationMethod { source: stackable_kafka_crd::security::Error, }, + + #[snafu(display("failed to build vector container"))] + BuildVectorContainer { source: LoggingError }, + + #[snafu(display("failed to add needed volume"))] + AddVolume { source: builder::pod::Error }, + + #[snafu(display("failed to add needed volumeMount"))] + AddVolumeMount { + source: builder::pod::container::Error, + }, } type Result = std::result::Result; @@ -381,6 +394,9 @@ impl ReconcilerError for Error { Error::ResolveNamespace { .. } => None, Error::AddKerberosConfig { .. } => None, Error::FailedToValidateAuthenticationMethod { .. } => None, + Error::AddVolume { .. } => None, + Error::AddVolumeMount { .. } => None, + Error::BuildVectorContainer { .. } => None, } } } @@ -851,6 +867,7 @@ fn build_broker_rolegroup_statefulset( ..EnvVar::default() }]) .add_volume_mount("tmp", STACKABLE_TMP_DIR) + .context(AddVolumeMountSnafu)? 
.resources(merged_config.resources.clone().into()); let pvcs = merged_config.resources.storage.build_pvcs(); @@ -952,10 +969,15 @@ fn build_broker_rolegroup_statefulset( .add_env_vars(env) .add_container_ports(container_ports(kafka_security)) .add_volume_mount(LOG_DIRS_VOLUME_NAME, STACKABLE_DATA_DIR) + .context(AddVolumeMountSnafu)? .add_volume_mount("config", STACKABLE_CONFIG_DIR) + .context(AddVolumeMountSnafu)? .add_volume_mount("tmp", STACKABLE_TMP_DIR) + .context(AddVolumeMountSnafu)? .add_volume_mount("log-config", STACKABLE_LOG_CONFIG_DIR) + .context(AddVolumeMountSnafu)? .add_volume_mount("log", STACKABLE_LOG_DIR) + .context(AddVolumeMountSnafu)? .resources(merged_config.resources.clone().into()); // Use kcat sidecar for probing container status rather than the official Kafka tools, since they incur a lot of @@ -1001,23 +1023,27 @@ fn build_broker_rolegroup_statefulset( })), }) = merged_config.logging.containers.get(&Container::Kafka) { - pod_builder.add_volume(Volume { - name: "log-config".to_string(), - config_map: Some(ConfigMapVolumeSource { - name: config_map.into(), - ..ConfigMapVolumeSource::default() - }), - ..Volume::default() - }); + pod_builder + .add_volume(Volume { + name: "log-config".to_string(), + config_map: Some(ConfigMapVolumeSource { + name: config_map.into(), + ..ConfigMapVolumeSource::default() + }), + ..Volume::default() + }) + .context(AddVolumeSnafu)?; } else { - pod_builder.add_volume(Volume { - name: "log-config".to_string(), - config_map: Some(ConfigMapVolumeSource { - name: rolegroup_ref.object_name(), - ..ConfigMapVolumeSource::default() - }), - ..Volume::default() - }); + pod_builder + .add_volume(Volume { + name: "log-config".to_string(), + config_map: Some(ConfigMapVolumeSource { + name: rolegroup_ref.object_name(), + ..ConfigMapVolumeSource::default() + }), + ..Volume::default() + }) + .context(AddVolumeSnafu)?; } let metadata = ObjectMetaBuilder::new() @@ -1049,17 +1075,20 @@ fn build_broker_rolegroup_statefulset( }), 
..Volume::default() }) + .context(AddVolumeSnafu)? .add_volume(Volume { name: "tmp".to_string(), empty_dir: Some(EmptyDirVolumeSource::default()), ..Volume::default() }) + .context(AddVolumeSnafu)? .add_empty_dir_volume( "log", Some(product_logging::framework::calculate_log_volume_size_limit( &[MAX_KAFKA_LOG_FILES_SIZE], )), ) + .context(AddVolumeSnafu)? .service_account_name(sa_name) .security_context( PodSecurityContextBuilder::new() @@ -1071,18 +1100,21 @@ fn build_broker_rolegroup_statefulset( // Add vector container after kafka container to keep the defaulting into kafka container if merged_config.logging.enable_vector_agent { - pod_builder.add_container(product_logging::framework::vector_container( - resolved_product_image, - "config", - "log", - merged_config.logging.containers.get(&Container::Vector), - ResourceRequirementsBuilder::new() - .with_cpu_request("250m") - .with_cpu_limit("500m") - .with_memory_request("128Mi") - .with_memory_limit("128Mi") - .build(), - )); + pod_builder.add_container( + product_logging::framework::vector_container( + resolved_product_image, + "config", + "log", + merged_config.logging.containers.get(&Container::Vector), + ResourceRequirementsBuilder::new() + .with_cpu_request("250m") + .with_cpu_limit("500m") + .with_memory_request("128Mi") + .with_memory_limit("128Mi") + .build(), + ) + .context(BuildVectorContainerSnafu)?, + ); } add_graceful_shutdown_config(merged_config, &mut pod_builder).context(GracefulShutdownSnafu)?; diff --git a/rust/operator-binary/src/kerberos.rs b/rust/operator-binary/src/kerberos.rs index 46504a3e..30e2553b 100644 --- a/rust/operator-binary/src/kerberos.rs +++ b/rust/operator-binary/src/kerberos.rs @@ -1,13 +1,16 @@ use snafu::{ResultExt, Snafu}; use stackable_kafka_crd::{security::KafkaTlsSecurity, KafkaCluster, KafkaRole}; use stackable_operator::{ - builder::pod::{ - container::ContainerBuilder, - volume::{ - SecretOperatorVolumeSourceBuilder, SecretOperatorVolumeSourceBuilderError, - 
VolumeBuilder, + builder::{ + self, + pod::{ + container::ContainerBuilder, + volume::{ + SecretOperatorVolumeSourceBuilder, SecretOperatorVolumeSourceBuilderError, + VolumeBuilder, + }, + PodBuilder, }, - PodBuilder, }, kube::ResourceExt, }; @@ -15,9 +18,17 @@ use stackable_operator::{ #[derive(Snafu, Debug)] pub enum Error { #[snafu(display("failed to add Kerberos secret volume"))] - AddKerberosSecretVolume { + KerberosSecretVolume { source: SecretOperatorVolumeSourceBuilderError, }, + + #[snafu(display("failed to add needed volume"))] + AddVolume { source: builder::pod::Error }, + + #[snafu(display("failed to add needed volumeMount"))] + AddVolumeMount { + source: builder::pod::container::Error, + }, } pub fn add_kerberos_pod_config( @@ -36,20 +47,25 @@ pub fn add_kerberos_pod_config( .with_pod_scope() .with_kerberos_service_name(role.kerberos_service_name()) .build() - .context(AddKerberosSecretVolumeSnafu)?; + .context(KerberosSecretVolumeSnafu)?; pb.add_volume( VolumeBuilder::new("kerberos") .ephemeral(kerberos_secret_operator_volume) .build(), - ); - cb_kcat_prober.add_volume_mount("kerberos", "/stackable/kerberos"); + ) + .context(AddVolumeSnafu)?; + cb_kcat_prober + .add_volume_mount("kerberos", "/stackable/kerberos") + .context(AddVolumeMountSnafu)?; cb_kcat_prober.add_env_var("KRB5_CONFIG", "/stackable/kerberos/krb5.conf"); cb_kcat_prober.add_env_var( "KAFKA_OPTS", "-Djava.security.krb5.conf=/stackable/kerberos/krb5.conf", ); - cb_kafka.add_volume_mount("kerberos", "/stackable/kerberos"); + cb_kafka + .add_volume_mount("kerberos", "/stackable/kerberos") + .context(AddVolumeMountSnafu)?; cb_kafka.add_env_var("KRB5_CONFIG", "/stackable/kerberos/krb5.conf"); cb_kafka.add_env_var( "KAFKA_OPTS", From eb405a98a78d8f13e3cfd1ec254d6426933d065b Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Mon, 30 Sep 2024 14:56:36 +0200 Subject: [PATCH 16/49] added docs/example --- .../kafka/pages/usage-guide/security.adoc | 48 +++++++++++++++++++ 1 file changed, 48 
insertions(+) diff --git a/docs/modules/kafka/pages/usage-guide/security.adoc b/docs/modules/kafka/pages/usage-guide/security.adoc index 1752d1f6..4e5d2025 100644 --- a/docs/modules/kafka/pages/usage-guide/security.adoc +++ b/docs/modules/kafka/pages/usage-guide/security.adoc @@ -53,6 +53,10 @@ You can create your own secrets and reference them e.g. in the `spec.clusterConf == Authentication The internal or broker-to-broker communication is authenticated via TLS. +For client-to-server communication, authentication can be achieved with either TLS or Kerberos. + +=== TLS + In order to enforce TLS authentication for client-to-server communication, you can set an `AuthenticationClass` reference in the custom resource provided by the xref:commons-operator:index.adoc[Commons Operator]. [source,yaml] @@ -101,6 +105,50 @@ spec: <3> The reference to a `SecretClass`. <4> The `SecretClass` that is referenced by the `AuthenticationClass` in order to provide certificates. +=== Kerberos + +Similarly, you can set an `AuthenticationClass` reference for a Kerberos authentication provider: + +[source,yaml] +---- +apiVersion: authentication.stackable.tech/v1alpha1 +kind: AuthenticationClass +metadata: + name: kafka-client-kerberos # <2> +spec: + provider: + kerberos: + kerberosSecretClass: kafka-client-auth-secret # <3> +--- +apiVersion: secrets.stackable.tech/v1alpha1 +kind: SecretClass +metadata: + name: kafka-client-auth-secret # <4> +spec: + backend: + kerberosKeytab: + ... +--- +apiVersion: kafka.stackable.tech/v1alpha1 +kind: KafkaCluster +metadata: + name: simple-kafka +spec: + image: + productVersion: 3.7.1 + clusterConfig: + authentication: + - authenticationClass: kafka-client-kerberos # <1> + zookeeperConfigMapName: simple-kafka-znode + brokers: + roleGroups: + default: + replicas: 3 +---- +<1> The `clusterConfig.authentication.authenticationClass` can be set to use Kerberos for authentication. This is optional. 
+<2> The referenced `AuthenticationClass` that references a `SecretClass` to provide Kerberos keytabs.
+<3> The reference to a `SecretClass`.
+<4> The `SecretClass` that is referenced by the `AuthenticationClass` in order to provide keytabs.

== [[authorization]]Authorization

From 259fa72ecd85ae87a67fc6057026b8e4a54bf7cd Mon Sep 17 00:00:00 2001
From: Andrew Kenworthy
Date: Mon, 30 Sep 2024 16:00:55 +0200
Subject: [PATCH 17/49] improved comments

---
 deploy/helm/kafka-operator/crds/crds.yaml | 2 ++
 rust/crd/src/authentication.rs | 4 ++++
 rust/crd/src/listener.rs | 4 ++--
 3 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/deploy/helm/kafka-operator/crds/crds.yaml b/deploy/helm/kafka-operator/crds/crds.yaml
index dc557069..16397930 100644
--- a/deploy/helm/kafka-operator/crds/crds.yaml
+++ b/deploy/helm/kafka-operator/crds/crds.yaml
@@ -551,6 +551,8 @@ spec:
 This will override the server TLS settings (if set) in `spec.clusterConfig.tls.serverSecretClass`.

 ## Kerberos provider
+
+ This affects client connections and also sets internal connections to use TLS for encryption. This setting is used to reference an `AuthenticationClass` and in turn, a `SecretClass` that is used to create keytabs.
 type: string
 required:
 - authenticationClass
diff --git a/rust/crd/src/authentication.rs b/rust/crd/src/authentication.rs
index cebff7d8..27ba993f 100644
--- a/rust/crd/src/authentication.rs
+++ b/rust/crd/src/authentication.rs
@@ -44,6 +44,10 @@ pub struct KafkaAuthentication {
 /// This will override the server TLS settings (if set) in `spec.clusterConfig.tls.serverSecretClass`.
 ///
 /// ## Kerberos provider
+///
+/// This affects client connections and also sets internal connections to use TLS for encryption.
+/// This setting is used to reference an `AuthenticationClass` and in turn, a `SecretClass` that is
+/// used to create keytabs.
pub authentication_class: String, } diff --git a/rust/crd/src/listener.rs b/rust/crd/src/listener.rs index 82d9d10e..ee778164 100644 --- a/rust/crd/src/listener.rs +++ b/rust/crd/src/listener.rs @@ -114,7 +114,7 @@ pub fn get_kafka_listener_config( listener_security_protocol_map .insert(KafkaListenerName::ClientAuth, KafkaListenerProtocol::Ssl); } else if kafka_security.has_kerberos_enabled() { - // 2) Kerberos and TLS authentication classes are mutually exclusive and Kerberos takes preference + // 2) Kerberos and TLS authentication classes are mutually exclusive listeners.push(KafkaListener { name: KafkaListenerName::Client, host: LISTENER_LOCAL_ADDRESS.to_string(), @@ -159,7 +159,7 @@ pub fn get_kafka_listener_config( // INTERNAL if kafka_security.has_kerberos_enabled() { - // 5) Kerberos and TLS authentication classes are mutually exclusive and Kerberos takes preference + // 5) Kerberos and TLS authentication classes are mutually exclusive listeners.push(KafkaListener { name: KafkaListenerName::Internal, host: LISTENER_LOCAL_ADDRESS.to_string(), From edd2286b399381b5e9de68085c95f140bb767d5d Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy <1712947+adwk67@users.noreply.github.com> Date: Tue, 8 Oct 2024 10:01:50 +0200 Subject: [PATCH 18/49] Update rust/crd/src/authentication.rs Co-authored-by: Sebastian Bernauer --- rust/crd/src/authentication.rs | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/rust/crd/src/authentication.rs b/rust/crd/src/authentication.rs index 27ba993f..d1c582d8 100644 --- a/rust/crd/src/authentication.rs +++ b/rust/crd/src/authentication.rs @@ -116,20 +116,9 @@ impl ResolvedAuthenticationClasses { match &auth_class.spec.provider { // explicitly list each branch so new elements do not get overlooked AuthenticationClassProvider::Tls(_) => {} - AuthenticationClassProvider::Kerberos(_) => {} - AuthenticationClassProvider::Static(_) => { - return Err(Error::AuthenticationProviderNotSupported { - 
authentication_class: ObjectRef::from_obj(auth_class), - provider: auth_class.spec.provider.to_string(), - }) - } - AuthenticationClassProvider::Ldap(_) => { - return Err(Error::AuthenticationProviderNotSupported { - authentication_class: ObjectRef::from_obj(auth_class), - provider: auth_class.spec.provider.to_string(), - }) - } - AuthenticationClassProvider::Oidc(_) => { + AuthenticationClassProvider::Static(_) + | AuthenticationClassProvider::Ldap(_) + | AuthenticationClassProvider::Oidc(_) => { return Err(Error::AuthenticationProviderNotSupported { authentication_class: ObjectRef::from_obj(auth_class), provider: auth_class.spec.provider.to_string(), From 89a88db5f968d56b285fabe29912787cb673bfe8 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy <1712947+adwk67@users.noreply.github.com> Date: Tue, 8 Oct 2024 10:02:20 +0200 Subject: [PATCH 19/49] Update rust/crd/src/lib.rs Co-authored-by: Sebastian Bernauer --- rust/crd/src/lib.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rust/crd/src/lib.rs b/rust/crd/src/lib.rs index b40afa8c..5d6636cf 100644 --- a/rust/crd/src/lib.rs +++ b/rust/crd/src/lib.rs @@ -328,7 +328,8 @@ impl KafkaRole { } /// A Kerberos principal has three parts, with the form username/fully.qualified.domain.name@YOUR-REALM.COM. - /// We only have one role and will use "kafka" everywhere (which e.g. differs from the current hdfs implementation). + /// We only have one role and will use "kafka" everywhere (which e.g. differs from the current hdfs implementation, + /// but is similar to HBase). 
pub fn kerberos_service_name(&self) -> &'static str { "kafka" } From fcf34b114658a53c9183608703d6606359a9945c Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy <1712947+adwk67@users.noreply.github.com> Date: Tue, 8 Oct 2024 10:04:08 +0200 Subject: [PATCH 20/49] Update rust/operator-binary/src/kafka_controller.rs Co-authored-by: Sebastian Bernauer --- rust/operator-binary/src/kafka_controller.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/rust/operator-binary/src/kafka_controller.rs b/rust/operator-binary/src/kafka_controller.rs index c15dd80a..f03df12b 100644 --- a/rust/operator-binary/src/kafka_controller.rs +++ b/rust/operator-binary/src/kafka_controller.rs @@ -452,7 +452,13 @@ pub async fn reconcile_kafka(kafka: Arc, ctx: Arc) -> Result< .await .context(FailedToInitializeSecurityContextSnafu)?; - tracing::debug!("Security settings: kerberos enabled/secret-class: {}/{:#?} tls enabled/client-auth-class: {}/{:#?}", + tracing::debug!( + kerberos_enabled = kafka_security.has_kerberos_enabled(), + kerberos_secret_class = ?kafka_security.kerberos_secret_class(), + tls_enabled = kafka_security.tls_enabled(), + tls_client_authentication_class = ?kafka_security.tls_client_authentication_class(), + "The following security settings are used" + ); kafka_security.has_kerberos_enabled(), kafka_security.kerberos_secret_class(), kafka_security.tls_enabled(), From 5633a52bcfcef0e705a40fb4544d2bfc22d358c0 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy <1712947+adwk67@users.noreply.github.com> Date: Tue, 8 Oct 2024 10:05:06 +0200 Subject: [PATCH 21/49] Update rust/operator-binary/src/kafka_controller.rs Co-authored-by: Sebastian Bernauer --- rust/operator-binary/src/kafka_controller.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rust/operator-binary/src/kafka_controller.rs b/rust/operator-binary/src/kafka_controller.rs index f03df12b..38da8667 100644 --- a/rust/operator-binary/src/kafka_controller.rs +++ 
b/rust/operator-binary/src/kafka_controller.rs @@ -729,8 +729,8 @@ fn build_broker_rolegroup_config_map( })?, ); - tracing::debug!("Applied server config: [{:#?}]", server_cfg); - tracing::debug!("Applied JVM config: [{:#?}]", jvm_sec_props); + tracing::debug!(?server_cfg, "Applied server config"); + tracing::debug!(?jvm_sec_props, "Applied JVM config"); extend_role_group_config_map( rolegroup, From c2f3be0ab6bfddd207f0da12a5fd51bbc144d25d Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Tue, 8 Oct 2024 10:11:12 +0200 Subject: [PATCH 22/49] fixed review suggestions --- rust/crd/src/authentication.rs | 2 +- rust/operator-binary/src/kafka_controller.rs | 4 ---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/rust/crd/src/authentication.rs b/rust/crd/src/authentication.rs index d1c582d8..5f276b81 100644 --- a/rust/crd/src/authentication.rs +++ b/rust/crd/src/authentication.rs @@ -115,7 +115,7 @@ impl ResolvedAuthenticationClasses { for auth_class in &self.resolved_authentication_classes { match &auth_class.spec.provider { // explicitly list each branch so new elements do not get overlooked - AuthenticationClassProvider::Tls(_) => {} + AuthenticationClassProvider::Tls(_) | AuthenticationClassProvider::Kerberos(_) => {} AuthenticationClassProvider::Static(_) | AuthenticationClassProvider::Ldap(_) | AuthenticationClassProvider::Oidc(_) => { diff --git a/rust/operator-binary/src/kafka_controller.rs b/rust/operator-binary/src/kafka_controller.rs index cb732044..882bc72d 100644 --- a/rust/operator-binary/src/kafka_controller.rs +++ b/rust/operator-binary/src/kafka_controller.rs @@ -476,10 +476,6 @@ pub async fn reconcile_kafka(kafka: Arc, ctx: Arc) -> Result< tls_client_authentication_class = ?kafka_security.tls_client_authentication_class(), "The following security settings are used" ); - kafka_security.has_kerberos_enabled(), - kafka_security.kerberos_secret_class(), - kafka_security.tls_enabled(), - kafka_security.tls_client_authentication_class()); 
kafka_security .validate_authentication_methods() From a88146c0cccadb676f9d673d917ba382926eee13 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Tue, 8 Oct 2024 10:13:22 +0200 Subject: [PATCH 23/49] formatting: new lines between enum elements --- rust/crd/src/listener.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/rust/crd/src/listener.rs b/rust/crd/src/listener.rs index cb8ff01b..20455ba1 100644 --- a/rust/crd/src/listener.rs +++ b/rust/crd/src/listener.rs @@ -20,9 +20,11 @@ pub enum KafkaListenerProtocol { /// Unencrypted and unauthenticated HTTP connections #[strum(serialize = "PLAINTEXT")] Plaintext, + /// Encrypted and server-authenticated HTTPS connections #[strum(serialize = "SSL")] Ssl, + /// Kerberos authentication #[strum(serialize = "SASL_SSL")] SaslSsl, From c32b8282396cfcbf8841937a044d5e5ea6fbdbc0 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Tue, 8 Oct 2024 10:21:38 +0200 Subject: [PATCH 24/49] review suggestions --- rust/crd/src/authentication.rs | 7 ++++--- rust/crd/src/listener.rs | 2 +- rust/crd/src/security.rs | 3 ++- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/rust/crd/src/authentication.rs b/rust/crd/src/authentication.rs index 5f276b81..d6db99f0 100644 --- a/rust/crd/src/authentication.rs +++ b/rust/crd/src/authentication.rs @@ -9,7 +9,7 @@ use stackable_operator::{ schemars::{self, JsonSchema}, }; -const SUPPORTED_AUTHENTICATION_CLASS_PROVIDERS: [&str; 1] = ["TLS"]; +pub const SUPPORTED_AUTHENTICATION_CLASS_PROVIDERS: [&str; 2] = ["TLS", "Kerberos"]; #[derive(Snafu, Debug)] pub enum Error { @@ -18,9 +18,10 @@ pub enum Error { source: stackable_operator::client::Error, authentication_class: ObjectRef, }, - // TODO: Adapt message if multiple authentication classes are supported - #[snafu(display("only one authentication class is currently supported. 
Possible Authentication class providers are {SUPPORTED_AUTHENTICATION_CLASS_PROVIDERS:?}"))] + + #[snafu(display("only one authentication class at a time is currently supported. Possible Authentication class providers are {SUPPORTED_AUTHENTICATION_CLASS_PROVIDERS:?}"))] MultipleAuthenticationClassesProvided, + #[snafu(display( "failed to use authentication provider [{provider}] for authentication class [{authentication_class}] - supported providers: {SUPPORTED_AUTHENTICATION_CLASS_PROVIDERS:?}", ))] diff --git a/rust/crd/src/listener.rs b/rust/crd/src/listener.rs index 20455ba1..962602d0 100644 --- a/rust/crd/src/listener.rs +++ b/rust/crd/src/listener.rs @@ -24,7 +24,7 @@ pub enum KafkaListenerProtocol { /// Encrypted and server-authenticated HTTPS connections #[strum(serialize = "SSL")] Ssl, - + /// Kerberos authentication #[strum(serialize = "SASL_SSL")] SaslSsl, diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index aa9468bb..a3b2b6f3 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -26,6 +26,7 @@ use stackable_operator::{ utils::COMMON_BASH_TRAP_FUNCTIONS, }; +use crate::authentication::SUPPORTED_AUTHENTICATION_CLASS_PROVIDERS; use crate::{ authentication::{self, ResolvedAuthenticationClasses}, listener::{self, KafkaListenerConfig}, @@ -44,7 +45,7 @@ pub enum Error { source: stackable_operator::builder::pod::volume::SecretOperatorVolumeSourceBuilderError, }, - #[snafu(display("only one authentication method is possible, TLS or Kerberos"))] + #[snafu(display("only one authentication class at a time is currently supported. 
Possible Authentication class providers are {SUPPORTED_AUTHENTICATION_CLASS_PROVIDERS:?}"))] MultipleAuthenticationMethodsProvided, #[snafu(display("failed to add needed volume"))] From ad7aee62a5d741b6b9a24455b184922b6b9dfd12 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Tue, 8 Oct 2024 13:37:24 +0200 Subject: [PATCH 25/49] add use-client-tls dimension and cleanup test --- .../kuttl/kerberos/20-install-kafka.yaml.j2 | 24 ------------------- tests/test-definition.yaml | 1 + 2 files changed, 1 insertion(+), 24 deletions(-) diff --git a/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 b/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 index 4fbf08cb..bed28a1a 100644 --- a/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 +++ b/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 @@ -12,30 +12,6 @@ commands: spec: clusterRef: name: test-zk -{% if test_scenario['values']['use-client-auth-tls'] == 'true' %} - --- - apiVersion: authentication.stackable.tech/v1alpha1 - kind: AuthenticationClass - metadata: - name: test-kafka-client-auth-tls - spec: - provider: - tls: - clientCertSecretClass: test-kafka-client-auth-tls - --- - apiVersion: secrets.stackable.tech/v1alpha1 - kind: SecretClass - metadata: - name: test-kafka-client-auth-tls - spec: - backend: - autoTls: - ca: - secret: - name: secret-provisioner-tls-kafka-client-auth-ca - namespace: default - autoGenerate: true -{% endif %} --- apiVersion: authentication.stackable.tech/v1alpha1 kind: AuthenticationClass diff --git a/tests/test-definition.yaml b/tests/test-definition.yaml index 2584bb4b..59ba09cc 100644 --- a/tests/test-definition.yaml +++ b/tests/test-definition.yaml @@ -96,6 +96,7 @@ tests: - krb5 - kerberos-realm - kerberos-backend + - use-client-tls - openshift suites: From ef84d73d4d37a0c7a80da2d6e2783638a796e97a Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Tue, 8 Oct 2024 13:58:31 +0200 Subject: [PATCH 26/49] add constants for kerberos paths --- rust/crd/src/lib.rs | 
3 +++ rust/crd/src/security.rs | 16 ++++++++++++---- rust/operator-binary/src/kerberos.rs | 13 +++++++------ 3 files changed, 22 insertions(+), 10 deletions(-) diff --git a/rust/crd/src/lib.rs b/rust/crd/src/lib.rs index ff4e3a2c..bab8b11a 100644 --- a/rust/crd/src/lib.rs +++ b/rust/crd/src/lib.rs @@ -60,6 +60,9 @@ pub const STACKABLE_DATA_DIR: &str = "/stackable/data"; pub const STACKABLE_CONFIG_DIR: &str = "/stackable/config"; pub const STACKABLE_LOG_CONFIG_DIR: &str = "/stackable/log_config"; pub const STACKABLE_LOG_DIR: &str = "/stackable/log"; +// kerberos +pub const STACKABLE_KERBEROS_DIR: &str = "/stackable/kerberos"; +pub const STACKABLE_KERBEROS_KRB5_PATH: &str = "/stackable/kerberos/krb5.conf"; const DEFAULT_BROKER_GRACEFUL_SHUTDOWN_TIMEOUT: Duration = Duration::from_minutes_unchecked(30); diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index a3b2b6f3..e6a30211 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -26,7 +26,9 @@ use stackable_operator::{ utils::COMMON_BASH_TRAP_FUNCTIONS, }; -use crate::authentication::SUPPORTED_AUTHENTICATION_CLASS_PROVIDERS; +use crate::{ + authentication::SUPPORTED_AUTHENTICATION_CLASS_PROVIDERS, STACKABLE_KERBEROS_KRB5_PATH, +}; use crate::{ authentication::{self, ResolvedAuthenticationClasses}, listener::{self, KafkaListenerConfig}, @@ -265,7 +267,13 @@ impl KafkaTlsSecurity { args.push("-euo".to_string()); args.push("pipefail".to_string()); args.push("-c".to_string()); - args.push("export KERBEROS_REALM=$(grep -oP 'default_realm = \\K.*' /stackable/kerberos/krb5.conf);".to_string()); + args.push( + format!( + "export KERBEROS_REALM=$(grep -oP 'default_realm = \\K.*' {});", + STACKABLE_KERBEROS_KRB5_PATH + ) + .to_string(), + ); args.push("/stackable/kcat".to_string()); args.push("-b".to_string()); args.push(format!("{pod_fqdn}:{port}")); @@ -311,8 +319,8 @@ impl KafkaTlsSecurity { create_vector_shutdown_file_command = create_vector_shutdown_file_command(STACKABLE_LOG_DIR), 
set_realm_env = match kerberos_enabled { - true => "export KERBEROS_REALM=$(grep -oP 'default_realm = \\K.*' /stackable/kerberos/krb5.conf)", - false => "", + true => format!("export KERBEROS_REALM=$(grep -oP 'default_realm = \\K.*' {})", STACKABLE_KERBEROS_KRB5_PATH), + false => "".to_string(), }, listeners = kafka_listeners.listeners(), advertised_listeners = kafka_listeners.advertised_listeners(), diff --git a/rust/operator-binary/src/kerberos.rs b/rust/operator-binary/src/kerberos.rs index 30e2553b..677c254f 100644 --- a/rust/operator-binary/src/kerberos.rs +++ b/rust/operator-binary/src/kerberos.rs @@ -1,5 +1,6 @@ use snafu::{ResultExt, Snafu}; use stackable_kafka_crd::{security::KafkaTlsSecurity, KafkaCluster, KafkaRole}; +use stackable_kafka_crd::{STACKABLE_KERBEROS_DIR, STACKABLE_KERBEROS_KRB5_PATH}; use stackable_operator::{ builder::{ self, @@ -55,21 +56,21 @@ pub fn add_kerberos_pod_config( ) .context(AddVolumeSnafu)?; cb_kcat_prober - .add_volume_mount("kerberos", "/stackable/kerberos") + .add_volume_mount("kerberos", STACKABLE_KERBEROS_DIR) .context(AddVolumeMountSnafu)?; - cb_kcat_prober.add_env_var("KRB5_CONFIG", "/stackable/kerberos/krb5.conf"); + cb_kcat_prober.add_env_var("KRB5_CONFIG", STACKABLE_KERBEROS_KRB5_PATH); cb_kcat_prober.add_env_var( "KAFKA_OPTS", - "-Djava.security.krb5.conf=/stackable/kerberos/krb5.conf", + format!("-Djava.security.krb5.conf={}", STACKABLE_KERBEROS_KRB5_PATH), ); cb_kafka - .add_volume_mount("kerberos", "/stackable/kerberos") + .add_volume_mount("kerberos", STACKABLE_KERBEROS_DIR) .context(AddVolumeMountSnafu)?; - cb_kafka.add_env_var("KRB5_CONFIG", "/stackable/kerberos/krb5.conf"); + cb_kafka.add_env_var("KRB5_CONFIG", STACKABLE_KERBEROS_KRB5_PATH); cb_kafka.add_env_var( "KAFKA_OPTS", - "-Djava.security.krb5.conf=/stackable/kerberos/krb5.conf", + format!("-Djava.security.krb5.conf={}", STACKABLE_KERBEROS_KRB5_PATH), ); } From 2e20d5aa14790d410eb0a3fb04cd76ead1b9083c Mon Sep 17 00:00:00 2001 From: Sebastian Bernauer 
Date: Tue, 8 Oct 2024 14:21:59 +0200 Subject: [PATCH 27/49] test: Update kerberos tests to always use TLS --- .../templates/kuttl/kerberos/20-install-kafka.yaml.j2 | 7 ++----- tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 | 11 +++++------ .../{30-access-kafka.yaml.j2 => 30-access-kafka.yaml} | 0 tests/test-definition.yaml | 1 - 4 files changed, 7 insertions(+), 12 deletions(-) rename tests/templates/kuttl/kerberos/{30-access-kafka.yaml.j2 => 30-access-kafka.yaml} (100%) diff --git a/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 b/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 index bed28a1a..0ab5f794 100644 --- a/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 +++ b/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 @@ -31,18 +31,15 @@ commands: productVersion: "{{ test_scenario['values']['kafka'] }}" pullPolicy: IfNotPresent clusterConfig: + zookeeperConfigMapName: test-kafka-znode authentication: - authenticationClass: kerberos-auth-$NAMESPACE tls: -{% if test_scenario['values']['use-client-tls'] == 'true' %} + # Kerberos requires the use of server and internal TLS! 
serverSecretClass: tls -{% else %} - serverSecretClass: null -{% endif %} {% if lookup('env', 'VECTOR_AGGREGATOR') %} vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} - zookeeperConfigMapName: test-kafka-znode brokers: config: logging: diff --git a/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 b/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 index 52c2542a..5c2b356e 100644 --- a/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 +++ b/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 @@ -21,8 +21,8 @@ spec: volumeMounts: - name: script mountPath: /tmp/script - - mountPath: /stackable/tls_cert_server_mount - name: tls-cert-server-mount + - mountPath: /stackable/tls-ca-cert-mount + name: tls-ca-cert-mount - name: kerberos mountPath: /stackable/kerberos volumes: @@ -44,14 +44,13 @@ spec: resources: requests: storage: "1" - - name: tls-cert-server-mount + - name: tls-ca-cert-mount ephemeral: volumeClaimTemplate: metadata: annotations: secrets.stackable.tech/class: tls - secrets.stackable.tech/scope: pod,node,service=kafka - creationTimestamp: null + secrets.stackable.tech/scope: pod spec: accessModes: - ReadWriteOnce @@ -83,7 +82,7 @@ data: metadata.broker.list=$BROKER\n\ auto.offset.reset=beginning\n\ security.protocol=SASL_SSL\n\ - ssl.ca.location=/stackable/tls_cert_server_mount/ca.crt\n\ + ssl.ca.location=/stackable/tls-ca-cert-mount/ca.crt\n\ sasl.kerberos.keytab=/stackable/kerberos/keytab\n\ sasl.kerberos.service.name=kafka\n\ sasl.kerberos.principal=developer/access-kafka.$NAMESPACE.svc.cluster.local@{{ test_scenario['values']['kerberos-realm'] }}\n\ diff --git a/tests/templates/kuttl/kerberos/30-access-kafka.yaml.j2 b/tests/templates/kuttl/kerberos/30-access-kafka.yaml similarity index 100% rename from tests/templates/kuttl/kerberos/30-access-kafka.yaml.j2 rename to tests/templates/kuttl/kerberos/30-access-kafka.yaml diff --git a/tests/test-definition.yaml b/tests/test-definition.yaml index 59ba09cc..2584bb4b 100644 
--- a/tests/test-definition.yaml +++ b/tests/test-definition.yaml @@ -96,7 +96,6 @@ tests: - krb5 - kerberos-realm - kerberos-backend - - use-client-tls - openshift suites: From ed0a7df50f9eb13c84206b4890bc797aed18fe72 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Tue, 8 Oct 2024 15:36:10 +0200 Subject: [PATCH 28/49] added check that TLS is enabled for Kerberos --- docs/modules/kafka/pages/usage-guide/security.adoc | 5 +++++ rust/crd/src/authentication.rs | 2 +- rust/crd/src/security.rs | 12 +++++++++++- 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/docs/modules/kafka/pages/usage-guide/security.adoc b/docs/modules/kafka/pages/usage-guide/security.adoc index 48cee199..e3931562 100644 --- a/docs/modules/kafka/pages/usage-guide/security.adoc +++ b/docs/modules/kafka/pages/usage-guide/security.adoc @@ -139,6 +139,8 @@ spec: clusterConfig: authentication: - authenticationClass: kafka-client-kerberos # <1> + tls: + serverSecretClass: tls # <5> zookeeperConfigMapName: simple-kafka-znode brokers: roleGroups: @@ -149,6 +151,9 @@ spec: <2> The referenced `AuthenticationClass` that references a `SecretClass` to provide Kerberos keytabs. <3> The reference to a `SecretClass`. <4> The `SecretClass` that is referenced by the `AuthenticationClass` in order to provide keytabs. +<5> The SecretClass that will be used for encryption. + +NOTE: When Kerberos is enabled it is also required to enable TLS for maximum security. == [[authorization]]Authorization diff --git a/rust/crd/src/authentication.rs b/rust/crd/src/authentication.rs index d6db99f0..9f00d884 100644 --- a/rust/crd/src/authentication.rs +++ b/rust/crd/src/authentication.rs @@ -46,7 +46,7 @@ pub struct KafkaAuthentication { /// /// ## Kerberos provider /// - /// This affects client connections and also sets internal connections to use TLS for encryption. + /// This affects client connections and also requires TLS for encryption. 
/// This setting is used to reference an `AuthenticationClass` and in turn, a `SecretClass` that is /// used to create keytabs). pub authentication_class: String, diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index e6a30211..16a2f6e5 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -7,7 +7,7 @@ use std::collections::BTreeMap; use indoc::formatdoc; -use snafu::{ResultExt, Snafu}; +use snafu::{ensure, ResultExt, Snafu}; use stackable_operator::{ builder::{ self, @@ -57,6 +57,9 @@ pub enum Error { AddVolumeMount { source: builder::pod::container::Error, }, + + #[snafu(display("kerberos enablement requires TLS activation"))] + KerberosRequiresTls, } /// Helper struct combining TLS settings for server and internal with the resolved AuthenticationClasses @@ -218,6 +221,13 @@ impl KafkaTlsSecurity { if self.tls_client_authentication_class().is_some() && self.has_kerberos_enabled() { return Err(Error::MultipleAuthenticationMethodsProvided); } + + // When users enable Kerberos we require them to also enable TLS for maximum security and + // to limit the number of combinations we need to support. + if self.has_kerberos_enabled() || self.tls_client_authentication_class().is_some() { + ensure!(self.server_secret_class.is_some(), KerberosRequiresTlsSnafu); + } + Ok(()) } From 0ad749dd7ec98160d06ab7253ad42e17557c2ed1 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Tue, 8 Oct 2024 15:40:20 +0200 Subject: [PATCH 29/49] regenerate charts --- deploy/helm/kafka-operator/crds/crds.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deploy/helm/kafka-operator/crds/crds.yaml b/deploy/helm/kafka-operator/crds/crds.yaml index a3e9ae4c..671865ac 100644 --- a/deploy/helm/kafka-operator/crds/crds.yaml +++ b/deploy/helm/kafka-operator/crds/crds.yaml @@ -568,7 +568,7 @@ spec: ## Kerberos provider - This affects client connections and also sets internal connections to use TLS for encryption. 
This setting is used to reference an `AuthenticationClass` and in turn, a `SecretClass` that is used to create keytabs). + This affects client connections and also requires TLS for encryption. This setting is used to reference an `AuthenticationClass` and in turn, a `SecretClass` that is used to create keytabs). type: string required: - authenticationClass From d0c333a11fb6a627169884ffb87ea06a0f9db5b6 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Tue, 8 Oct 2024 15:41:30 +0200 Subject: [PATCH 30/49] formatting --- rust/crd/src/security.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index 16a2f6e5..79c4d08f 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -227,7 +227,7 @@ impl KafkaTlsSecurity { if self.has_kerberos_enabled() || self.tls_client_authentication_class().is_some() { ensure!(self.server_secret_class.is_some(), KerberosRequiresTlsSnafu); } - + Ok(()) } From 8aeb270f41edd866295624b4cdbe387d53bb8020 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Tue, 8 Oct 2024 16:29:01 +0200 Subject: [PATCH 31/49] corrected validation check --- rust/crd/src/security.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index 79c4d08f..2d9d0bc1 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -224,7 +224,7 @@ impl KafkaTlsSecurity { // When users enable Kerberos we require them to also enable TLS for maximum security and // to limit the number of combinations we need to support. 
- if self.has_kerberos_enabled() || self.tls_client_authentication_class().is_some() { + if self.has_kerberos_enabled() { ensure!(self.server_secret_class.is_some(), KerberosRequiresTlsSnafu); } From 602b891d70aa72258f58cc6a00542cd3caf1caa5 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy <1712947+adwk67@users.noreply.github.com> Date: Wed, 9 Oct 2024 11:06:11 +0200 Subject: [PATCH 32/49] Update rust/operator-binary/src/kerberos.rs Co-authored-by: Sebastian Bernauer --- rust/operator-binary/src/kerberos.rs | 25 +++++++++---------------- 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/rust/operator-binary/src/kerberos.rs b/rust/operator-binary/src/kerberos.rs index 677c254f..7389a201 100644 --- a/rust/operator-binary/src/kerberos.rs +++ b/rust/operator-binary/src/kerberos.rs @@ -55,23 +55,16 @@ pub fn add_kerberos_pod_config( .build(), ) .context(AddVolumeSnafu)?; - cb_kcat_prober - .add_volume_mount("kerberos", STACKABLE_KERBEROS_DIR) - .context(AddVolumeMountSnafu)?; - cb_kcat_prober.add_env_var("KRB5_CONFIG", STACKABLE_KERBEROS_KRB5_PATH); - cb_kcat_prober.add_env_var( - "KAFKA_OPTS", - format!("-Djava.security.krb5.conf={}", STACKABLE_KERBEROS_KRB5_PATH), - ); - cb_kafka - .add_volume_mount("kerberos", STACKABLE_KERBEROS_DIR) - .context(AddVolumeMountSnafu)?; - cb_kafka.add_env_var("KRB5_CONFIG", STACKABLE_KERBEROS_KRB5_PATH); - cb_kafka.add_env_var( - "KAFKA_OPTS", - format!("-Djava.security.krb5.conf={}", STACKABLE_KERBEROS_KRB5_PATH), - ); + for cb in [cb_kafka, cb_kcat_prober] { + cb.add_volume_mount("kerberos", STACKABLE_KERBEROS_DIR) + .context(AddVolumeMountSnafu)?; + cb.add_env_var("KRB5_CONFIG", STACKABLE_KERBEROS_KRB5_PATH); + cb.add_env_var( + "KAFKA_OPTS", + format!("-Djava.security.krb5.conf={STACKABLE_KERBEROS_KRB5_PATH}",), + ); + } } Ok(()) From 0da29f4a45fc20b486c47b586a6b8cdfe1dae573 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Wed, 9 Oct 2024 16:35:28 +0200 Subject: [PATCH 33/49] use listener volume scope for kerberos 
volume and replace FQDN with listener in advertised listeners --- rust/crd/src/listener.rs | 9 ++++-- rust/crd/src/security.rs | 5 ++- rust/operator-binary/src/kafka_controller.rs | 2 -- rust/operator-binary/src/kerberos.rs | 31 +++++++++---------- .../kuttl/kerberos/30-access-kafka.txt.j2 | 7 ++++- 5 files changed, 29 insertions(+), 25 deletions(-) diff --git a/rust/crd/src/listener.rs b/rust/crd/src/listener.rs index 962602d0..47d9ba96 100644 --- a/rust/crd/src/listener.rs +++ b/rust/crd/src/listener.rs @@ -126,8 +126,11 @@ pub fn get_kafka_listener_config( }); advertised_listeners.push(KafkaListener { name: KafkaListenerName::Client, - host: pod_fqdn.clone(), - port: KafkaTlsSecurity::SECURE_CLIENT_PORT.to_string(), + host: node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), + port: node_port_cmd( + STACKABLE_LISTENER_BROKER_DIR, + kafka_security.client_port_name(), + ), }); listener_security_protocol_map .insert(KafkaListenerName::Client, KafkaListenerProtocol::SaslSsl); @@ -221,7 +224,7 @@ pub fn get_kafka_listener_config( }) } -fn node_address_cmd(directory: &str) -> String { +pub fn node_address_cmd(directory: &str) -> String { format!("$(cat {directory}/default-address/address)") } diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index 2d9d0bc1..f3880e67 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -313,7 +313,6 @@ impl KafkaTlsSecurity { kafka_listeners: &KafkaListenerConfig, opa_connect_string: Option<&str>, kerberos_enabled: bool, - pod_fqdn: &String, ) -> Vec { vec![formatdoc! 
{" {COMMON_BASH_TRAP_FUNCTIONS} @@ -341,8 +340,8 @@ impl KafkaTlsSecurity { }, jaas_config = match kerberos_enabled { true => { - let service_name = KafkaRole::Broker.kerberos_service_name(); - format!(" --override \"listener.name.client.gssapi.sasl.jaas.config=com.sun.security.auth.module.Krb5LoginModule required useKeyTab=true storeKey=true keyTab=\\\"/stackable/kerberos/keytab\\\" principal=\\\"{service_name}/{pod_fqdn}@$KERBEROS_REALM\\\";\"")}, + // N.B. See https://docs.oracle.com/en/java/javase/22/docs/api/jdk.security.auth/com/sun/security/auth/module/Krb5LoginModule.html for reasoning behind the use of the asterisk/isInitiator settings below. + " --override \"listener.name.client.gssapi.sasl.jaas.config=com.sun.security.auth.module.Krb5LoginModule required useKeyTab=true storeKey=true isInitiator=false keyTab=\\\"/stackable/kerberos/keytab\\\" principal=\\\"*\\\";\"".to_string()}, false => "".to_string(), }, }] diff --git a/rust/operator-binary/src/kafka_controller.rs b/rust/operator-binary/src/kafka_controller.rs index 882bc72d..9740bba1 100644 --- a/rust/operator-binary/src/kafka_controller.rs +++ b/rust/operator-binary/src/kafka_controller.rs @@ -877,7 +877,6 @@ fn build_broker_rolegroup_statefulset( if kafka_security.has_kerberos_enabled() { add_kerberos_pod_config( - kafka, kafka_security, kafka_role, &mut cb_kcat_prober, @@ -962,7 +961,6 @@ fn build_broker_rolegroup_statefulset( &kafka_listeners, opa_connect_string, kafka_security.has_kerberos_enabled(), - &pod_fqdn, ) .join("\n")]) .add_env_var("EXTRA_ARGS", jvm_args) diff --git a/rust/operator-binary/src/kerberos.rs b/rust/operator-binary/src/kerberos.rs index 7389a201..7558bd91 100644 --- a/rust/operator-binary/src/kerberos.rs +++ b/rust/operator-binary/src/kerberos.rs @@ -1,19 +1,19 @@ use snafu::{ResultExt, Snafu}; -use stackable_kafka_crd::{security::KafkaTlsSecurity, KafkaCluster, KafkaRole}; -use stackable_kafka_crd::{STACKABLE_KERBEROS_DIR, STACKABLE_KERBEROS_KRB5_PATH}; -use 
stackable_operator::{ - builder::{ - self, - pod::{ - container::ContainerBuilder, - volume::{ - SecretOperatorVolumeSourceBuilder, SecretOperatorVolumeSourceBuilderError, - VolumeBuilder, - }, - PodBuilder, +use stackable_kafka_crd::{security::KafkaTlsSecurity, KafkaRole}; +use stackable_kafka_crd::{ + LISTENER_BOOTSTRAP_VOLUME_NAME, LISTENER_BROKER_VOLUME_NAME, STACKABLE_KERBEROS_DIR, + STACKABLE_KERBEROS_KRB5_PATH, +}; +use stackable_operator::builder::{ + self, + pod::{ + container::ContainerBuilder, + volume::{ + SecretOperatorVolumeSourceBuilder, SecretOperatorVolumeSourceBuilderError, + VolumeBuilder, }, + PodBuilder, }, - kube::ResourceExt, }; #[derive(Snafu, Debug)] @@ -33,7 +33,6 @@ pub enum Error { } pub fn add_kerberos_pod_config( - kafka: &KafkaCluster, kafka_security: &KafkaTlsSecurity, role: &KafkaRole, cb_kcat_prober: &mut ContainerBuilder, @@ -44,8 +43,8 @@ pub fn add_kerberos_pod_config( // Mount keytab let kerberos_secret_operator_volume = SecretOperatorVolumeSourceBuilder::new(kerberos_secret_class) - .with_service_scope(kafka.name_any()) - .with_pod_scope() + .with_listener_volume_scope(LISTENER_BROKER_VOLUME_NAME) + .with_listener_volume_scope(LISTENER_BOOTSTRAP_VOLUME_NAME) .with_kerberos_service_name(role.kerberos_service_name()) .build() .context(KerberosSecretVolumeSnafu)?; diff --git a/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 b/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 index 5c2b356e..b5512542 100644 --- a/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 +++ b/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 @@ -18,6 +18,12 @@ spec: value: /stackable/kerberos/krb5.conf - name: KAFKA_OPTS value: -Djava.security.krb5.conf=/stackable/kerberos/krb5.conf + # Define the environment variable for the kafka bootstrap listener + - name: BROKER + valueFrom: + configMapKeyRef: + name: test-kafka + key: KAFKA volumeMounts: - name: script mountPath: /tmp/script @@ -74,7 +80,6 @@ data: set -euxo pipefail export 
KCAT_CONFIG=/stackable/kcat.conf - BROKER=test-kafka-broker-default-0.test-kafka-broker-default.$NAMESPACE.svc.cluster.local:9093,test-kafka-broker-default-1.test-kafka-broker-default.$NAMESPACE.svc.cluster.local:9093,test-kafka-broker-default-2.test-kafka-broker-default.$NAMESPACE.svc.cluster.local:9093 TOPIC=test-topic CONSUMER_GROUP=test-consumer-group From 7be62574008fb14f8dc8ed4dfddd725a1733cd37 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Wed, 9 Oct 2024 16:51:04 +0200 Subject: [PATCH 34/49] added custom image usage from previous merge from main --- tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 b/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 index 0ab5f794..fc3a958a 100644 --- a/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 +++ b/tests/templates/kuttl/kerberos/20-install-kafka.yaml.j2 @@ -28,7 +28,12 @@ commands: name: test-kafka spec: image: +{% if test_scenario['values']['kafka'].find(",") > 0 %} + custom: "{{ test_scenario['values']['kafka'].split(',')[1] }}" + productVersion: "{{ test_scenario['values']['kafka'].split(',')[0] }}" +{% else %} productVersion: "{{ test_scenario['values']['kafka'] }}" +{% endif %} pullPolicy: IfNotPresent clusterConfig: zookeeperConfigMapName: test-kafka-znode From 3aa923ab0b1abdd0475f25fa78629a0709ea2cbf Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy <1712947+adwk67@users.noreply.github.com> Date: Wed, 16 Oct 2024 16:39:16 +0200 Subject: [PATCH 35/49] Update rust/crd/src/authentication.rs Co-authored-by: Siegfried Weber --- rust/crd/src/authentication.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust/crd/src/authentication.rs b/rust/crd/src/authentication.rs index 9f00d884..47462883 100644 --- a/rust/crd/src/authentication.rs +++ b/rust/crd/src/authentication.rs @@ -48,7 +48,7 @@ pub struct KafkaAuthentication { /// /// This affects client connections
and also requires TLS for encryption. /// This setting is used to reference an `AuthenticationClass` and in turn, a `SecretClass` that is - /// used to create keytabs). + /// used to create keytabs. pub authentication_class: String, } From 203b52d4a6dfa16fb71afbcb3e2acc4c1ac07648 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Wed, 16 Oct 2024 16:49:21 +0200 Subject: [PATCH 36/49] remove unecessary test --- rust/crd/src/lib.rs | 35 ----------------------------------- 1 file changed, 35 deletions(-) diff --git a/rust/crd/src/lib.rs b/rust/crd/src/lib.rs index bab8b11a..a778e58d 100644 --- a/rust/crd/src/lib.rs +++ b/rust/crd/src/lib.rs @@ -692,39 +692,4 @@ mod tests { tls::internal_tls_default() ); } - - #[test] - fn test_get_auth_tls() { - let kafka_cluster = r#" - apiVersion: kafka.stackable.tech/v1alpha1 - kind: KafkaCluster - metadata: - name: simple-kafka - namespace: default - spec: - image: - productVersion: 3.7.1 - clusterConfig: - authentication: - - authenticationClass: kafka-client-tls1 - - authenticationClass: kafka-client-tls2 - tls: - internalSecretClass: internalTls - serverSecretClass: tls - zookeeperConfigMapName: xyz - "#; - let kafka: KafkaCluster = serde_yaml::from_str(kafka_cluster).expect("illegal test input"); - - assert_eq!( - vec![ - KafkaAuthentication { - authentication_class: "kafka-client-tls1".to_string() - }, - KafkaAuthentication { - authentication_class: "kafka-client-tls2".to_string() - }, - ], - kafka.spec.cluster_config.authentication - ); - } } From 5b6b7de91b3b6be10c7cbec71b4fcbaeec5da37d Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Wed, 16 Oct 2024 16:53:44 +0200 Subject: [PATCH 37/49] removed unused Error --- rust/operator-binary/src/kafka_controller.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/rust/operator-binary/src/kafka_controller.rs b/rust/operator-binary/src/kafka_controller.rs index 9740bba1..86e9534a 100644 --- a/rust/operator-binary/src/kafka_controller.rs +++ 
b/rust/operator-binary/src/kafka_controller.rs @@ -336,9 +336,6 @@ pub enum Error { source: stackable_kafka_crd::security::Error, }, - #[snafu(display("failed to build vector container"))] - BuildVectorContainer { source: LoggingError }, - #[snafu(display("failed to add needed volume"))] AddVolume { source: builder::pod::Error }, @@ -413,7 +410,6 @@ impl ReconcilerError for Error { Error::ResolveNamespace { .. } => None, Error::AddKerberosConfig { .. } => None, Error::FailedToValidateAuthenticationMethod { .. } => None, - Error::BuildVectorContainer { .. } => None, } } } From ed300692cbaa3eb5b510cb8ecd56a1151600287c Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Wed, 16 Oct 2024 16:55:15 +0200 Subject: [PATCH 38/49] regenerate charts --- deploy/helm/kafka-operator/crds/crds.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deploy/helm/kafka-operator/crds/crds.yaml b/deploy/helm/kafka-operator/crds/crds.yaml index 671865ac..98b6769d 100644 --- a/deploy/helm/kafka-operator/crds/crds.yaml +++ b/deploy/helm/kafka-operator/crds/crds.yaml @@ -568,7 +568,7 @@ spec: ## Kerberos provider - This affects client connections and also requires TLS for encryption. This setting is used to reference an `AuthenticationClass` and in turn, a `SecretClass` that is used to create keytabs). + This affects client connections and also requires TLS for encryption. This setting is used to reference an `AuthenticationClass` and in turn, a `SecretClass` that is used to create keytabs. 
type: string required: - authenticationClass From 2d4feacebe2ab9a0393cf714b0a5ffb1d96ad0dc Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Wed, 16 Oct 2024 17:29:06 +0200 Subject: [PATCH 39/49] combine cases where internal tls is required --- rust/crd/src/listener.rs | 19 +++---------------- 1 file changed, 3 insertions(+), 16 deletions(-) diff --git a/rust/crd/src/listener.rs b/rust/crd/src/listener.rs index 47d9ba96..34e96f21 100644 --- a/rust/crd/src/listener.rs +++ b/rust/crd/src/listener.rs @@ -171,8 +171,9 @@ pub fn get_kafka_listener_config( } // INTERNAL - if kafka_security.has_kerberos_enabled() { - // 5) Kerberos and TLS authentication classes are mutually exclusive + if kafka_security.has_kerberos_enabled() || kafka_security.tls_internal_secret_class().is_some() + { + // 5) & 6) Kerberos and TLS authentication classes are mutually exclusive but both require internal tls to be used listeners.push(KafkaListener { name: KafkaListenerName::Internal, host: LISTENER_LOCAL_ADDRESS.to_string(), @@ -185,20 +186,6 @@ pub fn get_kafka_listener_config( }); listener_security_protocol_map .insert(KafkaListenerName::Internal, KafkaListenerProtocol::Ssl); - } else if kafka_security.tls_internal_secret_class().is_some() { - // 6) If internal tls is required we expose INTERNAL as SSL - listeners.push(KafkaListener { - name: KafkaListenerName::Internal, - host: LISTENER_LOCAL_ADDRESS.to_string(), - port: kafka_security.internal_port().to_string(), - }); - advertised_listeners.push(KafkaListener { - name: KafkaListenerName::Internal, - host: pod_fqdn.to_string(), - port: kafka_security.internal_port().to_string(), - }); - listener_security_protocol_map - .insert(KafkaListenerName::Internal, KafkaListenerProtocol::Ssl); } else { // 7) If no internal tls is required we expose INTERNAL as PLAINTEXT listeners.push(KafkaListener { From cb8301d400c050abf0f4169b5e1f79508307d685 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Wed, 23 Oct 2024 17:52:55 +0200 Subject: 
[PATCH 40/49] working test with broker listeners instead of listener bootstrap --- rust/crd/src/security.rs | 7 +++++-- tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 | 7 +------ 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index f3880e67..bf809259 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -27,7 +27,8 @@ use stackable_operator::{ }; use crate::{ - authentication::SUPPORTED_AUTHENTICATION_CLASS_PROVIDERS, STACKABLE_KERBEROS_KRB5_PATH, + authentication::SUPPORTED_AUTHENTICATION_CLASS_PROVIDERS, listener::node_address_cmd, + STACKABLE_KERBEROS_KRB5_PATH, STACKABLE_LISTENER_BROKER_DIR, }; use crate::{ authentication::{self, ResolvedAuthenticationClasses}, @@ -340,8 +341,10 @@ impl KafkaTlsSecurity { }, jaas_config = match kerberos_enabled { true => { + let service_name = KafkaRole::Broker.kerberos_service_name(); + let broker_address = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR); // N.B. See https://docs.oracle.com/en/java/javase/22/docs/api/jdk.security.auth/com/sun/security/auth/module/Krb5LoginModule.html for reasoning behind the use of the asterisk/isInitiator settings below. 
- " --override \"listener.name.client.gssapi.sasl.jaas.config=com.sun.security.auth.module.Krb5LoginModule required useKeyTab=true storeKey=true isInitiator=false keyTab=\\\"/stackable/kerberos/keytab\\\" principal=\\\"*\\\";\"".to_string()}, + format!(" --override \"listener.name.client.gssapi.sasl.jaas.config=com.sun.security.auth.module.Krb5LoginModule required useKeyTab=true storeKey=true isInitiator=false keyTab=\\\"/stackable/kerberos/keytab\\\" principal=\\\"{service_name}/{broker_address}@$KERBEROS_REALM\\\";\"").to_string()}, false => "".to_string(), }, }] diff --git a/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 b/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 index b5512542..b99bb8e4 100644 --- a/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 +++ b/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 @@ -18,12 +18,6 @@ spec: value: /stackable/kerberos/krb5.conf - name: KAFKA_OPTS value: -Djava.security.krb5.conf=/stackable/kerberos/krb5.conf - # Define the environment variable for the kafka bootstrap listener - - name: BROKER - valueFrom: - configMapKeyRef: - name: test-kafka - key: KAFKA volumeMounts: - name: script mountPath: /tmp/script @@ -82,6 +76,7 @@ data: export KCAT_CONFIG=/stackable/kcat.conf TOPIC=test-topic CONSUMER_GROUP=test-consumer-group + BROKER=test-kafka-broker-default-0-listener-broker.$NAMESPACE.svc.cluster.local:9093,test-kafka-broker-default-1-listener-broker.$NAMESPACE.svc.cluster.local:9093,test-kafka-broker-default-2-listener-broker.$NAMESPACE.svc.cluster.local:9093 echo -e -n "\ metadata.broker.list=$BROKER\n\ From c852a1bd93947eb4bd63ce64bff7b0e02bb757fa Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Tue, 5 Nov 2024 22:18:55 +0100 Subject: [PATCH 41/49] add listener for bootstrapper --- rust/crd/src/listener.rs | 127 ++++++++++++++++-- rust/crd/src/security.rs | 59 +++++++- rust/operator-binary/src/kafka_controller.rs | 11 ++ .../kuttl/kerberos/30-access-kafka.txt.j2 | 2 +- 4 files changed, 183 
insertions(+), 16 deletions(-) diff --git a/rust/crd/src/listener.rs b/rust/crd/src/listener.rs index d95ed34c..e70079f6 100644 --- a/rust/crd/src/listener.rs +++ b/rust/crd/src/listener.rs @@ -40,6 +40,8 @@ pub enum KafkaListenerName { ClientAuth, #[strum(serialize = "INTERNAL")] Internal, + #[strum(serialize = "BOOTSTRAP")] + Bootstrap, } #[derive(Debug)] @@ -209,6 +211,75 @@ pub fn get_kafka_listener_config( ); } + // BOOTSTRAP + if kafka_security.tls_client_authentication_class().is_some() { + listeners.push(KafkaListener { + name: KafkaListenerName::Bootstrap, + host: LISTENER_LOCAL_ADDRESS.to_string(), + port: kafka_security.bootstrap_port().to_string(), + }); + advertised_listeners.push(KafkaListener { + name: KafkaListenerName::Bootstrap, + host: node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), + port: node_port_cmd( + STACKABLE_LISTENER_BROKER_DIR, + kafka_security.client_port_name(), + ), + }); + listener_security_protocol_map + .insert(KafkaListenerName::Bootstrap, KafkaListenerProtocol::Ssl); + } else if kafka_security.has_kerberos_enabled() { + listeners.push(KafkaListener { + name: KafkaListenerName::Bootstrap, + host: LISTENER_LOCAL_ADDRESS.to_string(), + port: kafka_security.bootstrap_port().to_string(), + }); + advertised_listeners.push(KafkaListener { + name: KafkaListenerName::Bootstrap, + host: node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), + port: node_port_cmd( + STACKABLE_LISTENER_BROKER_DIR, + kafka_security.client_port_name(), + ), + }); + listener_security_protocol_map + .insert(KafkaListenerName::Bootstrap, KafkaListenerProtocol::SaslSsl); + } else if kafka_security.tls_server_secret_class().is_some() { + listeners.push(KafkaListener { + name: KafkaListenerName::Bootstrap, + host: LISTENER_LOCAL_ADDRESS.to_string(), + port: kafka_security.bootstrap_port().to_string(), + }); + advertised_listeners.push(KafkaListener { + name: KafkaListenerName::Bootstrap, + host: node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), + port: node_port_cmd( + 
STACKABLE_LISTENER_BROKER_DIR, + kafka_security.client_port_name(), + ), + }); + listener_security_protocol_map + .insert(KafkaListenerName::Bootstrap, KafkaListenerProtocol::Ssl); + } else { + listeners.push(KafkaListener { + name: KafkaListenerName::Bootstrap, + host: LISTENER_LOCAL_ADDRESS.to_string(), + port: KafkaTlsSecurity::BOOTSTRAP_PORT.to_string(), + }); + advertised_listeners.push(KafkaListener { + name: KafkaListenerName::Bootstrap, + host: node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), + port: node_port_cmd( + STACKABLE_LISTENER_BROKER_DIR, + kafka_security.client_port_name(), + ), + }); + listener_security_protocol_map.insert( + KafkaListenerName::Bootstrap, + KafkaListenerProtocol::Plaintext, + ); + } + Ok(KafkaListenerConfig { listeners, advertised_listeners, @@ -301,20 +372,22 @@ mod tests { assert_eq!( config.listeners(), format!( - "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port}", + "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port},{bootstrap_name}://{internal_host}:{bootstrap_port}", name = KafkaListenerName::ClientAuth, host = LISTENER_LOCAL_ADDRESS, port = kafka_security.client_port(), internal_name = KafkaListenerName::Internal, internal_host = LISTENER_LOCAL_ADDRESS, internal_port = kafka_security.internal_port(), + bootstrap_name = KafkaListenerName::Bootstrap, + bootstrap_port = kafka_security.bootstrap_port(), ) ); assert_eq!( config.advertised_listeners(), format!( - "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port}", + "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port},{bootstrap_name}://{bootstrap_host}:{bootstrap_port}", name = KafkaListenerName::ClientAuth, host = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), port = node_port_cmd( @@ -324,17 +397,25 @@ mod tests { internal_name = KafkaListenerName::Internal, internal_host = pod_fqdn(&kafka, object_name, &cluster_info).unwrap(), internal_port = kafka_security.internal_port(), + 
bootstrap_name = KafkaListenerName::Bootstrap, + bootstrap_host = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), + bootstrap_port = node_port_cmd( + STACKABLE_LISTENER_BROKER_DIR, + kafka_security.client_port_name() + ), ) ); assert_eq!( config.listener_security_protocol_map(), format!( - "{name}:{protocol},{internal_name}:{internal_protocol}", + "{name}:{protocol},{internal_name}:{internal_protocol},{bootstrap_name}:{bootstrap_protocol}", name = KafkaListenerName::ClientAuth, protocol = KafkaListenerProtocol::Ssl, internal_name = KafkaListenerName::Internal, - internal_protocol = KafkaListenerProtocol::Ssl + internal_protocol = KafkaListenerProtocol::Ssl, + bootstrap_name = KafkaListenerName::Bootstrap, + bootstrap_protocol = KafkaListenerProtocol::Ssl ) ); @@ -349,20 +430,23 @@ mod tests { assert_eq!( config.listeners(), format!( - "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port}", + "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port},{bootstrap_name}://{bootstrap_host}:{bootstrap_port}", name = KafkaListenerName::Client, host = LISTENER_LOCAL_ADDRESS, port = kafka_security.client_port(), internal_name = KafkaListenerName::Internal, internal_host = LISTENER_LOCAL_ADDRESS, internal_port = kafka_security.internal_port(), + bootstrap_name = KafkaListenerName::Bootstrap, + bootstrap_host = LISTENER_LOCAL_ADDRESS, + bootstrap_port = kafka_security.bootstrap_port(), ) ); assert_eq!( config.advertised_listeners(), format!( - "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port}", + "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port},{bootstrap_name}://{bootstrap_host}:{bootstrap_port}", name = KafkaListenerName::Client, host = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), port = node_port_cmd( @@ -372,17 +456,25 @@ mod tests { internal_name = KafkaListenerName::Internal, internal_host = pod_fqdn(&kafka, object_name, &cluster_info).unwrap(), internal_port = 
kafka_security.internal_port(), + bootstrap_name = KafkaListenerName::Bootstrap, + bootstrap_host = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), + bootstrap_port = node_port_cmd( + STACKABLE_LISTENER_BROKER_DIR, + kafka_security.client_port_name() + ), ) ); assert_eq!( config.listener_security_protocol_map(), format!( - "{name}:{protocol},{internal_name}:{internal_protocol}", + "{name}:{protocol},{internal_name}:{internal_protocol},{bootstrap_name}:{bootstrap_protocol}", name = KafkaListenerName::Client, protocol = KafkaListenerProtocol::Ssl, internal_name = KafkaListenerName::Internal, - internal_protocol = KafkaListenerProtocol::Ssl + internal_protocol = KafkaListenerProtocol::Ssl, + bootstrap_name = KafkaListenerName::Bootstrap, + bootstrap_protocol = KafkaListenerProtocol::Ssl ) ); @@ -398,20 +490,23 @@ mod tests { assert_eq!( config.listeners(), format!( - "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port}", + "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port},{bootstrap_name}://{bootstrap_host}:{bootstrap_port}", name = KafkaListenerName::Client, host = LISTENER_LOCAL_ADDRESS, port = kafka_security.client_port(), internal_name = KafkaListenerName::Internal, internal_host = LISTENER_LOCAL_ADDRESS, internal_port = kafka_security.internal_port(), + bootstrap_name = KafkaListenerName::Bootstrap, + bootstrap_host = LISTENER_LOCAL_ADDRESS, + bootstrap_port = kafka_security.bootstrap_port(), ) ); assert_eq!( config.advertised_listeners(), format!( - "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port}", + "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port},{bootstrap_name}://{bootstrap_host}:{bootstrap_port}", name = KafkaListenerName::Client, host = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), port = node_port_cmd( @@ -421,17 +516,25 @@ mod tests { internal_name = KafkaListenerName::Internal, internal_host = pod_fqdn(&kafka, object_name, &cluster_info).unwrap(), 
internal_port = kafka_security.internal_port(), + bootstrap_name = KafkaListenerName::Bootstrap, + bootstrap_host = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), + bootstrap_port = node_port_cmd( + STACKABLE_LISTENER_BROKER_DIR, + kafka_security.client_port_name() + ), ) ); assert_eq!( config.listener_security_protocol_map(), format!( - "{name}:{protocol},{internal_name}:{internal_protocol}", + "{name}:{protocol},{internal_name}:{internal_protocol},{bootstrap_name}:{bootstrap_protocol}", name = KafkaListenerName::Client, protocol = KafkaListenerProtocol::Plaintext, internal_name = KafkaListenerName::Internal, - internal_protocol = KafkaListenerProtocol::Plaintext + internal_protocol = KafkaListenerProtocol::Plaintext, + bootstrap_name = KafkaListenerName::Bootstrap, + bootstrap_protocol = KafkaListenerProtocol::Plaintext ) ); } diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index bf809259..9a11ca8a 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -28,7 +28,7 @@ use stackable_operator::{ use crate::{ authentication::SUPPORTED_AUTHENTICATION_CLASS_PROVIDERS, listener::node_address_cmd, - STACKABLE_KERBEROS_KRB5_PATH, STACKABLE_LISTENER_BROKER_DIR, + STACKABLE_KERBEROS_KRB5_PATH, STACKABLE_LISTENER_BOOTSTRAP_DIR, STACKABLE_LISTENER_BROKER_DIR, }; use crate::{ authentication::{self, ResolvedAuthenticationClasses}, @@ -76,6 +76,9 @@ impl KafkaTlsSecurity { pub const CLIENT_PORT: u16 = 9092; pub const SECURE_CLIENT_PORT_NAME: &'static str = "kafka-tls"; pub const SECURE_CLIENT_PORT: u16 = 9093; + pub const BOOTSTRAP_PORT_NAME: &'static str = "bootstrap"; + pub const BOOTSTRAP_PORT: u16 = 9094; + pub const SECURE_BOOTSTRAP_PORT: u16 = 9095; pub const INTERNAL_PORT: u16 = 19092; pub const SECURE_INTERNAL_PORT: u16 = 19093; // - TLS global @@ -89,6 +92,18 @@ impl KafkaTlsSecurity { const CLIENT_SSL_TRUSTSTORE_PASSWORD: &'static str = "listener.name.client.ssl.truststore.password"; const CLIENT_SSL_TRUSTSTORE_TYPE: &'static str = 
"listener.name.client.ssl.truststore.type"; + // - Bootstrapper + const BOOTSTRAP_SSL_KEYSTORE_LOCATION: &'static str = + "listener.name.bootstrap.ssl.keystore.location"; + const BOOTSTRAP_SSL_KEYSTORE_PASSWORD: &'static str = + "listener.name.bootstrap.ssl.keystore.password"; + const BOOTSTRAP_SSL_KEYSTORE_TYPE: &'static str = "listener.name.bootstrap.ssl.keystore.type"; + const BOOTSTRAP_SSL_TRUSTSTORE_LOCATION: &'static str = + "listener.name.bootstrap.ssl.truststore.location"; + const BOOTSTRAP_SSL_TRUSTSTORE_PASSWORD: &'static str = + "listener.name.bootstrap.ssl.truststore.password"; + const BOOTSTRAP_SSL_TRUSTSTORE_TYPE: &'static str = + "listener.name.bootstrap.ssl.truststore.type"; // - TLS client authentication const CLIENT_AUTH_SSL_KEYSTORE_LOCATION: &'static str = "listener.name.client_auth.ssl.keystore.location"; @@ -241,6 +256,18 @@ impl KafkaTlsSecurity { } } + pub fn bootstrap_port(&self) -> u16 { + if self.tls_enabled() { + Self::SECURE_BOOTSTRAP_PORT + } else { + Self::BOOTSTRAP_PORT + } + } + + pub fn bootstrap_port_name(&self) -> &str { + Self::BOOTSTRAP_PORT_NAME + } + /// Return the Kafka (secure) client port name depending on tls or authentication settings. pub fn client_port_name(&self) -> &str { if self.tls_enabled() { @@ -343,8 +370,9 @@ impl KafkaTlsSecurity { true => { let service_name = KafkaRole::Broker.kerberos_service_name(); let broker_address = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR); - // N.B. See https://docs.oracle.com/en/java/javase/22/docs/api/jdk.security.auth/com/sun/security/auth/module/Krb5LoginModule.html for reasoning behind the use of the asterisk/isInitiator settings below. 
- format!(" --override \"listener.name.client.gssapi.sasl.jaas.config=com.sun.security.auth.module.Krb5LoginModule required useKeyTab=true storeKey=true isInitiator=false keyTab=\\\"/stackable/kerberos/keytab\\\" principal=\\\"{service_name}/{broker_address}@$KERBEROS_REALM\\\";\"").to_string()}, + let bootstrap_address = node_address_cmd(STACKABLE_LISTENER_BOOTSTRAP_DIR); + // TODO replace client and bootstrap below with constants + format!(" --override \"listener.name.client.gssapi.sasl.jaas.config=com.sun.security.auth.module.Krb5LoginModule required useKeyTab=true storeKey=true isInitiator=false keyTab=\\\"/stackable/kerberos/keytab\\\" principal=\\\"{service_name}/{broker_address}@$KERBEROS_REALM\\\";\" --override \"listener.name.bootstrap.gssapi.sasl.jaas.config=com.sun.security.auth.module.Krb5LoginModule required useKeyTab=true storeKey=true isInitiator=false keyTab=\\\"/stackable/kerberos/keytab\\\" principal=\\\"{service_name}/{bootstrap_address}@$KERBEROS_REALM\\\";\"").to_string()}, false => "".to_string(), }, }] @@ -469,6 +497,31 @@ impl KafkaTlsSecurity { Self::CLIENT_SSL_TRUSTSTORE_TYPE.to_string(), "PKCS12".to_string(), ); + // Bootstrap + config.insert( + Self::BOOTSTRAP_SSL_KEYSTORE_LOCATION.to_string(), + format!("{}/keystore.p12", Self::STACKABLE_TLS_KAFKA_SERVER_DIR), + ); + config.insert( + Self::BOOTSTRAP_SSL_KEYSTORE_PASSWORD.to_string(), + Self::SSL_STORE_PASSWORD.to_string(), + ); + config.insert( + Self::BOOTSTRAP_SSL_KEYSTORE_TYPE.to_string(), + "PKCS12".to_string(), + ); + config.insert( + Self::BOOTSTRAP_SSL_TRUSTSTORE_LOCATION.to_string(), + format!("{}/truststore.p12", Self::STACKABLE_TLS_KAFKA_SERVER_DIR), + ); + config.insert( + Self::BOOTSTRAP_SSL_TRUSTSTORE_PASSWORD.to_string(), + Self::SSL_STORE_PASSWORD.to_string(), + ); + config.insert( + Self::BOOTSTRAP_SSL_TRUSTSTORE_TYPE.to_string(), + "PKCS12".to_string(), + ); } // Internal TLS diff --git a/rust/operator-binary/src/kafka_controller.rs 
b/rust/operator-binary/src/kafka_controller.rs index 71dd64b6..cac25375 100644 --- a/rust/operator-binary/src/kafka_controller.rs +++ b/rust/operator-binary/src/kafka_controller.rs @@ -1199,6 +1199,11 @@ fn listener_ports(kafka_security: &KafkaTlsSecurity) -> Vec { port: kafka_security.client_port().into(), protocol: Some("TCP".to_string()), }, + ListenerPort { + name: kafka_security.bootstrap_port_name().to_string(), + port: kafka_security.bootstrap_port().into(), + protocol: Some("TCP".to_string()), + }, ] } @@ -1217,5 +1222,11 @@ fn container_ports(kafka_security: &KafkaTlsSecurity) -> Vec { protocol: Some("TCP".to_string()), ..ContainerPort::default() }, + ContainerPort { + name: Some(kafka_security.bootstrap_port_name().to_string()), + container_port: kafka_security.bootstrap_port().into(), + protocol: Some("TCP".to_string()), + ..ContainerPort::default() + }, ] } diff --git a/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 b/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 index b99bb8e4..499fb46a 100644 --- a/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 +++ b/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 @@ -76,7 +76,7 @@ data: export KCAT_CONFIG=/stackable/kcat.conf TOPIC=test-topic CONSUMER_GROUP=test-consumer-group - BROKER=test-kafka-broker-default-0-listener-broker.$NAMESPACE.svc.cluster.local:9093,test-kafka-broker-default-1-listener-broker.$NAMESPACE.svc.cluster.local:9093,test-kafka-broker-default-2-listener-broker.$NAMESPACE.svc.cluster.local:9093 + BROKER=test-kafka-broker-default-bootstrap.$NAMESPACE.svc.cluster.local:9095 echo -e -n "\ metadata.broker.list=$BROKER\n\ From 57b7ffc40e6d5cced4a13985e3e35d8876fd11ba Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Tue, 5 Nov 2024 22:29:42 +0100 Subject: [PATCH 42/49] removed duplicate check --- rust/crd/src/security.rs | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index 
9a11ca8a..3f13b03e 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -26,16 +26,16 @@ use stackable_operator::{ utils::COMMON_BASH_TRAP_FUNCTIONS, }; -use crate::{ - authentication::SUPPORTED_AUTHENTICATION_CLASS_PROVIDERS, listener::node_address_cmd, - STACKABLE_KERBEROS_KRB5_PATH, STACKABLE_LISTENER_BOOTSTRAP_DIR, STACKABLE_LISTENER_BROKER_DIR, -}; use crate::{ authentication::{self, ResolvedAuthenticationClasses}, listener::{self, KafkaListenerConfig}, tls, KafkaCluster, LISTENER_BOOTSTRAP_VOLUME_NAME, SERVER_PROPERTIES_FILE, STACKABLE_CONFIG_DIR, }; +use crate::{ + listener::node_address_cmd, STACKABLE_KERBEROS_KRB5_PATH, STACKABLE_LISTENER_BOOTSTRAP_DIR, + STACKABLE_LISTENER_BROKER_DIR, +}; use crate::{KafkaRole, LISTENER_BROKER_VOLUME_NAME, STACKABLE_LOG_DIR}; #[derive(Snafu, Debug)] @@ -48,9 +48,6 @@ pub enum Error { source: stackable_operator::builder::pod::volume::SecretOperatorVolumeSourceBuilderError, }, - #[snafu(display("only one authentication class at a time is currently supported. Possible Authentication class providers are {SUPPORTED_AUTHENTICATION_CLASS_PROVIDERS:?}"))] - MultipleAuthenticationMethodsProvided, - #[snafu(display("failed to add needed volume"))] AddVolume { source: builder::pod::Error }, @@ -233,13 +230,11 @@ impl KafkaTlsSecurity { } pub fn validate_authentication_methods(&self) -> Result<(), Error> { - // Client TLS authentication and Kerberos authentication are mutually exclusive - if self.tls_client_authentication_class().is_some() && self.has_kerberos_enabled() { - return Err(Error::MultipleAuthenticationMethodsProvided); - } - - // When users enable Kerberos we require them to also enable TLS for maximum security and - // to limit the number of combinations we need to support. + // Client TLS authentication and Kerberos authentication are mutually + // exclusive, but this has already been checked when checking the + // authentication classes. 
When users enable Kerberos we require them + // to also enable TLS for a) maximum security and b) to limit the + // number of combinations we need to support. if self.has_kerberos_enabled() { ensure!(self.server_secret_class.is_some(), KerberosRequiresTlsSnafu); } From 344ddf7e7408a218db1b70eae9831bb73101b639 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Wed, 6 Nov 2024 07:36:53 +0100 Subject: [PATCH 43/49] add bootstrap configs for client_auth as well --- rust/crd/src/security.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index 3f13b03e..f3ed0b72 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -492,6 +492,11 @@ impl KafkaTlsSecurity { Self::CLIENT_SSL_TRUSTSTORE_TYPE.to_string(), "PKCS12".to_string(), ); + } + + if self.tls_client_authentication_class().is_some() + || self.tls_server_secret_class().is_some() + { // Bootstrap config.insert( Self::BOOTSTRAP_SSL_KEYSTORE_LOCATION.to_string(), From 109ad3c93ba931f516b4624c13e27f0f43aa4b62 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Thu, 7 Nov 2024 16:26:37 +0100 Subject: [PATCH 44/49] use correct port in discovery for kerberos. Removed bootstrap changes for non-kerberos. 
--- rust/crd/src/listener.rs | 102 ++----------------- rust/crd/src/security.rs | 9 +- rust/operator-binary/src/discovery.rs | 6 +- rust/operator-binary/src/kafka_controller.rs | 22 ++-- 4 files changed, 35 insertions(+), 104 deletions(-) diff --git a/rust/crd/src/listener.rs b/rust/crd/src/listener.rs index e70079f6..16fcd412 100644 --- a/rust/crd/src/listener.rs +++ b/rust/crd/src/listener.rs @@ -212,23 +212,7 @@ pub fn get_kafka_listener_config( } // BOOTSTRAP - if kafka_security.tls_client_authentication_class().is_some() { - listeners.push(KafkaListener { - name: KafkaListenerName::Bootstrap, - host: LISTENER_LOCAL_ADDRESS.to_string(), - port: kafka_security.bootstrap_port().to_string(), - }); - advertised_listeners.push(KafkaListener { - name: KafkaListenerName::Bootstrap, - host: node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), - port: node_port_cmd( - STACKABLE_LISTENER_BROKER_DIR, - kafka_security.client_port_name(), - ), - }); - listener_security_protocol_map - .insert(KafkaListenerName::Bootstrap, KafkaListenerProtocol::Ssl); - } else if kafka_security.has_kerberos_enabled() { + if kafka_security.has_kerberos_enabled() { listeners.push(KafkaListener { name: KafkaListenerName::Bootstrap, host: LISTENER_LOCAL_ADDRESS.to_string(), @@ -244,40 +228,6 @@ pub fn get_kafka_listener_config( }); listener_security_protocol_map .insert(KafkaListenerName::Bootstrap, KafkaListenerProtocol::SaslSsl); - } else if kafka_security.tls_server_secret_class().is_some() { - listeners.push(KafkaListener { - name: KafkaListenerName::Bootstrap, - host: LISTENER_LOCAL_ADDRESS.to_string(), - port: kafka_security.bootstrap_port().to_string(), - }); - advertised_listeners.push(KafkaListener { - name: KafkaListenerName::Bootstrap, - host: node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), - port: node_port_cmd( - STACKABLE_LISTENER_BROKER_DIR, - kafka_security.client_port_name(), - ), - }); - listener_security_protocol_map - .insert(KafkaListenerName::Bootstrap, 
KafkaListenerProtocol::Ssl); - } else { - listeners.push(KafkaListener { - name: KafkaListenerName::Bootstrap, - host: LISTENER_LOCAL_ADDRESS.to_string(), - port: KafkaTlsSecurity::BOOTSTRAP_PORT.to_string(), - }); - advertised_listeners.push(KafkaListener { - name: KafkaListenerName::Bootstrap, - host: node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), - port: node_port_cmd( - STACKABLE_LISTENER_BROKER_DIR, - kafka_security.client_port_name(), - ), - }); - listener_security_protocol_map.insert( - KafkaListenerName::Bootstrap, - KafkaListenerProtocol::Plaintext, - ); } Ok(KafkaListenerConfig { @@ -372,22 +322,20 @@ mod tests { assert_eq!( config.listeners(), format!( - "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port},{bootstrap_name}://{internal_host}:{bootstrap_port}", + "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port}", name = KafkaListenerName::ClientAuth, host = LISTENER_LOCAL_ADDRESS, port = kafka_security.client_port(), internal_name = KafkaListenerName::Internal, internal_host = LISTENER_LOCAL_ADDRESS, internal_port = kafka_security.internal_port(), - bootstrap_name = KafkaListenerName::Bootstrap, - bootstrap_port = kafka_security.bootstrap_port(), ) ); assert_eq!( config.advertised_listeners(), format!( - "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port},{bootstrap_name}://{bootstrap_host}:{bootstrap_port}", + "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port}", name = KafkaListenerName::ClientAuth, host = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), port = node_port_cmd( @@ -397,25 +345,17 @@ mod tests { internal_name = KafkaListenerName::Internal, internal_host = pod_fqdn(&kafka, object_name, &cluster_info).unwrap(), internal_port = kafka_security.internal_port(), - bootstrap_name = KafkaListenerName::Bootstrap, - bootstrap_host = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), - bootstrap_port = node_port_cmd( - STACKABLE_LISTENER_BROKER_DIR, - 
kafka_security.client_port_name() - ), ) ); assert_eq!( config.listener_security_protocol_map(), format!( - "{name}:{protocol},{internal_name}:{internal_protocol},{bootstrap_name}:{bootstrap_protocol}", + "{name}:{protocol},{internal_name}:{internal_protocol}", name = KafkaListenerName::ClientAuth, protocol = KafkaListenerProtocol::Ssl, internal_name = KafkaListenerName::Internal, internal_protocol = KafkaListenerProtocol::Ssl, - bootstrap_name = KafkaListenerName::Bootstrap, - bootstrap_protocol = KafkaListenerProtocol::Ssl ) ); @@ -430,23 +370,20 @@ mod tests { assert_eq!( config.listeners(), format!( - "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port},{bootstrap_name}://{bootstrap_host}:{bootstrap_port}", + "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port}", name = KafkaListenerName::Client, host = LISTENER_LOCAL_ADDRESS, port = kafka_security.client_port(), internal_name = KafkaListenerName::Internal, internal_host = LISTENER_LOCAL_ADDRESS, internal_port = kafka_security.internal_port(), - bootstrap_name = KafkaListenerName::Bootstrap, - bootstrap_host = LISTENER_LOCAL_ADDRESS, - bootstrap_port = kafka_security.bootstrap_port(), ) ); assert_eq!( config.advertised_listeners(), format!( - "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port},{bootstrap_name}://{bootstrap_host}:{bootstrap_port}", + "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port}", name = KafkaListenerName::Client, host = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), port = node_port_cmd( @@ -456,25 +393,17 @@ mod tests { internal_name = KafkaListenerName::Internal, internal_host = pod_fqdn(&kafka, object_name, &cluster_info).unwrap(), internal_port = kafka_security.internal_port(), - bootstrap_name = KafkaListenerName::Bootstrap, - bootstrap_host = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), - bootstrap_port = node_port_cmd( - STACKABLE_LISTENER_BROKER_DIR, - kafka_security.client_port_name() - ), 
) ); assert_eq!( config.listener_security_protocol_map(), format!( - "{name}:{protocol},{internal_name}:{internal_protocol},{bootstrap_name}:{bootstrap_protocol}", + "{name}:{protocol},{internal_name}:{internal_protocol}", name = KafkaListenerName::Client, protocol = KafkaListenerProtocol::Ssl, internal_name = KafkaListenerName::Internal, internal_protocol = KafkaListenerProtocol::Ssl, - bootstrap_name = KafkaListenerName::Bootstrap, - bootstrap_protocol = KafkaListenerProtocol::Ssl ) ); @@ -490,23 +419,20 @@ mod tests { assert_eq!( config.listeners(), format!( - "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port},{bootstrap_name}://{bootstrap_host}:{bootstrap_port}", + "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port}", name = KafkaListenerName::Client, host = LISTENER_LOCAL_ADDRESS, port = kafka_security.client_port(), internal_name = KafkaListenerName::Internal, internal_host = LISTENER_LOCAL_ADDRESS, internal_port = kafka_security.internal_port(), - bootstrap_name = KafkaListenerName::Bootstrap, - bootstrap_host = LISTENER_LOCAL_ADDRESS, - bootstrap_port = kafka_security.bootstrap_port(), ) ); assert_eq!( config.advertised_listeners(), format!( - "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port},{bootstrap_name}://{bootstrap_host}:{bootstrap_port}", + "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port}", name = KafkaListenerName::Client, host = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), port = node_port_cmd( @@ -516,25 +442,17 @@ mod tests { internal_name = KafkaListenerName::Internal, internal_host = pod_fqdn(&kafka, object_name, &cluster_info).unwrap(), internal_port = kafka_security.internal_port(), - bootstrap_name = KafkaListenerName::Bootstrap, - bootstrap_host = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), - bootstrap_port = node_port_cmd( - STACKABLE_LISTENER_BROKER_DIR, - kafka_security.client_port_name() - ), ) ); assert_eq!( 
config.listener_security_protocol_map(), format!( - "{name}:{protocol},{internal_name}:{internal_protocol},{bootstrap_name}:{bootstrap_protocol}", + "{name}:{protocol},{internal_name}:{internal_protocol}", name = KafkaListenerName::Client, protocol = KafkaListenerProtocol::Plaintext, internal_name = KafkaListenerName::Internal, internal_protocol = KafkaListenerProtocol::Plaintext, - bootstrap_name = KafkaListenerName::Bootstrap, - bootstrap_protocol = KafkaListenerProtocol::Plaintext ) ); } diff --git a/rust/crd/src/security.rs b/rust/crd/src/security.rs index f3ed0b72..5b4fe9b5 100644 --- a/rust/crd/src/security.rs +++ b/rust/crd/src/security.rs @@ -73,9 +73,14 @@ impl KafkaTlsSecurity { pub const CLIENT_PORT: u16 = 9092; pub const SECURE_CLIENT_PORT_NAME: &'static str = "kafka-tls"; pub const SECURE_CLIENT_PORT: u16 = 9093; + // bootstrap: we will have a single named port with different values for + // secure (9095) and insecure (9094). The bootstrap listener is needed to + // be able to expose principals for both the broker and bootstrap in the + // JAAS configuration, so that clients can use both. 
pub const BOOTSTRAP_PORT_NAME: &'static str = "bootstrap"; pub const BOOTSTRAP_PORT: u16 = 9094; pub const SECURE_BOOTSTRAP_PORT: u16 = 9095; + // internal pub const INTERNAL_PORT: u16 = 19092; pub const SECURE_INTERNAL_PORT: u16 = 19093; // - TLS global @@ -494,9 +499,7 @@ impl KafkaTlsSecurity { ); } - if self.tls_client_authentication_class().is_some() - || self.tls_server_secret_class().is_some() - { + if self.has_kerberos_enabled() { // Bootstrap config.insert( Self::BOOTSTRAP_SSL_KEYSTORE_LOCATION.to_string(), diff --git a/rust/operator-binary/src/discovery.rs b/rust/operator-binary/src/discovery.rs index 218de7ba..70e6da0d 100644 --- a/rust/operator-binary/src/discovery.rs +++ b/rust/operator-binary/src/discovery.rs @@ -60,7 +60,11 @@ pub async fn build_discovery_configmaps( listeners: &[Listener], ) -> Result, Error> { let name = owner.name_unchecked(); - let port_name = kafka_security.client_port_name(); + let port_name = if kafka_security.has_kerberos_enabled() { + kafka_security.bootstrap_port_name() + } else { + kafka_security.client_port_name() + }; Ok(vec![ build_discovery_configmap( kafka, diff --git a/rust/operator-binary/src/kafka_controller.rs b/rust/operator-binary/src/kafka_controller.rs index cac25375..9d1a933f 100644 --- a/rust/operator-binary/src/kafka_controller.rs +++ b/rust/operator-binary/src/kafka_controller.rs @@ -1188,7 +1188,7 @@ pub fn error_policy( /// We only expose client HTTP / HTTPS and Metrics ports. 
fn listener_ports(kafka_security: &KafkaTlsSecurity) -> Vec { - vec![ + let mut ports = vec![ ListenerPort { name: METRICS_PORT_NAME.to_string(), port: METRICS_PORT.into(), @@ -1199,17 +1199,20 @@ fn listener_ports(kafka_security: &KafkaTlsSecurity) -> Vec { port: kafka_security.client_port().into(), protocol: Some("TCP".to_string()), }, - ListenerPort { + ]; + if kafka_security.has_kerberos_enabled() { + ports.push(ListenerPort { name: kafka_security.bootstrap_port_name().to_string(), port: kafka_security.bootstrap_port().into(), protocol: Some("TCP".to_string()), - }, - ] + }); + } + ports } /// We only expose client HTTP / HTTPS and Metrics ports. fn container_ports(kafka_security: &KafkaTlsSecurity) -> Vec { - vec![ + let mut ports = vec![ ContainerPort { name: Some(METRICS_PORT_NAME.to_string()), container_port: METRICS_PORT.into(), @@ -1222,11 +1225,14 @@ fn container_ports(kafka_security: &KafkaTlsSecurity) -> Vec { protocol: Some("TCP".to_string()), ..ContainerPort::default() }, - ContainerPort { + ]; + if kafka_security.has_kerberos_enabled() { + ports.push(ContainerPort { name: Some(kafka_security.bootstrap_port_name().to_string()), container_port: kafka_security.bootstrap_port().into(), protocol: Some("TCP".to_string()), ..ContainerPort::default() - }, - ] + }); + } + ports } From 6ced9a5cacdb84ede039144cb66719bfd7574f7a Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Thu, 7 Nov 2024 18:26:19 +0100 Subject: [PATCH 45/49] use discovery in kerberos test --- tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 b/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 index 499fb46a..7fa56095 100644 --- a/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 +++ b/tests/templates/kuttl/kerberos/30-access-kafka.txt.j2 @@ -18,6 +18,11 @@ spec: value: /stackable/kerberos/krb5.conf - name: KAFKA_OPTS value: 
-Djava.security.krb5.conf=/stackable/kerberos/krb5.conf + - name: KAFKA + valueFrom: + configMapKeyRef: + name: test-kafka + key: KAFKA volumeMounts: - name: script mountPath: /tmp/script @@ -76,10 +81,9 @@ data: export KCAT_CONFIG=/stackable/kcat.conf TOPIC=test-topic CONSUMER_GROUP=test-consumer-group - BROKER=test-kafka-broker-default-bootstrap.$NAMESPACE.svc.cluster.local:9095 echo -e -n "\ - metadata.broker.list=$BROKER\n\ + metadata.broker.list=$KAFKA\n\ auto.offset.reset=beginning\n\ security.protocol=SASL_SSL\n\ ssl.ca.location=/stackable/tls-ca-cert-mount/ca.crt\n\ From b6f3c6007f45a5e924e4beeda44f6d4df8b53da8 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Thu, 7 Nov 2024 19:29:08 +0100 Subject: [PATCH 46/49] added unit test for kerberos config --- rust/crd/src/listener.rs | 100 +++++++++++++++++++++++++++++++++++++-- 1 file changed, 97 insertions(+), 3 deletions(-) diff --git a/rust/crd/src/listener.rs b/rust/crd/src/listener.rs index 16fcd412..acbf1216 100644 --- a/rust/crd/src/listener.rs +++ b/rust/crd/src/listener.rs @@ -267,8 +267,9 @@ mod tests { builder::meta::ObjectMetaBuilder, commons::{ authentication::{ - tls::AuthenticationProvider, AuthenticationClass, AuthenticationClassProvider, - AuthenticationClassSpec, + kerberos, + tls::{self}, + AuthenticationClass, AuthenticationClassProvider, AuthenticationClassSpec, }, networking::DomainName, }, @@ -307,7 +308,7 @@ mod tests { ResolvedAuthenticationClasses::new(vec![AuthenticationClass { metadata: ObjectMetaBuilder::new().name("auth-class").build(), spec: AuthenticationClassSpec { - provider: AuthenticationClassProvider::Tls(AuthenticationProvider { + provider: AuthenticationClassProvider::Tls(tls::AuthenticationProvider { client_cert_secret_class: Some("client-auth-secret-class".to_string()), }), }, @@ -456,4 +457,97 @@ mod tests { ) ); } + + #[test] + fn test_get_kafka_kerberos_listeners_config() { + let object_name = "simple-kafka-broker-default"; + let cluster_info = 
default_cluster_info(); + + let kafka_cluster = r#" + apiVersion: kafka.stackable.tech/v1alpha1 + kind: KafkaCluster + metadata: + name: simple-kafka + namespace: default + spec: + image: + productVersion: 3.7.1 + clusterConfig: + authentication: + - authenticationClass: kafka-kerberos + tls: + serverSecretClass: tls + zookeeperConfigMapName: xyz + "#; + let kafka: KafkaCluster = serde_yaml::from_str(kafka_cluster).expect("illegal test input"); + let kafka_security = KafkaTlsSecurity::new( + ResolvedAuthenticationClasses::new(vec![AuthenticationClass { + metadata: ObjectMetaBuilder::new().name("auth-class").build(), + spec: AuthenticationClassSpec { + provider: AuthenticationClassProvider::Kerberos( + kerberos::AuthenticationProvider { + kerberos_secret_class: "kerberos-secret-class".to_string(), + }, + ), + }, + }]), + "tls".to_string(), + Some("tls".to_string()), + ); + + let config = + get_kafka_listener_config(&kafka, &kafka_security, object_name, &cluster_info).unwrap(); + + assert_eq!( + config.listeners(), + format!( + "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port},{bootstrap_name}://{bootstrap_host}:{bootstrap_port}", + name = KafkaListenerName::Client, + host = LISTENER_LOCAL_ADDRESS, + port = kafka_security.client_port(), + internal_name = KafkaListenerName::Internal, + internal_host = LISTENER_LOCAL_ADDRESS, + internal_port = kafka_security.internal_port(), + bootstrap_name = KafkaListenerName::Bootstrap, + bootstrap_host = LISTENER_LOCAL_ADDRESS, + bootstrap_port = kafka_security.bootstrap_port(), + ) + ); + + assert_eq!( + config.advertised_listeners(), + format!( + "{name}://{host}:{port},{internal_name}://{internal_host}:{internal_port},{bootstrap_name}://{bootstrap_host}:{bootstrap_port}", + name = KafkaListenerName::Client, + host = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), + port = node_port_cmd( + STACKABLE_LISTENER_BROKER_DIR, + kafka_security.client_port_name() + ), + internal_name = 
KafkaListenerName::Internal, + internal_host = pod_fqdn(&kafka, object_name, &cluster_info).unwrap(), + internal_port = kafka_security.internal_port(), + bootstrap_name = KafkaListenerName::Bootstrap, + bootstrap_host = node_address_cmd(STACKABLE_LISTENER_BROKER_DIR), + bootstrap_port = node_port_cmd( + STACKABLE_LISTENER_BROKER_DIR, + kafka_security.client_port_name() + ), + ) + ); + + assert_eq!( + config.listener_security_protocol_map(), + format!( + "{name}:{protocol},{internal_name}:{internal_protocol},{bootstrap_name}:{bootstrap_protocol}", + name = KafkaListenerName::Client, + protocol = KafkaListenerProtocol::SaslSsl, + internal_name = KafkaListenerName::Internal, + internal_protocol = KafkaListenerProtocol::Ssl, + bootstrap_name = KafkaListenerName::Bootstrap, + bootstrap_protocol = KafkaListenerProtocol::SaslSsl, + + ) + ); + } } From c9336eda26b2235119694c9c33a210e79c164002 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Mon, 11 Nov 2024 15:23:46 +0100 Subject: [PATCH 47/49] added note about client connections and ports --- docs/modules/kafka/pages/usage-guide/security.adoc | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/modules/kafka/pages/usage-guide/security.adoc b/docs/modules/kafka/pages/usage-guide/security.adoc index e3931562..3dce6290 100644 --- a/docs/modules/kafka/pages/usage-guide/security.adoc +++ b/docs/modules/kafka/pages/usage-guide/security.adoc @@ -155,6 +155,14 @@ spec: NOTE: When Kerberos is enabled it is also required to enable TLS for maximum security. +==== Clients + +In order to keep client configuration as uncluttered as possible, each kerberized Kafka broker has two principals: one for the broker itself and one for the bootstrap service. +The client can connect to the bootstrap service, which returns the broker quorum for use in subsequent operations. +This is transparent as each connection dynamically uses the relevant principal (broker or bootstrap). 
+In order for this to work it is necessary for kerberized clusters to define an extra kafka listener for the bootstrap, with a corresponding service (and port). +The bootstrap address is written to the discovery ConfigMap, using the Stackable bootstrap listener with the port being either 9094 (non-secure) or 9095 (secure) for kerberized clusters, and 9092 (non-secure) or 9093 (secure) for non-kerberized ones. + == [[authorization]]Authorization If you wish to include integration with xref:opa:index.adoc[Open Policy Agent] and already have an OPA cluster, then you can include an `opa` field pointing to the OPA cluster discovery `ConfigMap` and the required package. From 20fccb915b182f2439921f5eab5aa7f93026a4bb Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy <1712947+adwk67@users.noreply.github.com> Date: Tue, 12 Nov 2024 08:54:09 +0100 Subject: [PATCH 48/49] Update docs/modules/kafka/pages/usage-guide/security.adoc Co-authored-by: Siegfried Weber --- docs/modules/kafka/pages/usage-guide/security.adoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/modules/kafka/pages/usage-guide/security.adoc b/docs/modules/kafka/pages/usage-guide/security.adoc index 3dce6290..50b7a92b 100644 --- a/docs/modules/kafka/pages/usage-guide/security.adoc +++ b/docs/modules/kafka/pages/usage-guide/security.adoc @@ -160,7 +160,7 @@ NOTE: When Kerberos is enabled it is also required to enable TLS for maximum sec In order to keep client configuration as uncluttered as possible, each kerberized Kafka broker has two principals: one for the broker itself and one for the bootstrap service. The client can connect to the bootstrap service, which returns the broker quorum for use in subsequent operations. This is transparent as each connection dynamically uses the relevant principal (broker or bootstrap). -In order for this to work it is necessary for kerberized clusters to define an extra kafka listener for the bootstrap, with a corresponding service (and port). 
+In order for this to work, it is necessary for kerberized clusters to define an extra Kafka listener for the bootstrap with a corresponding service (and port). The bootstrap address is written to the discovery ConfigMap, using the Stackable bootstrap listener with the port being either 9094 (non-secure) or 9095 (secure) for kerberized clusters, and 9092 (non-secure) or 9093 (secure) for non-kerberized ones. == [[authorization]]Authorization From 74f2cb8c42dd8fb94ec19c34aab8e1fcd3e60523 Mon Sep 17 00:00:00 2001 From: Andrew Kenworthy Date: Tue, 12 Nov 2024 09:18:10 +0100 Subject: [PATCH 49/49] clarified comment --- docs/modules/kafka/pages/usage-guide/security.adoc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/modules/kafka/pages/usage-guide/security.adoc b/docs/modules/kafka/pages/usage-guide/security.adoc index 50b7a92b..e829a91b 100644 --- a/docs/modules/kafka/pages/usage-guide/security.adoc +++ b/docs/modules/kafka/pages/usage-guide/security.adoc @@ -161,7 +161,9 @@ In order to keep client configuration as uncluttered as possible, each kerberize The client can connect to the bootstrap service, which returns the broker quorum for use in subsequent operations. This is transparent as each connection dynamically uses the relevant principal (broker or bootstrap). In order for this to work, it is necessary for kerberized clusters to define an extra Kafka listener for the bootstrap with a corresponding service (and port). -The bootstrap address is written to the discovery ConfigMap, using the Stackable bootstrap listener with the port being either 9094 (non-secure) or 9095 (secure) for kerberized clusters, and 9092 (non-secure) or 9093 (secure) for non-kerberized ones. +The bootstrap address is written to the discovery ConfigMap, using the Stackable bootstrap listener with the port being 9095 (secure) for kerberized clusters, and 9092 (non-secure) or 9093 (secure) for non-kerberized ones. 
+
+NOTE: Port 9094 is reserved for non-secure kerberized connections, which are not currently implemented.
 
 == [[authorization]]Authorization