
Commit 19d569d

fix: rustfmt lint
1 parent e361ef2 · commit 19d569d

10 files changed: +121 -105 lines


rust/operator-binary/src/crd/affinity.rs

Lines changed: 1 addition & 1 deletion
```diff
@@ -1,5 +1,5 @@
 use stackable_operator::{
-    commons::affinity::{affinity_between_role_pods, StackableAffinityFragment},
+    commons::affinity::{StackableAffinityFragment, affinity_between_role_pods},
     k8s_openapi::api::core::v1::PodAntiAffinity,
 };
```
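Note: this one-line reordering, and the matching import hunks in the files below, is what rustfmt emits under the Rust 2024 style edition, which sorts use-tree entries byte-wise (ASCII), so capitalized type names now sort ahead of lowercase module and function names. A minimal runnable sketch, with an invented `demo` module standing in for the real crates:

```rust
// Invented module for illustration; only the ordering of the use-tree matters.
mod demo {
    pub struct ConfigManager;
    pub fn build_config() -> ConfigManager {
        ConfigManager
    }
}

// Byte-wise sorting puts `ConfigManager` ('C' = 0x43) before `build_config`
// ('b' = 0x62); the older style edition would have emitted
// `use demo::{build_config, ConfigManager};` instead.
use demo::{ConfigManager, build_config};

fn main() {
    let _manager: ConfigManager = build_config();
}
```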

rust/operator-binary/src/crd/history.rs

Lines changed: 4 additions & 4 deletions
```diff
@@ -1,6 +1,6 @@
 use std::collections::{BTreeMap, HashMap};
 
-use product_config::{types::PropertyNameKind, ProductConfigManager};
+use product_config::{ProductConfigManager, types::PropertyNameKind};
 use serde::{Deserialize, Serialize};
 use snafu::{OptionExt, ResultExt, Snafu};
 use stackable_operator::{
@@ -18,10 +18,10 @@ use stackable_operator::{
         merge::Merge,
     },
     k8s_openapi::{api::core::v1::EnvVar, apimachinery::pkg::api::resource::Quantity},
-    kube::{runtime::reflector::ObjectRef, CustomResource, ResourceExt},
+    kube::{CustomResource, ResourceExt, runtime::reflector::ObjectRef},
     product_config_utils::{
-        transform_all_roles_to_config, validate_all_roles_and_groups_config, Configuration,
-        ValidatedRoleConfigByPropertyKind,
+        Configuration, ValidatedRoleConfigByPropertyKind, transform_all_roles_to_config,
+        validate_all_roles_and_groups_config,
     },
     product_logging::{self, spec::Logging},
     role_utils::{GenericRoleConfig, JavaCommonConfig, Role, RoleGroup, RoleGroupRef},
```

rust/operator-binary/src/crd/mod.rs

Lines changed: 64 additions & 40 deletions
```diff
@@ -8,7 +8,7 @@ use std::{
 use constants::*;
 use history::LogFileDirectorySpec;
 use logdir::ResolvedLogDir;
-use product_config::{types::PropertyNameKind, ProductConfigManager};
+use product_config::{ProductConfigManager, types::PropertyNameKind};
 use serde::{Deserialize, Serialize};
 use snafu::{OptionExt, ResultExt, Snafu};
 use stackable_operator::{
@@ -33,8 +33,8 @@ use stackable_operator::{
     kvp::ObjectLabels,
     memory::{BinaryMultiple, MemoryQuantity},
     product_config_utils::{
-        transform_all_roles_to_config, validate_all_roles_and_groups_config,
-        ValidatedRoleConfigByPropertyKind,
+        ValidatedRoleConfigByPropertyKind, transform_all_roles_to_config,
+        validate_all_roles_and_groups_config,
     },
     product_logging,
     role_utils::{CommonConfiguration, GenericRoleConfig, JavaCommonConfig, Role, RoleGroup},
@@ -544,20 +544,47 @@ impl v1alpha1::SparkApplication {
         let mut submit_cmd = vec![
             "/stackable/spark/bin/spark-submit".to_string(),
             "--verbose".to_string(),
-            "--master k8s://https://${KUBERNETES_SERVICE_HOST}:${KUBERNETES_SERVICE_PORT_HTTPS}".to_string(),
+            "--master k8s://https://${KUBERNETES_SERVICE_HOST}:${KUBERNETES_SERVICE_PORT_HTTPS}"
+                .to_string(),
             format!("--deploy-mode {mode}"),
             format!("--name {name}"),
-            format!("--conf spark.kubernetes.driver.podTemplateFile={VOLUME_MOUNT_PATH_DRIVER_POD_TEMPLATES}/{POD_TEMPLATE_FILE}"),
-            format!("--conf spark.kubernetes.executor.podTemplateFile={VOLUME_MOUNT_PATH_EXECUTOR_POD_TEMPLATES}/{POD_TEMPLATE_FILE}"),
-            format!("--conf spark.kubernetes.driver.podTemplateContainerName={container_name}", container_name = SparkContainer::Spark),
-            format!("--conf spark.kubernetes.executor.podTemplateContainerName={container_name}", container_name = SparkContainer::Spark),
-            format!("--conf spark.kubernetes.namespace={}", self.metadata.namespace.as_ref().context(NoNamespaceSnafu)?),
-            format!("--conf spark.kubernetes.driver.container.image={}", spark_image.to_string()),
-            format!("--conf spark.kubernetes.executor.container.image={}", spark_image.to_string()),
-            format!("--conf spark.kubernetes.authenticate.driver.serviceAccountName={}", serviceaccount_name),
-            format!("--conf spark.driver.defaultJavaOptions=-Dlog4j.configurationFile={VOLUME_MOUNT_PATH_LOG_CONFIG}/{LOG4J2_CONFIG_FILE}"),
+            format!(
+                "--conf spark.kubernetes.driver.podTemplateFile={VOLUME_MOUNT_PATH_DRIVER_POD_TEMPLATES}/{POD_TEMPLATE_FILE}"
+            ),
+            format!(
+                "--conf spark.kubernetes.executor.podTemplateFile={VOLUME_MOUNT_PATH_EXECUTOR_POD_TEMPLATES}/{POD_TEMPLATE_FILE}"
+            ),
+            format!(
+                "--conf spark.kubernetes.driver.podTemplateContainerName={container_name}",
+                container_name = SparkContainer::Spark
+            ),
+            format!(
+                "--conf spark.kubernetes.executor.podTemplateContainerName={container_name}",
+                container_name = SparkContainer::Spark
+            ),
+            format!(
+                "--conf spark.kubernetes.namespace={}",
+                self.metadata.namespace.as_ref().context(NoNamespaceSnafu)?
+            ),
+            format!(
+                "--conf spark.kubernetes.driver.container.image={}",
+                spark_image.to_string()
+            ),
+            format!(
+                "--conf spark.kubernetes.executor.container.image={}",
+                spark_image.to_string()
+            ),
+            format!(
+                "--conf spark.kubernetes.authenticate.driver.serviceAccountName={}",
+                serviceaccount_name
+            ),
+            format!(
+                "--conf spark.driver.defaultJavaOptions=-Dlog4j.configurationFile={VOLUME_MOUNT_PATH_LOG_CONFIG}/{LOG4J2_CONFIG_FILE}"
+            ),
             format!("--conf spark.driver.extraClassPath=/stackable/spark/extra-jars/*"),
-            format!("--conf spark.executor.defaultJavaOptions=-Dlog4j.configurationFile={VOLUME_MOUNT_PATH_LOG_CONFIG}/{LOG4J2_CONFIG_FILE}"),
+            format!(
+                "--conf spark.executor.defaultJavaOptions=-Dlog4j.configurationFile={VOLUME_MOUNT_PATH_LOG_CONFIG}/{LOG4J2_CONFIG_FILE}"
+            ),
             format!("--conf spark.executor.extraClassPath=/stackable/spark/extra-jars/*"),
         ];
```
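The spark-submit hunk above is pure line-width housekeeping: once a `format!` call exceeds rustfmt's `max_width` (100 columns by default), its arguments move onto their own indented lines, while an over-long string literal is kept intact rather than split. A hedged sketch of the same transformation on invented names:

```rust
// Invented constant, for illustration only.
const VOLUME_MOUNT_PATH_LOG_CONFIG: &str = "/stackable/log_config";

fn main() {
    // Pre-formatting, this sat on one ~130-column line:
    //   let arg = format!("--conf spark.driver.defaultJavaOptions=-Dlog4j.configurationFile={VOLUME_MOUNT_PATH_LOG_CONFIG}/log4j2.properties");
    // rustfmt breaks the argument out but leaves the literal unsplit:
    let arg = format!(
        "--conf spark.driver.defaultJavaOptions=-Dlog4j.configurationFile={VOLUME_MOUNT_PATH_LOG_CONFIG}/log4j2.properties"
    );
    println!("{arg}");
}
```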

```diff
@@ -682,7 +709,9 @@ impl v1alpha1::SparkApplication {
         submit_cmd.extend(self.spec.args.clone());
 
         Ok(vec![
-            format!("containerdebug --output={VOLUME_MOUNT_PATH_LOG}/containerdebug-state.json --loop &"),
+            format!(
+                "containerdebug --output={VOLUME_MOUNT_PATH_LOG}/containerdebug-state.json --loop &"
+            ),
             submit_cmd.join(" "),
         ])
     }
@@ -792,14 +821,11 @@ impl v1alpha1::SparkApplication {
         };
         if let Some(role_envs) = role_envs {
             env.extend(role_envs.iter().map(|(k, v)| {
-                (
-                    k,
-                    EnvVar {
-                        name: k.clone(),
-                        value: Some(v.clone()),
-                        ..Default::default()
-                    },
-                )
+                (k, EnvVar {
+                    name: k.clone(),
+                    value: Some(v.clone()),
+                    ..Default::default()
+                })
             }))
         }
```
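The tuple rewrite above, and the two `role_groups` hunks that follow, appear to come from the 2024 style edition's treatment of a trailing "overflowable" argument: when the last element of a tuple or call is a struct literal, rustfmt now hugs the literal against the opening line instead of giving every element its own line. A small sketch with an invented type:

```rust
// Invented stand-in for the k8s EnvVar type, for illustration only.
#[derive(Debug, Default)]
struct EnvEntry {
    name: String,
    value: Option<String>,
}

fn main() {
    let key = "SPARK_MODE";
    // Older style editions split the tuple across lines:
    //     (
    //         key,
    //         EnvEntry { ... },
    //     )
    // The 2024 style edition combines the trailing struct literal:
    let pair = (key, EnvEntry {
        name: key.to_string(),
        ..Default::default()
    });
    println!("{pair:?}");
}
```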

```diff
@@ -854,13 +880,10 @@ impl v1alpha1::SparkApplication {
             Role {
                 config: submit_conf.clone(),
                 role_config: GenericRoleConfig::default(),
-                role_groups: [(
-                    "default".to_string(),
-                    RoleGroup {
-                        config: submit_conf,
-                        replicas: Some(1),
-                    },
-                )]
+                role_groups: [("default".to_string(), RoleGroup {
+                    config: submit_conf,
+                    replicas: Some(1),
+                })]
                 .into(),
             }
             .erase(),
@@ -877,13 +900,10 @@ impl v1alpha1::SparkApplication {
             Role {
                 config: driver_conf.clone(),
                 role_config: GenericRoleConfig::default(),
-                role_groups: [(
-                    "default".to_string(),
-                    RoleGroup {
-                        config: driver_conf,
-                        replicas: Some(1),
-                    },
-                )]
+                role_groups: [("default".to_string(), RoleGroup {
+                    config: driver_conf,
+                    replicas: Some(1),
+                })]
                 .into(),
             }
             .erase(),
@@ -967,7 +987,9 @@ fn subtract_spark_memory_overhead(for_java: bool, limit: &Quantity) -> Result<St
         .value as u32;
 
     if MIN_MEMORY_OVERHEAD > original_memory {
-        tracing::warn!("Skip memory overhead since not enough memory ({original_memory}m). At least {MIN_MEMORY_OVERHEAD}m required");
+        tracing::warn!(
+            "Skip memory overhead since not enough memory ({original_memory}m). At least {MIN_MEMORY_OVERHEAD}m required"
+        );
         return Ok(format!("{original_memory}m"));
     }
 
@@ -981,7 +1003,9 @@ fn subtract_spark_memory_overhead(for_java: bool, limit: &Quantity) -> Result<St
 
     let deduction = max(MIN_MEMORY_OVERHEAD, original_memory - reduced_memory);
 
-    tracing::debug!("subtract_spark_memory_overhead: original_memory ({original_memory}) - deduction ({deduction})");
+    tracing::debug!(
+        "subtract_spark_memory_overhead: original_memory ({original_memory}) - deduction ({deduction})"
+    );
     Ok(format!("{}m", original_memory - deduction))
 }
 
@@ -1089,7 +1113,7 @@ mod tests {
     use std::collections::{BTreeMap, HashMap};
 
     use indoc::indoc;
-    use product_config::{types::PropertyNameKind, ProductConfigManager};
+    use product_config::{ProductConfigManager, types::PropertyNameKind};
     use rstest::rstest;
     use stackable_operator::{
         commons::{
```

rust/operator-binary/src/crd/roles.rs

Lines changed: 3 additions & 2 deletions
```diff
@@ -38,7 +38,7 @@ use stackable_operator::{
 };
 use strum::{Display, EnumIter};
 
-use crate::crd::{v1alpha1, ResolvedLogDir};
+use crate::crd::{ResolvedLogDir, v1alpha1};
 
 #[derive(Clone, Debug, Deserialize, Display, Eq, PartialEq, Serialize, JsonSchema)]
 #[strum(serialize_all = "kebab-case")]
@@ -153,6 +153,7 @@ impl RoleConfig {
             requested_secret_lifetime: Some(Self::DEFAULT_SECRET_LIFETIME),
         }
     }
+
     pub fn volume_mounts(
         &self,
         spark_application: &v1alpha1::SparkApplication,
@@ -289,8 +290,8 @@ pub struct VolumeMounts {
 impl Atomic for VolumeMounts {}
 
 impl<'a> IntoIterator for &'a VolumeMounts {
-    type Item = &'a VolumeMount;
     type IntoIter = slice::Iter<'a, VolumeMount>;
+    type Item = &'a VolumeMount;
 
     fn into_iter(self) -> Self::IntoIter {
         self.volume_mounts.iter()
```
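Two separate fixes land in roles.rs: a blank line is added between the end of one method and `pub fn volume_mounts`, and the associated types of the `IntoIterator` impl are reordered so `IntoIter` precedes `Item` ('n' < 't' byte-wise). The reordering matches rustfmt's `reorder_impl_items` sorting; whether it is switched on by an option or the style edition in this repo is an assumption. A minimal sketch with a placeholder type:

```rust
// Placeholder collection, invented for this sketch.
struct Mounts {
    inner: Vec<String>,
}

// With associated items sorted alphabetically, `IntoIter` lands
// before `Item` because 'n' < 't' in byte order.
impl<'a> IntoIterator for &'a Mounts {
    type IntoIter = std::slice::Iter<'a, String>;
    type Item = &'a String;

    fn into_iter(self) -> Self::IntoIter {
        self.inner.iter()
    }
}

fn main() {
    let mounts = Mounts {
        inner: vec!["config".into(), "log".into()],
    };
    for mount in &mounts {
        println!("{mount}");
    }
}
```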

rust/operator-binary/src/crd/tlscerts.rs

Lines changed: 7 additions & 9 deletions
```diff
@@ -47,17 +47,13 @@ pub fn tls_secret_names<'a>(
             names.push(secret_name);
         }
     }
-    if names.is_empty() {
-        None
-    } else {
-        Some(names)
-    }
+    if names.is_empty() { None } else { Some(names) }
 }
 
 pub fn convert_system_trust_store_to_pkcs12() -> Vec<String> {
-    vec![
-        format!("keytool -importkeystore -srckeystore {SYSTEM_TRUST_STORE} -srcstoretype jks -srcstorepass {SYSTEM_TRUST_STORE_PASSWORD} -destkeystore {STACKABLE_TRUST_STORE}/truststore.p12 -deststoretype pkcs12 -deststorepass {STACKABLE_TLS_STORE_PASSWORD} -noprompt"),
-    ]
+    vec![format!(
+        "keytool -importkeystore -srckeystore {SYSTEM_TRUST_STORE} -srcstoretype jks -srcstorepass {SYSTEM_TRUST_STORE_PASSWORD} -destkeystore {STACKABLE_TRUST_STORE}/truststore.p12 -deststoretype pkcs12 -deststorepass {STACKABLE_TLS_STORE_PASSWORD} -noprompt"
+    )]
 }
 
 pub fn import_truststore(secret_name: &str) -> Vec<String> {
@@ -66,6 +62,8 @@ pub fn import_truststore(secret_name: &str) -> Vec<String> {
 
     vec![
         format!("echo Importing [{mount_trust_store_path}] to [{trust_store_path}] ..."),
-        format!("keytool -importkeystore -srckeystore {mount_trust_store_path} -srcalias 1 -srcstorepass \"\" -destkeystore {trust_store_path} -destalias stackable-{secret_name} -storepass {STACKABLE_TLS_STORE_PASSWORD} -noprompt"),
+        format!(
+            "keytool -importkeystore -srckeystore {mount_trust_store_path} -srcalias 1 -srcstorepass \"\" -destkeystore {trust_store_path} -destalias stackable-{secret_name} -storepass {STACKABLE_TLS_STORE_PASSWORD} -noprompt"
+        ),
     ]
 }
```
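The first hunk here collapses a short `if`/`else` expression onto one line; rustfmt formats if/else expressions with no else-if chain on a single line when they fit its `single_line_if_else_max_width` budget (50 columns by default), and presumably the style-edition bump is what made the rule fire here. The other two hunks are the same `format!` wrapping seen earlier. A sketch of the collapsed form:

```rust
// Invented helper mirroring the shape of `tls_secret_names`.
fn some_if_nonempty(names: Vec<String>) -> Option<Vec<String>> {
    // Short enough for rustfmt's single-line if/else budget:
    if names.is_empty() { None } else { Some(names) }
}

fn main() {
    assert_eq!(some_if_nonempty(vec![]), None);
    assert!(some_if_nonempty(vec!["history-tls".to_string()]).is_some());
}
```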

rust/operator-binary/src/history/config/jvm.rs

Lines changed: 3 additions & 1 deletion
```diff
@@ -33,7 +33,9 @@ pub fn construct_history_jvm_args(
         format!(
             "-Djava.security.properties={VOLUME_MOUNT_PATH_CONFIG}/{JVM_SECURITY_PROPERTIES_FILE}"
         ),
-        format!("-javaagent:/stackable/jmx/jmx_prometheus_javaagent.jar={METRICS_PORT}:/stackable/jmx/config.yaml")
+        format!(
+            "-javaagent:/stackable/jmx/jmx_prometheus_javaagent.jar={METRICS_PORT}:/stackable/jmx/config.yaml"
+        ),
     ];
 
     if logdir.tls_enabled() {
```

rust/operator-binary/src/history/history_controller.rs

Lines changed: 8 additions & 8 deletions
```diff
@@ -11,13 +11,14 @@ use stackable_operator::{
         configmap::ConfigMapBuilder,
         meta::ObjectMetaBuilder,
         pod::{
-            container::ContainerBuilder, resources::ResourceRequirementsBuilder,
-            volume::VolumeBuilder, PodBuilder,
+            PodBuilder, container::ContainerBuilder, resources::ResourceRequirementsBuilder,
+            volume::VolumeBuilder,
         },
     },
     cluster_resources::{ClusterResourceApplyStrategy, ClusterResources},
     commons::product_image_selection::ResolvedProductImage,
     k8s_openapi::{
+        DeepMerge,
         api::{
             apps::v1::{StatefulSet, StatefulSetSpec},
             core::v1::{
@@ -26,17 +27,16 @@ use stackable_operator::{
             rbac::v1::{ClusterRole, RoleBinding, RoleRef, Subject},
         },
         apimachinery::pkg::apis::meta::v1::LabelSelector,
-        DeepMerge,
     },
     kube::{
-        core::{error_boundary, DeserializeGuard},
-        runtime::{controller::Action, reflector::ObjectRef},
         Resource, ResourceExt,
+        core::{DeserializeGuard, error_boundary},
+        runtime::{controller::Action, reflector::ObjectRef},
     },
     kvp::{Label, Labels, ObjectLabels},
     logging::controller::ReconcilerError,
     product_logging::{
-        framework::{calculate_log_volume_size_limit, vector_container, LoggingError},
+        framework::{LoggingError, calculate_log_volume_size_limit, vector_container},
         spec::{
             ConfigMapLogConfig, ContainerLogConfig, ContainerLogConfigChoice,
             CustomContainerLogConfig,
@@ -48,6 +48,7 @@ use stackable_operator::{
 use strum::{EnumDiscriminants, IntoStaticStr};
 
 use crate::{
+    Ctx,
     crd::{
         constants::{
             ACCESS_KEY_ID, APP_NAME, HISTORY_CONTROLLER_NAME, HISTORY_ROLE_NAME,
@@ -57,13 +58,12 @@ use crate::{
             VOLUME_MOUNT_NAME_CONFIG, VOLUME_MOUNT_NAME_LOG, VOLUME_MOUNT_NAME_LOG_CONFIG,
             VOLUME_MOUNT_PATH_CONFIG, VOLUME_MOUNT_PATH_LOG, VOLUME_MOUNT_PATH_LOG_CONFIG,
         },
-        history::{self, v1alpha1, HistoryConfig, SparkHistoryServerContainer},
+        history::{self, HistoryConfig, SparkHistoryServerContainer, v1alpha1},
         logdir::ResolvedLogDir,
         tlscerts, to_spark_env_sh_string,
     },
     history::operations::pdb::add_pdbs,
     product_logging::{self, resolve_vector_aggregator_address},
-    Ctx,
 };
 
 #[derive(Snafu, Debug, EnumDiscriminants)]
```
