Environment

Runtime Information

Name	Value
Java Home	/usr/lib/jvm/sapmachine-jre
Java Version	17.0.16 (SAP SE)
Scala Version	version 2.12.18
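
The same runtime facts can be read back from a live session. A minimal sketch in Scala, assuming a SparkSession named spark already exists in this application (comments show the values from this capture):

    // Minimal sketch: read the runtime details shown above from the running JVM/session.
    // Assumes an existing SparkSession `spark`.
    val javaHome     = System.getProperty("java.home")      // /usr/lib/jvm/sapmachine-jre
    val javaVersion  = System.getProperty("java.version")   // 17.0.16
    val javaVendor   = System.getProperty("java.vendor")    // SAP SE
    val scalaVersion = scala.util.Properties.versionString  // version 2.12.18
    println(s"Java Home: $javaHome")
    println(s"Java Version: $javaVersion ($javaVendor)")
    println(s"Scala Version: $scalaVersion")
    println(s"Spark Version: ${spark.version}")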

Spark Properties

Name	Value
spark.app.id	spark-fb863673200b4a4995c57b97e0b5a658
spark.app.initial.jar.urls	*********(redacted)
spark.app.name	crp-order-quantity-kpi-service
spark.app.startTime	1761120816743
spark.app.submitTime	1761120816602
spark.authenticate	true
spark.blockManager.port	7079
spark.default.parallelism	37
spark.delta.logStore.hdlfs.impl	com.sap.hana.datalake.files.HdlfsLogStore
spark.driver.extraJavaOptions	-Djava.net.preferIPv6Addresses=false -XX:+IgnoreUnrecognizedVMOptions --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.lang.invoke=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/sun.nio.cs=ALL-UNNAMED --add-opens=java.base/sun.security.action=ALL-UNNAMED --add-opens=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED -Djdk.reflect.useDirectMethodHandle=false -Duser.timezone=UTC -XX:MaxDirectMemorySize=1G
spark.driver.extraLibraryPath	/app/native-libs
spark.driver.host	10.96.10.30
spark.driver.memory	10g
spark.driver.memoryOverhead	2g
spark.driver.port	7078
spark.driver.userClassPathFirst	true
spark.driverEnv.LD_LIBRARY_PATH	/app/native-libs
spark.driverEnv.MALLOC_ARENA_MAX	2
spark.dynamicAllocation.cachedExecutorIdleTimeout	900s
spark.dynamicAllocation.enabled	true
spark.dynamicAllocation.executorAllocationRatio	0.25
spark.dynamicAllocation.executorIdleTimeout	300s
spark.dynamicAllocation.maxExecutors	24
spark.dynamicAllocation.minExecutors	3
spark.dynamicAllocation.schedulerBacklogTimeout	150s
spark.dynamicAllocation.shuffleTracking.enabled	true
spark.dynamicAllocation.shuffleTracking.timeout	900s
spark.eventLog.logStageExecutorMetrics	true
spark.executor.cores	5
spark.executor.extraJavaOptions	-Djava.net.preferIPv6Addresses=false -XX:+IgnoreUnrecognizedVMOptions --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.lang.invoke=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/sun.nio.cs=ALL-UNNAMED --add-opens=java.base/sun.security.action=ALL-UNNAMED --add-opens=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED -Djdk.reflect.useDirectMethodHandle=false -Duser.timezone=UTC -XX:MaxDirectMemorySize=1G
spark.executor.extraLibraryPath	/app/native-libs
spark.executor.id	driver
spark.executor.instances	3
spark.executor.memory	10g
spark.executor.memoryOverhead	4g
spark.executor.userClassPathFirst	true
spark.executorEnv.LD_LIBRARY_PATH	/app/native-libs
spark.executorEnv.MALLOC_ARENA_MAX	2
spark.executorEnv.QUEUE_NAME_FOR_DEMAND_WORKFLOWS	QUEUE_NAME_FOR_DEMAND_WORKFLOWS
spark.executorEnv.QUEUE_NAME_FOR_KPI_WORKFLOWS	CRP_ORDER_QUANTITY_KPI_SERVICE_PROD
spark.executorEnv.RUNTIME_NAMESPACE	prod
spark.executorEnv.grafana_domain	grafana.prod.predictivereplenishment.cloud.sap
spark.hadoop.fs.AbstractFileSystem.hdlfs.impl	com.sap.hana.datalake.files.Hdlfs
spark.hadoop.fs.hdlfs.direct-access.mode	EXPECT
spark.hadoop.fs.hdlfs.impl	com.sap.hana.datalake.files.HdlfsFileSystem
spark.hadoop.fs.hdlfs.impl.disable.cache	false
spark.hadoop.fs.hdlfs.ssl.keystores.factory.class	com.sap.s4hana.eureka.business.hdlfs.keystore.impl.HdlfsTenantKeyStoresFactory
spark.io.encryption.enabled	true
spark.jars	*********(redacted)
spark.kubernetes.authenticate.driver.serviceAccountName	crp-order-quantity-kpi-service
spark.kubernetes.container.image	deploy-releases-hyperspace-docker.common.repositories.cloud.sap/crp-order-quantity-kpi-service:LATEST-20250916034951-6eb41a03a395cd45c63abaacbfa42a8e40292e07
spark.kubernetes.container.image.pullSecrets	*********(redacted)
spark.kubernetes.driver.annotation.prometheus.io/path	=/metrics/executors/prometheus
spark.kubernetes.driver.annotation.prometheus.io/port	4040
spark.kubernetes.driver.annotation.prometheus.io/scrape	true
spark.kubernetes.driver.container.image	deploy-releases-hyperspace-docker.common.repositories.cloud.sap/crp-order-quantity-kpi-service:LATEST-20250916034951-6eb41a03a395cd45c63abaacbfa42a8e40292e07
spark.kubernetes.driver.label.app	crp-order-quantity-kpi-service-stream
spark.kubernetes.driver.label.spark_app	crp-order-quantity-kpi-service-stream
spark.kubernetes.driver.limit.cores	2
spark.kubernetes.driver.limit.memory	10g
spark.kubernetes.driver.ownPersistentVolumeClaim	false
spark.kubernetes.driver.pod.name	crp-order-quantity-kpi-service-6997fcd99-6l2gl
spark.kubernetes.driver.request.cores	2
spark.kubernetes.driver.requests.memory	10g
spark.kubernetes.driver.service.annotation.prometheus.io/path	/metrics/prometheus/
spark.kubernetes.driver.service.annotation.prometheus.io/port	4040
spark.kubernetes.driver.service.annotation.prometheus.io/scrape	true
spark.kubernetes.driverEnv.QUEUE_NAME_FOR_DEMAND_WORKFLOWS	QUEUE_NAME_FOR_DEMAND_WORKFLOWS
spark.kubernetes.driverEnv.QUEUE_NAME_FOR_KPI_WORKFLOWS	CRP_ORDER_QUANTITY_KPI_SERVICE_PROD
spark.kubernetes.driverEnv.RUNTIME_NAMESPACE	prod
spark.kubernetes.driverEnv.grafana_domain	grafana.prod.predictivereplenishment.cloud.sap
spark.kubernetes.executor.container.image	deploy-releases-hyperspace-docker.common.repositories.cloud.sap/crp-order-quantity-kpi-service:LATEST-20250916034951-6eb41a03a395cd45c63abaacbfa42a8e40292e07
spark.kubernetes.executor.deleteOnTermination	true
spark.kubernetes.executor.label.app	crp-order-quantity-kpi-service-stream
spark.kubernetes.executor.label.spark_app	crp-order-quantity-kpi-service-stream
spark.kubernetes.executor.label.spark_driver_pod	crp-order-quantity-kpi-service-6997fcd99-6l2gl
spark.kubernetes.executor.limit.cores	4500m
spark.kubernetes.executor.limit.memory	10g
spark.kubernetes.executor.podNamePrefix	crp-order-quantity-kpi-service-6997fcd99-6l2gl
spark.kubernetes.executor.podTemplateFile	/app/sparkExecutorTemplate.yaml
spark.kubernetes.executor.request.cores	4500m
spark.kubernetes.executor.requests.memory	10g
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path	/cache
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly	false
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName	OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit	64Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass	default
spark.kubernetes.file.upload.path	/tmp
spark.kubernetes.namespace	prod
spark.kubernetes.report.interval	30000
spark.kubernetes.submission.waitAppCompletion	true
spark.local.dir	/cache
spark.master	k8s://https://api.c-5ef592f.kyma.internal.live.k8s.ondemand.com:443
spark.metrics.appStatusSource.enabled	true
spark.metrics.conf	/etc/configuration/metrics.properties
spark.metrics.namespace	crp-order-quantity-kpi-service
spark.network.crypto.enabled	true
spark.rdd.compress	true
spark.repl.local.jars	*********(redacted)
spark.scheduler.allocation.file	/etc/configuration/fairscheduler.xml
spark.scheduler.mode	FAIR
spark.serializer	org.apache.spark.serializer.KryoSerializer
spark.sql.adaptive.coalescePartitions.minPartitionSize	50k
spark.sql.autoBroadcastJoinThreshold	524288
spark.sql.catalog.spark_catalog	org.apache.spark.sql.delta.catalog.DeltaCatalog
spark.sql.extensions	io.delta.sql.DeltaSparkSessionExtension
spark.sql.redaction.options.regex	(fs\.hdlfs\.ssl\.(key|cert)file)
spark.sql.redaction.regex	(fs\.hdlfs\.ssl\.(key|cert)file)
spark.sql.shuffle.partitions	37
spark.sql.sources.partitionOverwriteMode	DYNAMIC
spark.sql.streaming.stopTimeout	5000
spark.submit.deployMode	client
spark.submit.pyFiles	
spark.task.maxFailures	8
spark.ui.prometheus.enabled	true
spark.ui.retainedJobs	5000
spark.ui.retainedStages	5000
spark.ui.strictTransportSecurity	max-age=31536000
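
For illustration only, a few of the properties above could be set programmatically when building the session; in this deployment they are presumably supplied via spark-submit, the pod template, and operator configuration rather than in code. Keys and values below are copied from the table; the builder itself is a sketch, not the application's actual startup code:

    import org.apache.spark.sql.SparkSession

    // Sketch: a subset of the listed properties applied at session construction.
    val spark = SparkSession.builder()
      .appName("crp-order-quantity-kpi-service")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
      .config("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog")
      .config("spark.sql.shuffle.partitions", "37")
      .config("spark.dynamicAllocation.enabled", "true")
      .config("spark.dynamicAllocation.minExecutors", "3")
      .config("spark.dynamicAllocation.maxExecutors", "24")
      .getOrCreate()

    // The resolved configuration (essentially this table) can be dumped with:
    spark.conf.getAll.toSeq.sortBy(_._1).foreach { case (k, v) => println(s"$k\t$v") }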

Resource Profiles

Resource Profile Id	Resource Profile Contents
0
Executor Reqs:
	memoryOverhead: [amount: 4096]
	cores: [amount: 5]
	memory: [amount: 10240]
	offHeap: [amount: 0]
Task Reqs:
	cpus: [amount: 1.0]
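
Profile 0 is the default resource profile that Spark derives from the spark.executor.* settings above (5 cores, 10240 MiB memory, 4096 MiB overhead, 1 CPU per task). As a sketch, an equivalent profile could also be declared explicitly with Spark's ResourceProfile API; the usage line at the end is hypothetical:

    import org.apache.spark.resource.{ExecutorResourceRequests, ResourceProfileBuilder, TaskResourceRequests}

    // Sketch: an explicit profile mirroring default profile 0 above.
    val execReqs = new ExecutorResourceRequests()
      .cores(5)
      .memory("10g")
      .memoryOverhead("4g")
    val taskReqs = new TaskResourceRequests().cpus(1)

    val profile = new ResourceProfileBuilder()
      .require(execReqs)
      .require(taskReqs)
      .build()

    // Hypothetical usage: attach the profile to a stage's RDD
    // (stage-level scheduling needs dynamic allocation, which is enabled here).
    // someRdd.withResources(profile)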

Hadoop Properties

System Properties

Metrics Properties

Classpath Entries
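
The Hadoop Properties, System Properties, Metrics Properties, and Classpath Entries sections are empty in this capture. The full Environment data, including those sections, can also be pulled from the driver's monitoring REST API. A minimal sketch; the host and port are assumptions based on spark.driver.host and the UI port 4040 used in the Prometheus annotations, and the app id is taken from spark.app.id:

    import java.net.URI
    import java.net.http.{HttpClient, HttpRequest, HttpResponse}

    // Sketch: fetch the Environment page as JSON from the driver UI's REST API.
    val appId = "spark-fb863673200b4a4995c57b97e0b5a658"
    val url   = s"http://10.96.10.30:4040/api/v1/applications/$appId/environment"

    val client   = HttpClient.newHttpClient()
    val request  = HttpRequest.newBuilder(URI.create(url)).GET().build()
    val response = client.send(request, HttpResponse.BodyHandlers.ofString())
    println(response.body()) // JSON covering runtime info, Spark/system properties, and classpath entries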