Databricks Spark Config 1669383836
Databricks Spark Config 1669383836
Databricks Spark Config 1669383836
config_params (Python)
Overview
This notebook contains the complete set of Databricks Spark configuration parameters.
### Details
History
('spark.databricks.preemption.enabled', 'true'),
('spark.sql.hive.metastore.jars', '/databricks/databricks-hive/*'),
('spark.driver.tempDirectory', '/local_disk0/tmp'),
('spark.sql.warehouse.dir', 'dbfs:/user/hive/warehouse'),
('spark.databricks.managedCatalog.clientClassName','com.databricks.managedcatalog.ManagedCatalogClientImpl'),
('spark.app.id', 'local-1669374668342'),
('spark.hadoop.fs.gs.impl','shaded.databricks.com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem'),
('spark.executor.extraJavaOptions','-XX:+IgnoreUnrecognizedVMOptions --add-opens=java.base/java.lang=ALL-UNNAMED '),
('spark.hadoop.fs.fcfs-s3.impl.disable.cache', 'true'),
('spark.hadoop.fs.s3a.retry.limit', '20'),
('spark.sql.streaming.checkpointFileManagerClass','com.databricks.spark.sql.streaming.DatabricksCheckpointFileManager'),
('spark.databricks.service.dbutils.repl.backend','com.databricks.dbconnect.ReplDBUtils'),
('spark.streaming.driver.writeAheadLog.allowBatching', 'true'),
('spark.databricks.clusterSource', 'UI'),
('spark.databricks.clusterUsageTags.sparkVersion', '10.4.x-scala2.12'),
('spark.hadoop.hive.server2.transport.mode', 'http'),
('spark.executor.memory', '8278m'),
('spark.databricks.clusterUsageTags.effectiveSparkVersion','10.4.x-scala2.12'),
('spark.hadoop.fs.cpfs-adl.impl.disable.cache', 'true'),
('spark.databricks.clusterUsageTags.driverInstanceId', 'i-066fce11db7f7ff12'),
('spark.databricks.clusterUsageTags.hailEnabled', 'false'),
('spark.hadoop.fs.mcfs-s3.impl','com.databricks.sql.acl.fs.ManagedCatalogFileSystem'),
('spark.databricks.clusterUsageTags.clusterLogDeliveryEnabled', 'false'),
('spark.databricks.clusterUsageTags.containerType', 'LXC'),
('spark.eventLog.enabled', 'false'),
('spark.driver.extraJavaOptions','-XX:+IgnoreUnrecognizedVMOptions'),
('spark.databricks.clusterUsageTags.isIMv2Enabled', 'false'),
('spark.hadoop.hive.hmshandler.retry.interval', '2000'),
('spark.executor.tempDirectory', '/local_disk0/tmp'),
('spark.hadoop.fs.azure.authorization.caching.enable', 'false'),
('spark.hadoop.fs.fcfs-abfss.impl','com.databricks.sql.acl.fs.FixedCredentialsFileSystem'),
('spark.hadoop.mapred.output.committer.class','com.databricks.backend.daemon.data.client.DirectOutputCommitter'),
('spark.hadoop.hive.server2.thrift.http.port', '10000'),
('spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version', '2'),
('spark.sql.allowMultipleContexts', 'false'),
('spark.databricks.eventLog.enabled', 'true'),
('spark.home', '/databricks/spark'),
('spark.databricks.clusterUsageTags.clusterTargetWorkers', '0'),
('spark.hadoop.hive.server2.idle.operation.timeout', '7200000'),
('spark.task.reaper.enabled', 'true'),
('spark.storage.memoryFraction', '0.5'),
('spark.databricks.clusterUsageTags.clusterFirstOnDemand', '0'),
('spark.databricks.sql.configMapperClass','com.databricks.dbsql.config.SqlConfigMapperBridge'),
('spark.driver.maxResultSize', '4g'),
('spark.databricks.clusterUsageTags.sparkEnvVarContainsNewline', 'false'),
('spark.hadoop.fs.fcfs-s3.impl','com.databricks.sql.acl.fs.FixedCredentialsFileSystem'),
('spark.databricks.delta.multiClusterWrites.enabled', 'true'),
('spark.worker.cleanup.enabled', 'false'),
('spark.sql.legacy.createHiveTableByDefault', 'false'),
('spark.ui.port', '40001'),
('spark.hadoop.fs.fcfs-s3a.impl.disable.cache', 'true'),
('spark.databricks.workspace.matplotlibInline.enabled', 'true'),
('spark.hadoop.fs.s3a.attempts.maximum', '10'),
('spark.databricks.clusterUsageTags.enableCredentialPassthrough', 'false'),
('spark.databricks.clusterUsageTags.sparkEnvVarContainsDollarSign', 'false'),
('spark.databricks.clusterUsageTags.userProvidedRemoteVolumeType','ebs_volume_type: GENERAL_PURPOSE_SSD\n'),
('spark.databricks.clusterUsageTags.enableJdbcAutoStart', 'true'),
('spark.hadoop.fs.azure.user.agent.prefix', ''),
('spark.databricks.clusterUsageTags.enableGlueCatalogCredentialPassthrough','false'),
('spark.hadoop.fs.fcfs-s3n.impl','com.databricks.sql.acl.fs.FixedCredentialsFileSystem'),
('spark.hadoop.fs.s3a.retry.throttle.interval', '500ms'),
('spark.hadoop.fs.wasb.impl.disable.cache', 'true'),
('spark.databricks.clusterUsageTags.clusterLogDestination', ''),
('spark.cleaner.referenceTracking.blocking', 'false'),
('spark.databricks.clusterUsageTags.isSingleUserCluster', 'false'),
('spark.databricks.clusterUsageTags.clusterState', 'Pending'),
('spark.databricks.clusterUsageTags.sparkEnvVarContainsSingleQuotes','false'),
('spark.databricks.tahoe.logStore.azure.class','com.databricks.tahoe.store.AzureLogStore'),
('spark.hadoop.fs.azure.skip.metrics', 'true'),
('spark.hadoop.fs.s3.impl','shaded.databricks.org.apache.hadoop.fs.s3a.S3AFileSystem'),
('spark.hadoop.hive.hmshandler.retry.attempts', '10'),
('spark.scheduler.mode', 'FAIR'),
('spark.sql.sources.default', 'delta'),