Description
Hi guys, I've been stuck on this issue for two days! My machine is an Apple M2. The bucket is created fine, but the create-tables step then fails with the output below:
Bucket 'rainforest' created.
docker exec spark-master spark-sql --master spark://spark-master:7077 --deploy-mode client -f ./setup.sql
ERROR StatusLogger Could not reconfigure JMX
java.lang.NullPointerException
at java.base/jdk.internal.platform.cgroupv2.CgroupV2Subsystem.getInstance(Unknown Source)
at java.base/jdk.internal.platform.CgroupSubsystemFactory.create(Unknown Source)
at java.base/jdk.internal.platform.CgroupMetrics.getInstance(Unknown Source)
at java.base/jdk.internal.platform.SystemMetrics.instance(Unknown Source)
at java.base/jdk.internal.platform.Metrics.systemMetrics(Unknown Source)
at java.base/jdk.internal.platform.Container.metrics(Unknown Source)
at jdk.management/com.sun.management.internal.OperatingSystemImpl.<init>(Unknown Source)
at jdk.management/com.sun.management.internal.PlatformMBeanProviderImpl.getOperatingSystemMXBean(Unknown Source)
at jdk.management/com.sun.management.internal.PlatformMBeanProviderImpl$3.nameToMBeanMap(Unknown Source)
at java.management/java.lang.management.ManagementFactory.lambda$getPlatformMBeanServer$0(Unknown Source)
at java.base/java.util.stream.ReferencePipeline$7$1.accept(Unknown Source)
at java.base/java.util.stream.ReferencePipeline$2$1.accept(Unknown Source)
at java.base/java.util.HashMap$ValueSpliterator.forEachRemaining(Unknown Source)
at java.base/java.util.stream.AbstractPipeline.copyInto(Unknown Source)
at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(Unknown Source)
at java.base/java.util.stream.ForEachOps$ForEachOp.evaluateSequential(Unknown Source)
at java.base/java.util.stream.ForEachOps$ForEachOp$OfRef.evaluateSequential(Unknown Source)
at java.base/java.util.stream.AbstractPipeline.evaluate(Unknown Source)
at java.base/java.util.stream.ReferencePipeline.forEach(Unknown Source)
at java.management/java.lang.management.ManagementFactory.getPlatformMBeanServer(Unknown Source)
at org.apache.logging.log4j.core.jmx.Server.reregisterMBeansAfterReconfigure(Server.java:140)
at org.apache.logging.log4j.core.LoggerContext.setConfiguration(LoggerContext.java:632)
at org.apache.logging.log4j.core.LoggerContext.reconfigure(LoggerContext.java:694)
at org.apache.logging.log4j.core.LoggerContext.reconfigure(LoggerContext.java:711)
at org.apache.logging.log4j.core.LoggerContext.start(LoggerContext.java:253)
at org.apache.logging.log4j.core.impl.Log4jContextFactory.getContext(Log4jContextFactory.java:245)
at org.apache.logging.log4j.core.impl.Log4jContextFactory.getContext(Log4jContextFactory.java:47)
at org.apache.logging.log4j.LogManager.getContext(LogManager.java:176)
at org.apache.logging.log4j.LogManager.getLogger(LogManager.java:666)
at org.apache.logging.log4j.LogManager.getRootLogger(LogManager.java:700)
at org.apache.spark.internal.Logging.initializeLogging(Logging.scala:130)
at org.apache.spark.internal.Logging.initializeLogIfNecessary(Logging.scala:115)
at org.apache.spark.internal.Logging.initializeLogIfNecessary$(Logging.scala:109)
at org.apache.spark.deploy.SparkSubmit.initializeLogIfNecessary(SparkSubmit.scala:75)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:83)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1046)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1055)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
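From what I can tell, this first trace is the JVM failing while reading container (cgroup) metrics: the NullPointerException starts in CgroupV2Subsystem.getInstance, which looks like the known JDK bug JDK-8272124 that hit older JDK builds on cgroup-v2 hosts. Docker Desktop on Apple Silicon runs its Linux VM with cgroup v2, which would explain why it only bites on the M2. Log4j swallows this one (hence just the ERROR StatusLogger line), but the same half-initialized MBean server resurfaces fatally further down. A quick way to check which JDK the image ships, reusing the spark-master container from the command above:

# Print the JDK version inside the Spark container; older JDK 11 builds
# are affected by the cgroup v2 NPE, which later JDK updates fixed.
docker exec spark-master java -version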
:: loading settings :: url = jar:file:/opt/spark/jars/ivy-2.5.1.jar!/org/apache/ivy/core/settings/ivysettings.xml
Ivy Default Cache set to: /root/.ivy2/cache
The jars for the packages stored in: /root/.ivy2/jars
io.delta#delta-core_2.12 added as a dependency
org.apache.hadoop#hadoop-aws added as a dependency
org.postgresql#postgresql added as a dependency
:: resolving dependencies :: org.apache.spark#spark-submit-parent-65896a27-290e-4e08-80b8-a60e0d5d67a5;1.0
confs: [default]
found io.delta#delta-core_2.12;2.3.0 in central
found io.delta#delta-storage;2.3.0 in central
found org.antlr#antlr4-runtime;4.8 in central
found org.apache.hadoop#hadoop-aws;3.3.2 in central
found com.amazonaws#aws-java-sdk-bundle;1.11.1026 in central
found org.wildfly.openssl#wildfly-openssl;1.0.7.Final in central
found org.postgresql#postgresql;42.7.3 in central
found org.checkerframework#checker-qual;3.42.0 in central
downloading https://repo1.maven.org/maven2/io/delta/delta-core_2.12/2.3.0/delta-core_2.12-2.3.0.jar ...
[SUCCESSFUL ] io.delta#delta-core_2.12;2.3.0!delta-core_2.12.jar (492ms)
downloading https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.3.2/hadoop-aws-3.3.2.jar ...
[SUCCESSFUL ] org.apache.hadoop#hadoop-aws;3.3.2!hadoop-aws.jar (130ms)
downloading https://repo1.maven.org/maven2/org/postgresql/postgresql/42.7.3/postgresql-42.7.3.jar ...
[SUCCESSFUL ] org.postgresql#postgresql;42.7.3!postgresql.jar (191ms)
downloading https://repo1.maven.org/maven2/io/delta/delta-storage/2.3.0/delta-storage-2.3.0.jar ...
[SUCCESSFUL ] io.delta#delta-storage;2.3.0!delta-storage.jar (34ms)
downloading https://repo1.maven.org/maven2/org/antlr/antlr4-runtime/4.8/antlr4-runtime-4.8.jar ...
[SUCCESSFUL ] org.antlr#antlr4-runtime;4.8!antlr4-runtime.jar (76ms)
downloading https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/1.11.1026/aws-java-sdk-bundle-1.11.1026.jar ...
[SUCCESSFUL ] com.amazonaws#aws-java-sdk-bundle;1.11.1026!aws-java-sdk-bundle.jar (21449ms)
downloading https://repo1.maven.org/maven2/org/wildfly/openssl/wildfly-openssl/1.0.7.Final/wildfly-openssl-1.0.7.Final.jar ...
[SUCCESSFUL ] org.wildfly.openssl#wildfly-openssl;1.0.7.Final!wildfly-openssl.jar (67ms)
downloading https://repo1.maven.org/maven2/org/checkerframework/checker-qual/3.42.0/checker-qual-3.42.0.jar ...
[SUCCESSFUL ] org.checkerframework#checker-qual;3.42.0!checker-qual.jar (60ms)
:: resolution report :: resolve 2773ms :: artifacts dl 22517ms
:: modules in use:
com.amazonaws#aws-java-sdk-bundle;1.11.1026 from central in [default]
io.delta#delta-core_2.12;2.3.0 from central in [default]
io.delta#delta-storage;2.3.0 from central in [default]
org.antlr#antlr4-runtime;4.8 from central in [default]
org.apache.hadoop#hadoop-aws;3.3.2 from central in [default]
org.checkerframework#checker-qual;3.42.0 from central in [default]
org.postgresql#postgresql;42.7.3 from central in [default]
org.wildfly.openssl#wildfly-openssl;1.0.7.Final from central in [default]
---------------------------------------------------------------------
|                  |            modules            ||   artifacts   |
|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
---------------------------------------------------------------------
|      default     |   8   |   8   |   8   |   0   ||   8   |   8   |
---------------------------------------------------------------------
:: retrieving :: org.apache.spark#spark-submit-parent-65896a27-290e-4e08-80b8-a60e0d5d67a5
confs: [default]
8 artifacts copied, 0 already retrieved (227964kB/611ms)
25/03/03 11:02:38 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
Exception in thread "main" java.lang.ExceptionInInitializerError
at org.apache.spark.metrics.ExecutorMetricType$.<init>(ExecutorMetricType.scala:201)
at org.apache.spark.metrics.ExecutorMetricType$.<clinit>(ExecutorMetricType.scala)
at org.apache.spark.executor.ExecutorMetricsSource.<init>(ExecutorMetricsSource.scala:41)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:581)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2714)
at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:953)
at scala.Option.getOrElse(Option.scala:189)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:947)
at org.apache.spark.sql.hive.thriftserver.SparkSQLEnv$.init(SparkSQLEnv.scala:54)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.<init>(SparkSQLCLIDriver.scala:327)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:159)
at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
at java.base/java.lang.reflect.Method.invoke(Unknown Source)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:958)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1046)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1055)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.IllegalArgumentException: javax.management.InstanceNotFoundException: java.nio:type=BufferPool,name=direct
at java.management/java.lang.management.ManagementFactory.newPlatformMXBeanProxy(Unknown Source)
at org.apache.spark.metrics.MBeanExecutorMetricType.<init>(ExecutorMetricType.scala:64)
at org.apache.spark.metrics.DirectPoolMemory$.<init>(ExecutorMetricType.scala:184)
at org.apache.spark.metrics.DirectPoolMemory$.<clinit>(ExecutorMetricType.scala)
... 24 more
Caused by: javax.management.InstanceNotFoundException: java.nio:type=BufferPool,name=direct
at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.getMBean(Unknown Source)
at java.management/com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.isInstanceOf(Unknown Source)
at java.management/com.sun.jmx.mbeanserver.JmxMBeanServer.isInstanceOf(Unknown Source)
at java.management/java.lang.management.ManagementFactory.isInstanceOf(Unknown Source)
... 28 more
make: *** [create-tables] Error 1
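For anyone hitting the same wall: the fatal InstanceNotFoundException for java.nio:type=BufferPool,name=direct seems to be a knock-on effect of the first error. The NPE aborts platform MBeanServer initialization partway through, so the direct BufferPool MBean never gets registered, and Spark's DirectPoolMemory metric can't find it when the SparkContext starts. A possible workaround (an untested sketch, assuming the same container and setup.sql as in the command above) is to switch off the JVM's container detection so the cgroup code path never runs; JAVA_TOOL_OPTIONS is picked up by every JVM launched inside the container:

# Workaround sketch: disable HotSpot container support so the cgroup v2
# detection (and its NPE) is skipped entirely.
docker exec -e JAVA_TOOL_OPTIONS="-XX:-UseContainerSupport" \
  spark-master spark-sql --master spark://spark-master:7077 \
  --deploy-mode client -f ./setup.sql

The cleaner long-term fix would be rebuilding the image on a newer JDK where the bug is patched, since -XX:-UseContainerSupport also disables container-aware memory limits.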