forked from opensci/piflow
fix bug: SparkLauncher launches the wrong number of executors
parent a62302c680
commit f599d6e36d
@@ -34,7 +34,7 @@ object FlowLauncher {
   .setConf("spark.jars", PropertyUtil.getPropertyValue("piflow.bundle"))
   .setConf("spark.hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
   .setConf("spark.driver.memory", flow.getDriverMemory())
-  .setConf("spark.num.executors", flow.getExecutorNum())
+  .setConf("spark.executor.instances", flow.getExecutorNum())
   .setConf("spark.executor.memory", flow.getExecutorMem())
   .setConf("spark.executor.cores",flow.getExecutorCores())
   .addFile(PropertyUtil.getConfigureFile())
@@ -113,7 +113,7 @@ object API {
   .setVerbose(true)
   .setConf("spark.hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
   .setConf("spark.driver.memory", dirverMem)
-  .setConf("spark.num.executors",executorNum)
+  .setConf("spark.executor.instances",executorNum)
   .setConf("spark.executor.memory", executorMem)
   .setConf("spark.executor.cores",executorCores)
   .addFile(PropertyUtil.getConfigureFile())
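
Why the change matters: SparkLauncher forwards setConf keys to Spark verbatim, and "spark.num.executors" is not a property Spark reads, so the requested executor count never took effect; "spark.executor.instances" is the key Spark actually honours for a static executor count (e.g. on YARN). Below is a minimal, self-contained Scala sketch of the corrected usage. The jar path, main class, and resource values are placeholders for illustration, not taken from piflow.

import org.apache.spark.launcher.SparkLauncher

object LaunchExample {
  def main(args: Array[String]): Unit = {
    val launcher = new SparkLauncher()
      .setAppResource("/path/to/app.jar")      // placeholder jar
      .setMainClass("com.example.Main")        // placeholder main class
      .setMaster("yarn")
      .setDeployMode("cluster")
      .setConf("spark.driver.memory", "1g")
      // "spark.num.executors" is not a recognized Spark setting and is ignored;
      // "spark.executor.instances" is the property that controls the executor count.
      .setConf("spark.executor.instances", "4")
      .setConf("spark.executor.memory", "2g")
      .setConf("spark.executor.cores", "2")
      .setVerbose(true)

    // launch() returns the spark-submit child process; wait for it to finish.
    val process = launcher.launch()
    process.waitFor()
  }
}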