Remove server.ip configuration item

judy0131 2020-01-19 18:15:09 +08:00
parent 4a36e05c08
commit 714f48ce80
7 changed files with 62 additions and 26 deletions

config.properties

@@ -1,34 +1,23 @@
server.ip=10.0.86.98
server.port=8001
spark.master=yarn
spark.deploy.mode=cluster
#hdfs default file system
fs.defaultFS=hdfs://10.0.86.191:9000
#yarn resourcemanager hostname
yarn.resourcemanager.hostname=10.0.86.191
#yarn.resourcemanager.address=10.0.86.191:8032
#yarn.access.namenode=hdfs://10.0.86.191:9000
#yarn.stagingDir=hdfs://10.0.86.191:9000/tmp/
#yarn.jars=hdfs://10.0.86.191:9000/user/spark/share/lib/*.jar
#yarn.url=http://10.0.86.191:8088/ws/v1/cluster/apps/
#hive metastore uris
hive.metastore.uris=thrift://10.0.88.71:9083
#HDFS paths; these paths will be created automatically
#checkpoint.path=hdfs://10.0.86.89:9000/user/piflow/checkpoints/
#debug.path=hdfs://10.0.86.89:9000/user/piflow/debug/
#increment.path=hdfs://10.0.86.89:9000/user/piflow/increment/
#number of data rows to show in the log; set to 0 to disable
data.show=10
#monitor the throughput of flow
monitor.throughput=true
#server port
server.port=8001
#h2db port
h2.port=50001
#piflow.bundle=piflow-server/target/piflow-server-0.9.jar
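For reference, a minimal sketch of how the trimmed file could be consumed. PiFlow's real loader is PropertyUtil, whose internals are not part of this diff; the file location and the ConfigSketch name below are assumptions:

import java.io.FileInputStream
import java.util.Properties

object ConfigSketch {
  def main(args: Array[String]): Unit = {
    val prop = new Properties()
    val in = new FileInputStream("config.properties") // assumed location
    try prop.load(in) finally in.close()
    // server.ip is gone from this file; the server now discovers its own address at startup
    val port = prop.getProperty("server.port", "8001").toInt
    val h2Port = prop.getProperty("h2.port", "50001").toInt
    println(s"server.port=$port, h2.port=$h2Port")
  }
}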

FileUtil.scala

@@ -1,6 +1,6 @@
package cn.piflow.util
import java.io.File
import java.io.{File, PrintWriter}
object FileUtil {
@@ -11,6 +11,11 @@ object FileUtil {
  }
  def writeFile(text: String, path: String) = {
    val writer = new PrintWriter(new File(path))
    writer.write(text)
    writer.close()
  }
  def main(args: Array[String]): Unit = {
    val classPath = PropertyUtil.getClassPath()
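A quick usage sketch for the new helper; the target path is illustrative. Note that PrintWriter truncates an existing file, so each call replaces the previous contents:

// Overwrites /tmp/server.ip with a single key=value line.
FileUtil.writeFile("server.ip=10.0.86.98", "/tmp/server.ip")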

FlowLauncher.scala

@@ -39,6 +39,7 @@ object FlowLauncher {
.setConf("spark.executor.memory", flow.getExecutorMem())
.setConf("spark.executor.cores",flow.getExecutorCores())
.addFile(PropertyUtil.getConfigureFile())
.addFile(ServerIpUtil.getServerIpFile())
.setMainClass("cn.piflow.api.StartFlowMain")
.addAppArgs(flowJson)
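The new addFile call is SparkLauncher's equivalent of spark-submit --files: the server.ip file is shipped into the application's working directory on the cluster, which is why ServerIpUtil can later load it from user.dir. A standalone sketch, with the paths and bundle name assumed:

import org.apache.spark.launcher.SparkLauncher

val handle = new SparkLauncher()
  .setMaster("yarn")
  .setDeployMode("cluster")
  .setAppResource("/opt/piflow/piflow-server-0.9.jar") // assumed bundle location
  .setMainClass("cn.piflow.api.StartFlowMain")
  .addFile("/opt/piflow/server.ip") // lands in the driver's working directory
  .addAppArgs("{ \"flow\": ... }")  // flow JSON, elided here
  .startApplication()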

H2Util.scala

@@ -17,10 +17,7 @@ object H2Util {
  val CREATE_STOP_TABLE = "create table if not exists stop (flowId varchar(255), name varchar(255), state varchar(255), startTime varchar(255), endTime varchar(255))"
  val CREATE_THOUGHPUT_TABLE = "create table if not exists thoughput (flowId varchar(255), stopName varchar(255), portName varchar(255), count long)"
  val CREATE_FLAG_TABLE = "create table if not exists configFlag(id bigint auto_increment, item varchar(255), flag int, createTime varchar(255))"
  val serverIP = PropertyUtil.getPropertyValue("server.ip") + ":" + PropertyUtil.getPropertyValue("h2.port")
  //val ip = InetAddress.getLocalHost.getHostAddress
  //val serverIP = ip + ":" + PropertyUtil.getPropertyValue("h2.port")
  //print("getHostAddress:" + ip + " in H2Util!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!1")
  val serverIP = ServerIpUtil.getServerIp() + ":" + PropertyUtil.getPropertyValue("h2.port")
  val CONNECTION_URL = "jdbc:h2:tcp://" + serverIP + "/~/piflow;AUTO_SERVER=true"
  var connection: Connection = null
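After this change the JDBC URL is assembled from the runtime-discovered address rather than a static property. A connection sketch; the blank H2 credentials are an assumption:

import java.sql.DriverManager

Class.forName("org.h2.Driver")
val serverIP = ServerIpUtil.getServerIp() + ":" + PropertyUtil.getPropertyValue("h2.port")
val connection = DriverManager.getConnection(
  "jdbc:h2:tcp://" + serverIP + "/~/piflow;AUTO_SERVER=true", "sa", "") // default credentials assumed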

ServerIpUtil.scala

@@ -0,0 +1,41 @@
package cn.piflow.util

import java.io.{FileInputStream, InputStream}
import java.net.InetAddress
import java.util.Properties

object ServerIpUtil {
  private val prop: Properties = new Properties()
  var fis: InputStream = null
  var path: String = ""

  try {
    val userDir = System.getProperty("user.dir")
    path = userDir + "/server.ip"
    prop.load(new FileInputStream(path))
  } catch {
    case ex: Exception => ex.printStackTrace()
  }

  def getServerIpFile(): String = {
    path
  }

  def getServerIp(): String = {
    val obj = prop.get("server.ip")
    if (obj != null) {
      return obj.toString
    }
    null
  }

  def main(args: Array[String]): Unit = {
    val ip = InetAddress.getLocalHost.getHostAddress
    //write ip to server.ip file
    FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    println(ServerIpUtil.getServerIp())
  }
}
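One subtlety: prop is loaded exactly once, when the object is first touched, so getServerIp() reflects whatever server.ip file was on disk at that moment and returns null if the file was absent. In a launched Spark application the file shipped via addFile already sits in user.dir before any code runs, so the lookup is safe there; code that touches ServerIpUtil before HTTPService rewrites the file (see below) would see a stale or missing value.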

API.scala

@@ -121,6 +121,7 @@ object API {
.setConf("spark.executor.memory", executorMem)
.setConf("spark.executor.cores",executorCores)
.addFile(PropertyUtil.getConfigureFile())
.addFile(ServerIpUtil.getServerIpFile())
.setMainClass("cn.piflow.api.StartFlowMain")
.addAppArgs(flowJson.stripMargin)
//.redirectOutput(stdout)
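Same mechanism as in FlowLauncher above: the server.ip file rides along with every submitted application, so the launched process can resolve the server's address without a configured server.ip entry.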

HTTPService.scala

@@ -12,7 +12,7 @@ import akka.stream.ActorMaterializer
import cn.piflow.{FlowGroupExecution, ProjectExecution}
import cn.piflow.api.util.PropertyUtil
import cn.piflow.conf.util.{MapUtil, OptionUtil}
import cn.piflow.util.{HdfsUtil, IdGenerator, JsonUtil}
import cn.piflow.util._
import com.typesafe.akka.extension.quartz.QuartzSchedulerExtension
import com.typesafe.config.ConfigFactory
@@ -446,9 +446,11 @@ object HTTPService extends DefaultJsonProtocol with Directives with SprayJsonSupport {
  def run = {
    //val ip = PropertyUtil.getPropertyValue("server.ip")
    val ip = InetAddress.getLocalHost.getHostAddress
    print("getHostAddress:" + ip + " in HTTPService!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!1")
    //write ip to server.ip file
    FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val port = PropertyUtil.getIntPropertyValue("server.port")
    Http().bindAndHandleAsync(route, ip, port)
    println("Server:" + ip + ":" + port + " Started!!!")