update stop

parent ff2e1be674
commit 6dd3df9612
@@ -6,12 +6,12 @@
       {
         "uuid":"1111",
-        "name":"SelectHiveQLByJdbc",
-        "bundle":"cn.piflow.bundle.hive.SelectHiveQLByJdbc",
+        "name":"SelectHiveQLByJDBC",
+        "bundle":"cn.piflow.bundle.hive.SelectHiveQLByJDBC",
         "properties":{
           "hiveUser": "root",
           "hivePassword": "123456",
-          "jdbcUrl": "jdbc:hive2://192.168.3.140:10000/default",
+          "jdbcUrl": "jdbc:hive2://192.168.3.140:10000/test",
           "sql":"select * from test.csvparser"
         }
       },
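
Note: the renamed stop reads Hive over JDBC using the four properties above (this hunk is evidently the flow/hive/SelectHiveQLByJdbc.json that is renamed later in this commit). A minimal standalone sketch of what those properties amount to, assuming the stop issues the configured SQL over a stock Hive JDBC (hive2) connection — the driver class and calls below are standard Apache Hive / java.sql API, not code from this repo:

import java.sql.DriverManager

// Register the standard Hive JDBC driver (assumption: this is what the stop uses).
Class.forName("org.apache.hive.jdbc.HiveDriver")
// URL, user, and password taken from the flow JSON above.
val conn = DriverManager.getConnection(
  "jdbc:hive2://192.168.3.140:10000/test", "root", "123456")
val rs = conn.createStatement().executeQuery("select * from test.csvparser")
while (rs.next()) println(rs.getString(1))
conn.close()
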
@@ -23,7 +23,8 @@
         "csvSavePath":"hdfs://192.168.3.138:8020/test/",
         "header": "true",
         "delimiter":",",
-        "partition":"1"
+        "partition":"1",
+        "saveMode": "append"
       }
     }
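
Note: the added "saveMode": "append" presumably feeds Spark's DataFrameWriter save mode, so repeated runs append new files under csvSavePath instead of failing on the existing directory. A minimal sketch of the equivalent direct Spark calls (the property-to-option mapping here is an assumption from the names, not taken from the CsvSave stop's source):

import org.apache.spark.sql.{SaveMode, SparkSession}

val spark = SparkSession.builder().master("local[*]").appName("CsvSaveSketch").getOrCreate()
val df = spark.range(3).toDF("id")          // stand-in data
df.repartition(1)                           // "partition": "1"
  .write
  .mode(SaveMode.Append)                    // "saveMode": "append"
  .option("header", "true")                 // "header": "true"
  .option("delimiter", ",")                 // "delimiter": ","
  .csv("hdfs://192.168.3.138:8020/test/")   // "csvSavePath"
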
@@ -31,7 +32,7 @@
     ],
     "paths":[
       {
-        "from":"OptionalSelectHiveQL",
+        "from":"SelectHiveQLByJDBC",
         "outport":"",
         "inport":"",
         "to":"CsvSave"
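
Note: a path's "from" must name a stop defined in this flow; the change keeps it in sync with the stop's new "name" ("SelectHiveQLByJDBC"), while the old value "OptionalSelectHiveQL" no longer appears to match any stop here.
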
@@ -34,7 +34,7 @@ class JdbcReadFromOracleTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("JdbcReadFromOracleTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
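
Note: this and the remaining appName hunks are the same one-line fix, replacing a copy-pasted label with the owning test's name; appName only affects how the application is labelled in the Spark UI and logs. The corrected builder, completed with the getOrCreate() call the hunk truncates:

import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .master("local[*]")
  .appName("JdbcReadFromOracleTest")   // now matches the test class
  .config("spark.driver.memory", "1g")
  .config("spark.executor.memory", "2g")
  .config("spark.cores.max", "2")
  .getOrCreate()                       // not shown in the hunk above
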
@@ -30,7 +30,7 @@ class MysqlReadIncrementalTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("MysqlReadIncrementalTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")

@@ -1,9 +1,11 @@
 package cn.piflow.bundle.JDBC

+import java.net.InetAddress
+
 import cn.piflow.Runner
 import cn.piflow.conf.bean.FlowBean
 import cn.piflow.conf.util.{FileUtil, OptionUtil}
-import cn.piflow.util.PropertyUtil
+import cn.piflow.util.{PropertyUtil, ServerIpUtil}
 import org.apache.spark.sql.SparkSession
+import org.h2.tools.Server
 import org.junit.Test

@@ -24,13 +26,15 @@ class MysqlReadTest {
     //create flow
     val flowBean = FlowBean(map)
     val flow = flowBean.constructFlow()
+    val ip = InetAddress.getLocalHost.getHostAddress
+    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
+
+    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()
+
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("MysqlReadTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
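
Note: besides the appName fix, this test now records the host IP and starts an embedded H2 TCP server before executing the flow; org.h2.tools.Server.createTcpServer takes exactly these flag-style varargs. A minimal lifecycle sketch — why PiFlow needs the server (e.g. sharing run state between processes) is an assumption, and the stop() cleanup is mine, not in the commit:

import java.net.InetAddress
import org.h2.tools.Server

// Publish this host's IP; the PiFlow helpers in the diff persist it as
// "server.ip=<ip>" via cn.piflow.util.FileUtil / ServerIpUtil.
val ip = InetAddress.getLocalHost.getHostAddress

// Start an H2 server reachable over TCP on port 50001, allowing remote clients.
val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()
try {
  // ... run the flow against the embedded H2 server ...
} finally h2Server.stop()   // cleanup sketch; the tests in this commit do not stop it
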
@@ -30,7 +30,7 @@ class MysqlWriteTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("MysqlWriteTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")

@@ -30,7 +30,7 @@ class OracleReadByPartitionTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("OracleReadByPartitionTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")

@@ -35,7 +35,7 @@ class OracleReadTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("OracleReadTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")

@@ -30,7 +30,7 @@ class OracleWriteTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("OracleWriteTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")

@@ -30,7 +30,7 @@ class CsvSaveTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("piflow-hive-bundle")
+      .appName("CsvSaveTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")

@@ -28,7 +28,7 @@ class CsvStringParserTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("piflow-hive-bundle")
+      .appName("CsvStringParserTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")

@@ -30,7 +30,7 @@ class PutHiveModeTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("PutHiveModeTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")

@@ -30,7 +30,7 @@ class PutHiveQLTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("PutHiveQLTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")

@@ -30,7 +30,7 @@ class PutHiveStreamingTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("PutHiveStreamingTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
@@ -1,22 +1,24 @@
 package cn.piflow.bundle.hive

+import java.net.InetAddress
+
 import cn.piflow.Runner
 import cn.piflow.conf.bean.FlowBean
 import cn.piflow.conf.util.{FileUtil, OptionUtil}
-import cn.piflow.util.PropertyUtil
+import cn.piflow.util.{PropertyUtil, ServerIpUtil}
 import org.apache.spark.sql.SparkSession
+import org.h2.tools.Server
 import org.junit.Test

 import scala.util.parsing.json.JSON

-class SelectHiveQLByJdbcTest {
+class SelectHiveQLByJDBCTest {

   @Test
   def testFlow(): Unit ={

     //parse flow json
-    val file = "src/main/resources/flow/hive/SelectHiveQLByJdbc.json"
+    val file = "src/main/resources/flow/hive/SelectHiveQLByJDBC.json"
     val flowJsonStr = FileUtil.fileReader(file)
     val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
     println(map)

@@ -25,12 +27,15 @@ class SelectHiveQLByJdbcTest {
     val flowBean = FlowBean(map)
     val flow = flowBean.constructFlow()

+    val ip = InetAddress.getLocalHost.getHostAddress
+    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
+
+    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()
+
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("SelectHiveQLByJdbcTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
@@ -30,7 +30,7 @@ class SelectHiveQLTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("SelectHiveQLTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")

@@ -35,7 +35,7 @@ class HiveToNeo4jTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("HiveToNeo4jTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")

@@ -35,7 +35,7 @@ class RunCypherTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("RunCypherTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")

@@ -35,7 +35,7 @@ class GetFromSolrTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("GetFromSolrTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")

@@ -35,7 +35,7 @@ class PutIntoSolrTest {
     //execute flow
     val spark = SparkSession.builder()
      .master("local[*]")
-      .appName("CsvParserTest")
+      .appName("GetFromSolrTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
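
Note: the PutIntoSolrTest hunk sets its new appName to "GetFromSolrTest" rather than "PutIntoSolrTest", which looks like a copy-paste slip carried over from the previous hunk.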