From a94bcacd0138ab38c331f20263cc090ef2b43b07 Mon Sep 17 00:00:00 2001
From: bao319 <51151344+bao319@users.noreply.github.com>
Date: Tue, 31 Mar 2020 21:18:21 +0800
Subject: [PATCH] Delete FolderCsvParserTest.scala

---
 .../bundle/csv/FolderCsvParserTest.scala      | 52 -------------------
 1 file changed, 52 deletions(-)
 delete mode 100644 piflow-bundle/src/test/scala/cn/piflow/bundle/csv/FolderCsvParserTest.scala

diff --git a/piflow-bundle/src/test/scala/cn/piflow/bundle/csv/FolderCsvParserTest.scala b/piflow-bundle/src/test/scala/cn/piflow/bundle/csv/FolderCsvParserTest.scala
deleted file mode 100644
index 30a00fe..0000000
--- a/piflow-bundle/src/test/scala/cn/piflow/bundle/csv/FolderCsvParserTest.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-package cn.piflow.bundle.csv
-
-import cn.piflow.Runner
-import cn.piflow.conf.bean.FlowBean
-import cn.piflow.conf.util.{FileUtil, OptionUtil}
-import cn.piflow.util.PropertyUtil
-import org.apache.spark.sql.SparkSession
-import org.h2.tools.Server
-import org.junit.Test
-
-import scala.util.parsing.json.JSON
-
-class FolderCsvParserTest {
-
-  @Test
-  def testFlow(): Unit ={
-
-    //parse flow json
-    val file = "src/main/resources/flow/csv/FolderCsvParser.json"
-    val flowJsonStr = FileUtil.fileReader(file)
-    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
-    println(map)
-
-    //create flow
-    val flowBean = FlowBean(map)
-    val flow = flowBean.constructFlow()
-    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()
-
-    //execute flow
-    val spark = SparkSession.builder()
-      .master("local[*]")
-      .appName("piflow-hive-bundle")
-      .config("spark.driver.memory", "1g")
-      .config("spark.executor.memory", "2g")
-      .config("spark.cores.max", "2")
-      .config("hive.metastore.uris", PropertyUtil.getPropertyValue("hive.metastore.uris"))
-      .enableHiveSupport()
-      .getOrCreate()
-
-    val process = Runner.create()
-      .bind(classOf[SparkSession].getName, spark)
-      .bind("checkpoint.path", "")
-      .bind("debug.path","")
-      .start(flow);
-
-    process.awaitTermination();
-    val pid = process.pid();
-    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
-    spark.close();
-  }
-
-}