add hive group and jdbc group's getPropertyDescriptor api
Parent: 7ebaba5df2
Commit: 058325d089
PutHiveQL (package cn.piflow.bundle.hive)

@@ -1,7 +1,7 @@
 package cn.piflow.bundle.hive
 
 import cn.piflow.conf.bean.PropertyDescriptor
-import cn.piflow.conf.util.MapUtil
+import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import cn.piflow.conf.{ConfigurableStop, HiveGroup, StopGroup, StopGroupEnum}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.spark.sql.SparkSession
@@ -48,14 +48,17 @@ class PutHiveQL extends ConfigurableStop {
 
   override def getPropertyDescriptor(): List[PropertyDescriptor] = {
     var descriptor : List[PropertyDescriptor] = List()
-    val hiveQL_path = new PropertyDescriptor().name("hiveQL_Path").displayName("HiveQL_Path").defaultValue("").required(true)
-    val database=new PropertyDescriptor().name("database").displayName("DataBase").defaultValue("").required(true)
+    val hiveQL_path = new PropertyDescriptor().name("hiveQL_Path").displayName("HiveQL_Path").description("The path of the hiveQL file").defaultValue("").required(true)
+    val database=new PropertyDescriptor().name("database").displayName("DataBase").description("The database name which the hiveQL" +
+      "will execute on").defaultValue("").required(true)
     descriptor = hiveQL_path :: descriptor
     descriptor = database :: descriptor
     descriptor
   }
 
-  override def getIcon(): Array[Byte] = ???
+  override def getIcon(): Array[Byte] = {
+    ImageUtil.getImage("./src/main/resources/selectHiveQL.jpg")
+  }
 
   override def getGroup(): List[String] = {
     List(StopGroupEnum.HiveGroup.toString)
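All four stops in this commit build their descriptor lists the same way: construct each property with the fluent PropertyDescriptor builder and prepend it to a local List. As a sketch of that pattern (assuming, as the chained calls above imply, that each builder method returns the descriptor itself), the PutHiveQL properties could be expressed through a small helper; the helper is hypothetical and not part of the commit:

import cn.piflow.conf.bean.PropertyDescriptor

// Hypothetical helper, not in the commit: every property in this commit sets the same
// five fields, so the repetition can be factored out.
def requiredProperty(name: String, displayName: String, description: String): PropertyDescriptor =
  new PropertyDescriptor()
    .name(name)
    .displayName(displayName)
    .description(description)
    .defaultValue("")
    .required(true)

// PutHiveQL's two properties expressed with the helper. Note that the committed database
// description concatenates "...the hiveQL" + "will execute on" without a separating space,
// and that prepending with :: returns the properties in reverse declaration order.
val putHiveQLDescriptors: List[PropertyDescriptor] = List(
  requiredProperty("hiveQL_Path", "HiveQL_Path", "The path of the hiveQL file"),
  requiredProperty("database", "DataBase", "The database name which the hiveQL will execute on")
)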
PutHiveStreaming (package cn.piflow.bundle.hive)

@@ -3,7 +3,7 @@ package cn.piflow.bundle.hive
 import cn.piflow._
 import cn.piflow.conf.{ConfigurableStop, HiveGroup, StopGroup, StopGroupEnum}
 import cn.piflow.conf.bean.PropertyDescriptor
-import cn.piflow.conf.util.MapUtil
+import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import org.apache.spark.sql.SparkSession
 
 import scala.beans.BeanProperty
@@ -37,9 +37,18 @@ class PutHiveStreaming extends ConfigurableStop {
     table = MapUtil.get(map,"table").asInstanceOf[String]
   }
 
-  override def getPropertyDescriptor(): List[PropertyDescriptor] = ???
+  override def getPropertyDescriptor(): List[PropertyDescriptor] = {
+    var descriptor : List[PropertyDescriptor] = List()
+    val database=new PropertyDescriptor().name("database").displayName("DataBase").description("The database name").defaultValue("").required(true)
+    val table = new PropertyDescriptor().name("table").displayName("Table").description("The table name").defaultValue("").required(true)
+    descriptor = database :: descriptor
+    descriptor = table :: descriptor
+    descriptor
+  }
 
-  override def getIcon(): Array[Byte] = ???
+  override def getIcon(): Array[Byte] = {
+    ImageUtil.getImage("./src/main/resources/selectHiveQL.jpg")
+  }
 
   override def getGroup(): List[String] = {
     List(StopGroupEnum.HiveGroup.toString)
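The names declared in getPropertyDescriptor() are the same keys that setProperties() reads back with MapUtil.get, so a configuration for PutHiveStreaming is simply a map containing "database" and "table". A minimal sketch of that pairing, where the MapUtil.get usage mirrors the diff and the map values are made-up examples:

import cn.piflow.conf.util.MapUtil

// Illustrative configuration map; the values are examples, not from the commit.
val exampleConfig: Map[String, Any] = Map("database" -> "demo_db", "table" -> "demo_table")

// Same retrieval pattern PutHiveStreaming.setProperties uses for these keys.
val database = MapUtil.get(exampleConfig, "database").asInstanceOf[String]
val table = MapUtil.get(exampleConfig, "table").asInstanceOf[String]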
JdbcRead (package cn.piflow.bundle.jdbc)

@@ -3,7 +3,7 @@ package cn.piflow.bundle.jdbc
 import cn.piflow._
 import cn.piflow.conf.{ConfigurableStop, JdbcGroup, StopGroup, StopGroupEnum}
 import cn.piflow.conf.bean.PropertyDescriptor
-import cn.piflow.conf.util.MapUtil
+import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import org.apache.spark.sql.SparkSession
 
 import scala.beans.BeanProperty
@@ -13,7 +13,7 @@ class JdbcRead extends ConfigurableStop {
   val inportCount: Int = 0
   val outportCount: Int = 1
 
-  var driver:String = _
+  //var driver:String = _
   var url:String = _
   var user:String = _
   var password:String = _
@@ -40,16 +40,41 @@ class JdbcRead extends ConfigurableStop {
   }
 
   override def setProperties(map: Map[String, Any]): Unit = {
-    driver = MapUtil.get(map,"driver").asInstanceOf[String]
+    //driver = MapUtil.get(map,"driver").asInstanceOf[String]
     url = MapUtil.get(map,"url").asInstanceOf[String]
     user = MapUtil.get(map,"user").asInstanceOf[String]
    password = MapUtil.get(map,"password").asInstanceOf[String]
     sql = MapUtil.get(map,"sql").asInstanceOf[String]
   }
 
-  override def getPropertyDescriptor(): List[PropertyDescriptor] = ???
+  override def getPropertyDescriptor(): List[PropertyDescriptor] = {
+    var descriptor : List[PropertyDescriptor] = List()
+    //val driver=new PropertyDescriptor().name("driver").displayName("Driver").description("The driver name, for example com.mysql.jdbc.Driver").defaultValue("").required(true)
+    //descriptor = driver :: descriptor
 
-  override def getIcon(): Array[Byte] = ???
+    val url=new PropertyDescriptor().name("url").displayName("url").description("The Url, for example jdbc:mysql://127.0.0.1/dbname").defaultValue("").required(true)
+    descriptor = url :: descriptor
+
+    val user=new PropertyDescriptor().name("user").displayName("user").description("The user name of database").defaultValue("").required(true)
+    descriptor = user :: descriptor
+
+    val password=new PropertyDescriptor().name("password").displayName("password").description("The password of database").defaultValue("").required(true)
+    descriptor = password :: descriptor
+
+    val sql=new PropertyDescriptor().name("sql").displayName("sql").description("The sql sentence you want to execute").defaultValue("").required(true)
+    descriptor = sql :: descriptor
+
+    //descriptor = driver :: descriptor
+    descriptor = url :: descriptor
+    descriptor = user :: descriptor
+    descriptor = password :: descriptor
+    descriptor = sql :: descriptor
+    descriptor
+  }
+
+  override def getIcon(): Array[Byte] = {
+    ImageUtil.getImage("./src/main/resources/selectHiveQL.jpg")
+  }
 
   override def getGroup(): List[String] = {
     List(StopGroupEnum.JdbcGroup.toString)
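Worth noting in both JDBC stops: each property is prepended to descriptor twice, once immediately after it is created and again in the trailing block, so every PropertyDescriptor ends up in the returned list two times (the commented-out driver property is skipped in both places). A de-duplicated sketch of the JdbcRead list, reusing the hypothetical requiredProperty helper from the earlier sketch:

// Sketch only, not the committed code: the same four JdbcRead properties, each added once.
val jdbcReadDescriptors: List[PropertyDescriptor] = List(
  // "driver" stays commented out in the commit, so it is omitted here as well.
  requiredProperty("url", "url", "The Url, for example jdbc:mysql://127.0.0.1/dbname"),
  requiredProperty("user", "user", "The user name of database"),
  requiredProperty("password", "password", "The password of database"),
  requiredProperty("sql", "sql", "The sql sentence you want to execute")
)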
JdbcWrite (package cn.piflow.bundle.jdbc)

@@ -5,7 +5,7 @@ import java.util.Properties
 import cn.piflow._
 import cn.piflow.conf.{ConfigurableStop, JdbcGroup, StopGroup, StopGroupEnum}
 import cn.piflow.conf.bean.PropertyDescriptor
-import cn.piflow.conf.util.MapUtil
+import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import org.apache.spark.sql.{SaveMode, SparkSession}
 
 import scala.beans.BeanProperty
@@ -43,9 +43,31 @@ class JdbcWrite extends ConfigurableStop{
     dbtable = MapUtil.get(map,"dbtable").asInstanceOf[String]
   }
 
-  override def getPropertyDescriptor(): List[PropertyDescriptor] = ???
+  override def getPropertyDescriptor(): List[PropertyDescriptor] = {
+    var descriptor : List[PropertyDescriptor] = List()
 
-  override def getIcon(): Array[Byte] = ???
+    val url=new PropertyDescriptor().name("url").displayName("url").description("The Url, for example jdbc:mysql://127.0.0.1/dbname").defaultValue("").required(true)
+    descriptor = url :: descriptor
+
+    val user=new PropertyDescriptor().name("user").displayName("user").description("The user name of database").defaultValue("").required(true)
+    descriptor = user :: descriptor
+
+    val password=new PropertyDescriptor().name("password").displayName("password").description("The password of database").defaultValue("").required(true)
+    descriptor = password :: descriptor
+
+    val dbtable=new PropertyDescriptor().name("dbtable").displayName("dbtable").description("The table you want to write").defaultValue("").required(true)
+    descriptor = dbtable :: descriptor
+
+    descriptor = url :: descriptor
+    descriptor = user :: descriptor
+    descriptor = password :: descriptor
+    descriptor = dbtable :: descriptor
+    descriptor
+  }
+
+  override def getIcon(): Array[Byte] = {
+    ImageUtil.getImage("./src/main/resources/selectHiveQL.jpg")
+  }
 
   override def getGroup(): List[String] = {
     List(StopGroupEnum.JdbcGroup.toString)
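JdbcWrite's perform logic is not part of this diff, but its imports (java.util.Properties, SaveMode) and the four properties it now declares (url, user, password, dbtable) match the standard Spark JDBC write path. A generic sketch of that path, not the stop's actual implementation, with SaveMode.Append chosen arbitrarily:

import java.util.Properties
import org.apache.spark.sql.{DataFrame, SaveMode}

// Generic Spark JDBC write using the four properties JdbcWrite declares; illustrative only.
def writeJdbc(df: DataFrame, url: String, user: String, password: String, dbtable: String): Unit = {
  val props = new Properties()
  props.setProperty("user", user)
  props.setProperty("password", password)
  // SaveMode.Append is a placeholder; the commit does not show which mode JdbcWrite uses.
  df.write.mode(SaveMode.Append).jdbc(url, dbtable, props)
}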