rename PortEnum to Port

judy0131 2020-03-16 16:32:48 +08:00
parent bbd9505801
commit 7b68ff6005
150 changed files with 397 additions and 397 deletions
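The Port object that replaces PortEnum is not included in this extract. Below is a minimal, hypothetical sketch of what it could look like, inferred only from the call sites in the diffs that follow: the member names (NonePort, AnyPort, DefaultPort, LeftPort, RightPort, RoutePort) all appear in the changed lines, and since both Port.DefaultPort and Port.DefaultPort.toString are assigned into List[String], plain String constants would satisfy both usages. The actual definition in cn.piflow.conf may differ; the string values here are assumptions.

package cn.piflow.conf

// Hypothetical sketch only, not the piflow source. String constants keep
// both Port.DefaultPort and Port.DefaultPort.toString valid wherever a
// List[String] element is expected in the stops below.
object Port {
  val NonePort    = "None"     // assumed value
  val AnyPort     = "Any"      // assumed value
  val DefaultPort = "Default"  // assumed value
  val LeftPort    = "Left"     // assumed value
  val RightPort   = "Right"    // assumed value
  val RoutePort   = "Route"    // assumed value
}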

View File

@@ -19,8 +19,8 @@ class ChineseSpeechRecognition extends ConfigurableStop {
 val authorEmail: String = "huchuan0901@163.com"
 val description: String = "Speech recognition"
-val inportList: List[String] = List(PortEnum.AnyPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.AnyPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 //val url = "http://10.0.86.128:8081/service/classify/asr/"

View File

@@ -13,8 +13,8 @@ import org.apache.spark.sql.types.StructField
 class EmailClean extends ConfigurableStop{
 val authorEmail: String = "songdongze@cnic.cn"
 val description: String = "Clean email format data."
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var columnName:String=_

View File

@@ -17,8 +17,8 @@ import scala.reflect.macros.ParseException
 class IdentityNumberClean extends ConfigurableStop{
 val authorEmail: String = "06whuxx@163.com"
 val description: String = "Clean Id Card data."
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 //var regex:String =_
 var columnName:String=_
 //var replaceStr:String=_

View File

@@ -11,8 +11,8 @@ import org.apache.spark.sql.types.StructField
 class PhoneNumberClean extends ConfigurableStop{
 val authorEmail: String = "06whuxx@163.com"
 val description: String = "Clean phone number format data."
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var columnName:String=_
 def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {

View File

@@ -10,8 +10,8 @@ import org.apache.spark.sql.SparkSession
 class ProvinceClean extends ConfigurableStop{
 val authorEmail: String = "songdongze@cnic.cn"
 val description: String = "Clean email format data."
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var columnName:String=_

View File

@@ -11,8 +11,8 @@ import org.apache.spark.sql.types.StructField
 class TitleClean extends ConfigurableStop{
 val authorEmail: String = "06whuxx@163.com"
 val description: String = "Clean title format data."
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var columnName:String=_
 def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {

View File

@@ -4,15 +4,15 @@ import java.util.UUID
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.spark.sql.{DataFrame, SparkSession}
 class AddUUIDStop extends ConfigurableStop{
 override val authorEmail: String = "ygang@cnic.cn"
 override val description: String = "Increase the column with uuid"
-override val inportList: List[String] =List(PortEnum.DefaultPort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] =List(Port.DefaultPort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 var column:String=_

View File

@@ -10,8 +10,8 @@ class ConvertSchema extends ConfigurableStop {
 val authorEmail: String = "yangqidong@cnic.cn"
 val description: String = "Transform field name"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var schema:String = _

View File

@@ -14,8 +14,8 @@ class ConvertSchemaType extends ConfigurableStop {
 val authorEmail: String = "ygang@cnic.cn"
 val description: String = "Transform the schema dataType"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var stringType:String = _

View File

@@ -2,15 +2,15 @@ package cn.piflow.bundle.common
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.spark.sql.DataFrame
 class Distinct extends ConfigurableStop{
 override val authorEmail: String = "yangqidong@cnic.cn"
 override val description: String = "Reduplicate data according to all fields or fields you specify"
-override val inportList: List[String] =List(PortEnum.DefaultPort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] =List(Port.DefaultPort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 var files:String=_

View File

@@ -13,8 +13,8 @@ class DoFlatMapStop extends ConfigurableStop{
 val authorEmail: String = "ygang@cnic.cn"
 val description: String = "Do flat map"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var SCRIPT: String = _

View File

@@ -15,8 +15,8 @@ class DoMapStop extends ConfigurableStop{
 val authorEmail: String = "ygang@cnic.cn"
 val description: String = "Do map"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var schema: String = _
 var SCRIPT: String = _

View File

@@ -10,8 +10,8 @@ class DropField extends ConfigurableStop {
 val authorEmail: String = "ygang@cnic.cn"
 val description: String = "drop data field"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var schema:String = _

View File

@@ -16,8 +16,8 @@ class ExecuteSQLStop extends ConfigurableStop{
 val authorEmail: String = "ygang@cnic.cn"
 val description: String = "Execute sql"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var sql: String = _
 var tableName: String = _

View File

@@ -1,7 +1,7 @@
 package cn.piflow.bundle.common
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import org.apache.spark.sql.{Column, DataFrame}
@@ -9,8 +9,8 @@ import org.apache.spark.sql.{Column, DataFrame}
 class Filter extends ConfigurableStop{
 override val authorEmail: String = "xjzhu@cnic.cn"
 override val description: String = "Do filter by condition"
-override val inportList: List[String] = List(PortEnum.DefaultPort)
-override val outportList: List[String] = List(PortEnum.DefaultPort)
+override val inportList: List[String] = List(Port.DefaultPort)
+override val outportList: List[String] = List(Port.DefaultPort)
 var condition: String = _

View File

@@ -11,8 +11,8 @@ class Fork extends ConfigurableStop{
 val authorEmail: String = "xjzhu@cnic.cn"
 val description: String = "Fork data into different stops"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.AnyPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.AnyPort.toString)
 var outports : List[String] = _

View File

@@ -2,15 +2,15 @@ package cn.piflow.bundle.common
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.spark.sql.{Column, DataFrame}
 class Join extends ConfigurableStop{
 override val authorEmail: String = "yangqidong@cnic.cn"
 override val description: String = "Table connection, including full connection, left connection, right connection and inner connection"
-override val inportList: List[String] =List(PortEnum.LeftPort.toString,PortEnum.RightPort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] =List(Port.LeftPort.toString,Port.RightPort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 var joinMode:String=_
 var correlationField:String=_
@@ -19,8 +19,8 @@ class Join extends ConfigurableStop{
 override def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {
-val leftDF = in.read(PortEnum.LeftPort)
-val rightDF = in.read(PortEnum.RightPort)
+val leftDF = in.read(Port.LeftPort)
+val rightDF = in.read(Port.RightPort)
 var seq: Seq[String]= Seq()
 correlationField.split(",").foreach(x=>{

View File

@@ -11,8 +11,8 @@ class Merge extends ConfigurableStop{
 val authorEmail: String = "xjzhu@cnic.cn"
 val description: String = "Merge data into one stop"
-val inportList: List[String] = List(PortEnum.AnyPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.AnyPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var inports : List[String] = _

View File

@@ -9,8 +9,8 @@ class Route extends ConfigurableStop{
 val authorEmail: String = "xjzhu@cnic.cn"
 val description: String = "Route data by customizedProperties, key is port & value is filter condition"
-val inportList: List[String] = List(PortEnum.DefaultPort)
-val outportList: List[String] = List(PortEnum.RoutePort)
+val inportList: List[String] = List(Port.DefaultPort)
+val outportList: List[String] = List(Port.RoutePort)
 override val isCustomized: Boolean = true

View File

@@ -13,8 +13,8 @@ class SelectField extends ConfigurableStop {
 val authorEmail: String = "xjzhu@cnic.cn"
 val description: String = "Select data field"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var schema:String = _

View File

@@ -2,7 +2,7 @@ package cn.piflow.bundle.common
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.ImageUtil
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.sql.types.StructType
@@ -11,8 +11,8 @@ import org.apache.spark.sql.{DataFrame, Row, SparkSession}
 class Subtract extends ConfigurableStop{
 override val authorEmail: String = "yangqidong@cnic.cn"
 override val description: String = "Delete duplicates of the first and second tables from the first table"
-override val inportList: List[String] =List(PortEnum.AnyPort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] =List(Port.AnyPort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 override def setProperties(map: Map[String, Any]): Unit = {
 }

View File

@@ -9,8 +9,8 @@ class Trager extends ConfigurableStop{
 val authorEmail: String = "ygang@cnic.cn"
 val description: String = "Upstream and downstream middleware"
-val inportList: List[String] = List(PortEnum.AnyPort.toString)
-val outportList: List[String] = List(PortEnum.AnyPort.toString)
+val inportList: List[String] = List(Port.AnyPort.toString)
+val outportList: List[String] = List(Port.AnyPort.toString)
 def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {

View File

@@ -12,8 +12,8 @@ class CsvParser extends ConfigurableStop{
 val authorEmail: String = "xjzhu@cnic.cn"
 val description: String = "Parse csv file"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var csvPath: String = _
 var header: Boolean = _

View File

@@ -9,8 +9,8 @@ import org.apache.spark.sql.SaveMode
 class CsvSave extends ConfigurableStop{
 val authorEmail: String = "xjzhu@cnic.cn"
 val description: String = "Save data into csv file."
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var csvSavePath: String = _
 var header: Boolean = _

View File

@@ -1,7 +1,7 @@
 package cn.piflow.bundle.csv
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import org.apache.spark.SparkContext
@@ -11,8 +11,8 @@ import org.apache.spark.sql.{DataFrame, Row, SparkSession}
 class CsvStringParser extends ConfigurableStop{
 override val authorEmail: String = "yangqidong@cnic.cn"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 override val description: String = "Parse csv string"

View File

@@ -1,7 +1,7 @@
 package cn.piflow.bundle.csv
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import org.apache.spark.SparkContext
@@ -11,8 +11,8 @@ import org.apache.spark.sql.{DataFrame, Row, SparkSession}
 class FolderCsvParser extends ConfigurableStop{
 override val authorEmail: String = "yangqidong@cnic.cn"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 override val description: String = "Parse csv folder"
 var FolderPath:String=_

View File

@@ -2,7 +2,7 @@ package cn.piflow.bundle.es
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.spark.sql.SparkSession
@@ -11,8 +11,8 @@ class FetchEs extends ConfigurableStop {
 val authorEmail: String = "ygang@cnic.cn"
-override val inportList: List[String] = List(PortEnum.NonePort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] = List(Port.NonePort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 override val description: String = "Fetch data from Elasticsearch"
 var es_nodes : String = _

View File

@@ -2,7 +2,7 @@ package cn.piflow.bundle.es
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.spark.sql.SparkSession
 import org.elasticsearch.spark.sql.EsSparkSQL
@@ -12,8 +12,8 @@ class PutEs extends ConfigurableStop {
 override val description: String = "Put data into Elasticsearch"
 val authorEmail: String = "ygang@cnic.cn"
-override val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-override val outportList: List[String] = List(PortEnum.NonePort.toString)
+override val inportList: List[String] = List(Port.DefaultPort.toString)
+override val outportList: List[String] = List(Port.NonePort.toString)
 var es_nodes : String = _
 var es_port : String = _

View File

@@ -2,15 +2,15 @@ package cn.piflow.bundle.es
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.spark.sql.SparkSession
 class QueryEs extends ConfigurableStop {
 val authorEmail: String = "ygang@cnic.cn"
-override val inportList: List[String] = List(PortEnum.NonePort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] = List(Port.NonePort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 override val description: String = "Query data from Elasticsearch"
 var es_nodes : String = _

View File

@@ -16,8 +16,8 @@ class ExcelParser extends ConfigurableStop{
 val authorEmail: String = "ygang@cnic.cn"
 val description: String = "Parse excel file to json"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var cachePath: String = _

View File

@@ -12,8 +12,8 @@ import org.apache.hadoop.fs.{FileSystem, Path}
 class FetchFile extends ConfigurableStop{
 val authorEmail: String = "06whuxx@163.com"
 val description: String = "Fetch file from hdfs to local"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var hdfs_path:String =_
 var local_path:String=_
 var fs:FileSystem=null

View File

@@ -15,8 +15,8 @@ import org.apache.spark.sql.SparkSession
 class PutFile extends ConfigurableStop{
 val authorEmail: String = "06whuxx@163.com"
 val description: String = "Put local file to hdfs"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var hdfs_path:String =_
 var local_path:String=_
 var fs:FileSystem=null

View File

@@ -11,8 +11,8 @@ import org.apache.spark.sql.SparkSession
 class RegexTextProcess extends ConfigurableStop{
 val authorEmail: String = "06whuxx@163.com"
 val description: String = "Use regex to replace text"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var regex:String =_
 var columnName:String=_
 var replaceStr:String=_

View File

@@ -5,7 +5,7 @@ import java.util.regex.Pattern
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import com.enterprisedt.net.ftp._
 import org.apache.hadoop.conf.Configuration
@@ -17,8 +17,8 @@ import scala.collection.mutable.ArrayBuffer
 class LoadFromFtpToHDFS extends ConfigurableStop{
 override val authorEmail: String = "yangqidong@cnic.cn"
 override val description: String = "Download files or folders to HDFS"
-override val inportList: List[String] = List(PortEnum.NonePort.toString)
-override val outportList: List[String] = List(PortEnum.NonePort.toString)
+override val inportList: List[String] = List(Port.NonePort.toString)
+override val outportList: List[String] = List(Port.NonePort.toString)
 var ftp_url:String =_
 var port:String=_

View File

@@ -4,7 +4,7 @@ import java.io.{DataOutputStream, File, InputStream, OutputStream}
 import java.util
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import sun.net.TelnetOutputStream
@@ -15,8 +15,8 @@ import scala.reflect.io.Directory
 class UploadToFtp extends ConfigurableStop{
 val authorEmail: String = "xiaoxiao@cnic.cn"
 val description: String = "Upload file to ftp server"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.NonePort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.NonePort.toString)
 var url_str:String =_
 var port:Int=_
 var username:String=_

View File

@@ -1,7 +1,7 @@
 package cn.piflow.bundle.graphx
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import org.apache.spark.graphx._

View File

@@ -1,7 +1,7 @@
 package cn.piflow.bundle.graphx
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import org.apache.spark.sql.SparkSession
@@ -10,7 +10,7 @@ class LoadGraph extends ConfigurableStop {
 val authorEmail: String = "06whuxx@163.com"
 val description: String = "Load data and construct a graph"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
 var edgePort : String = "edges"

View File

@@ -4,7 +4,7 @@ package cn.piflow.bundle.hdfs
 import cn.piflow._
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import org.apache.spark.sql.SparkSession
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.FileSystem
@@ -14,8 +14,8 @@ import org.apache.hadoop.fs.Path
 class DeleteHdfs extends ConfigurableStop{
 override val authorEmail: String = "ygang@cnic.com"
-override val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] = List(Port.DefaultPort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 override val description: String = "Delete file or directory on hdfs"
 var hdfsUrl :String= _

View File

@@ -3,15 +3,15 @@ package cn.piflow.bundle.hdfs
 import cn.piflow._
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import org.apache.spark.sql.SparkSession
 class GetHdfs extends ConfigurableStop{
 override val authorEmail: String = "ygang@cnic.com"
 override val description: String = "Get data from hdfs"
-override val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] = List(Port.DefaultPort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 var hdfsUrl : String=_
 var hdfsPath :String= _

View File

@@ -3,7 +3,7 @@ package cn.piflow.bundle.hdfs
 import cn.piflow._
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
 import org.apache.spark.rdd.RDD
@@ -17,8 +17,8 @@ import scala.collection.mutable.ArrayBuffer
 class ListHdfs extends ConfigurableStop{
 override val authorEmail: String = "ygang@cnic.com"
-override val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] = List(Port.DefaultPort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 override val description: String = "Retrieve a list of files from hdfs"
 var HDFSPath :String= _

View File

@@ -4,7 +4,7 @@ package cn.piflow.bundle.hdfs
 import cn.piflow._
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.FileSystem
 import org.apache.spark.sql.SparkSession
@@ -12,8 +12,8 @@ import org.apache.spark.sql.SparkSession
 class PutHdfs extends ConfigurableStop{
 override val authorEmail: String = "ygang@cnic.com"
-override val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] = List(Port.DefaultPort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 override val description: String = "Put data to hdfs"
 var hdfsPath :String= _

View File

@@ -2,7 +2,7 @@ package cn.piflow.bundle.hdfs
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
@@ -18,8 +18,8 @@ class SaveToHdfs extends ConfigurableStop {
 override val description: String = "Put data to hdfs "
 val authorEmail: String = "ygang@cnic.cn"
-override val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] = List(Port.DefaultPort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 var hdfsDirPath :String= _
 var hdfsUrl :String= _

View File

@@ -4,7 +4,7 @@ import java.util.regex.Pattern
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
@@ -17,8 +17,8 @@ import scala.collection.mutable.ArrayBuffer
 class SelectFilesByName extends ConfigurableStop{
 override val authorEmail: String = "yangqidong@cnic.cn"
 override val description: String = "Select files by file name"
-override val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] = List(Port.DefaultPort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 var HDFSUrl:String=_
 var HDFSPath:String=_

View File

@@ -19,8 +19,8 @@ import scala.collection.mutable.ArrayBuffer
 class UnzipFilesOnHDFS extends ConfigurableStop {
 val authorEmail: String = "yangqidong@cnic.cn"
 val description: String = "Unzip files on hdfs"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var isCustomize:String=_
 var hdfsUrl:String=_

View File

@@ -1,7 +1,7 @@
 package cn.piflow.bundle.hive
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import org.apache.spark.SparkContext
@@ -13,8 +13,8 @@ import org.apache.spark.sql.{DataFrame, Row, SQLContext, SparkSession}
 class OptionalSelectHiveQL extends ConfigurableStop {
 override val authorEmail: String = "xiaomeng7890@gmail.com"
 override val description: String = "some hive can only achieve by jdbc, this stop is designed for this"
-override val inportList: List[String] = List(PortEnum.NonePort)
-override val outportList: List[String] = List(PortEnum.DefaultPort)
+override val inportList: List[String] = List(Port.NonePort)
+override val outportList: List[String] = List(Port.DefaultPort)
 private val driverName = "org.apache.hive.jdbc.HiveDriver"
 var hiveUser : String = _

View File

@@ -10,8 +10,8 @@ class PutHive extends ConfigurableStop {
 val authorEmail: String = "xjzhu@cnic.cn"
 val description: String = "Save data to hive by overwrite mode"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.NonePort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.NonePort.toString)
 var database:String = _
 var table:String = _

View File

@@ -11,8 +11,8 @@ class PutHiveQL extends ConfigurableStop {
 val authorEmail: String = "xiaoxiao@cnic.cn"
 val description: String = "Execute hiveQL script"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var database:String =_

View File

@@ -12,8 +12,8 @@ class PutHiveStreaming extends ConfigurableStop {
 val authorEmail: String = "xjzhu@cnic.cn"
 val description: String = "Save data to hive"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var database:String = _
 var table:String = _

View File

@@ -14,8 +14,8 @@ class SelectHiveQL extends ConfigurableStop {
 val authorEmail: String = "xjzhu@cnic.cn"
 val description: String = "Execute select clause of hiveQL"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var hiveQL:String = _

View File

@@ -16,8 +16,8 @@ import org.apache.spark.sql.{DataFrame, Row, SparkSession}
 class FileDownHDFS extends ConfigurableStop{
 val authorEmail: String = "yangqidong@cnic.cn"
 val description: String = "Download url to hdfs"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var url_str:String =_
 var savePath:String=_

View File

@@ -4,7 +4,7 @@ import java.util
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.http.client.methods.{CloseableHttpResponse, HttpGet}
 import org.apache.http.impl.client.HttpClients
@@ -22,8 +22,8 @@ class GetUrl extends ConfigurableStop{
 override val authorEmail: String = "ygang@cnic.com"
 override val description: String = "Http Get"
-override val inportList: List[String] = List(PortEnum.NonePort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] = List(Port.NonePort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 var url :String= _

View File

@@ -7,7 +7,7 @@ import java.util
 import cn.piflow._
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FSDataInputStream, FileSystem, Path}
 import org.apache.http.client.methods._
@@ -24,8 +24,8 @@ import scala.collection.mutable.{ArrayBuffer, ListBuffer}
 class InvokeUrl extends ConfigurableStop{
 override val authorEmail: String = "ygang@cnic.com"
-override val inportList: List[String] = List(PortEnum.NonePort.toString)
-override val outportList: List[String] = List(PortEnum.NonePort.toString)
+override val inportList: List[String] = List(Port.NonePort.toString)
+override val outportList: List[String] = List(Port.NonePort.toString)
 override val description: String = "Http Invoke"

View File

@@ -5,7 +5,7 @@ import java.net.URI
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.commons.httpclient.HttpClient
 import org.apache.hadoop.conf.Configuration
@@ -19,8 +19,8 @@ import org.apache.spark.sql.SparkSession
 class PostUrl extends ConfigurableStop{
 override val authorEmail: String = "ygang@cnic.com"
-override val inportList: List[String] = List(PortEnum.NonePort.toString)
-override val outportList: List[String] = List(PortEnum.NonePort.toString)
+override val inportList: List[String] = List(Port.NonePort.toString)
+override val outportList: List[String] = List(Port.NonePort.toString)
 override val description: String = "HTTP Post"
 var url : String= _

View File

@@ -19,8 +19,8 @@ class AnimalClassification extends ConfigurableStop {
 val authorEmail: String = "huchuan0901@163.com"
 val description: String = "Image classification"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 //val url = "http://10.0.86.128:8081/service/classify/dogorcat/"

View File

@@ -7,7 +7,7 @@ import java.util.Date
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.types.{StringType, StructField, StructType}
@@ -21,8 +21,8 @@ import scala.collection.mutable.ArrayBuffer
 class spider extends ConfigurableStop{
 override val authorEmail: String = "yangqidong@cnic.cn"
 override val description: String = "Crawl data from websites"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var rootUrl:String=_

View File

@@ -11,8 +11,8 @@ import scala.beans.BeanProperty
 class JdbcRead extends ConfigurableStop {
 val authorEmail: String = "xjzhu@cnic.cn"
 val description: String = "Read data from jdbc database"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 //var driver:String = _
 var url:String = _

View File

@@ -17,8 +17,8 @@ class JdbcReadFromOracle extends ConfigurableStop{
 val authorEmail: String = "yangqidong@cnic.cn"
 val description: String = "Read from oracle"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var url:String = _
 var user:String = _

View File

@@ -15,8 +15,8 @@ class JdbcWrite extends ConfigurableStop{
 val authorEmail: String = "xjzhu@cnic.cn"
 val description: String = "Write data into jdbc database"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.NonePort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.NonePort.toString)
 var url:String = _
 var user:String = _

View File

@@ -12,8 +12,8 @@ class JdbcWriteToOracle extends ConfigurableStop{
 val authorEmail: String = "yangqidong@cnic.cn"
 val description: String = "Write data to oracle"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var url:String = _
 var user:String = _

View File

@@ -1,7 +1,7 @@
 package cn.piflow.bundle.jdbc
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableIncrementalStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableIncrementalStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import org.apache.spark.sql.SparkSession
@@ -14,8 +14,8 @@ class MysqlReadIncremental extends ConfigurableIncrementalStop{
 override var incrementalStart: String = _
 override val authorEmail: String = "xjzhu@cnic.cn"
 override val description: String = "Read data from jdbc database"
-override val inportList: List[String] = List(PortEnum.NonePort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] = List(Port.NonePort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 //var driver:String = _
 var url:String = _

View File

@@ -1,7 +1,7 @@
 package cn.piflow.bundle.jdbc
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import org.apache.spark.sql.SparkSession
@@ -12,8 +12,8 @@ import org.apache.spark.sql.SparkSession
 class OracleRead extends ConfigurableStop{
 override val authorEmail: String = "xjzhu@cnic.cn"
 override val description: String = "Read data From oracle"
-override val inportList: List[String] = List(PortEnum.DefaultPort)
-override val outportList: List[String] = List(PortEnum.DefaultPort)
+override val inportList: List[String] = List(Port.DefaultPort)
+override val outportList: List[String] = List(Port.DefaultPort)
 var url:String = _
 var user:String = _

View File

@@ -2,7 +2,7 @@ package cn.piflow.bundle.jdbc
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.spark.sql.SparkSession
@@ -12,8 +12,8 @@ import org.apache.spark.sql.SparkSession
 class OracleReadByPartition extends ConfigurableStop{
 override val authorEmail: String = "xjzhu@cnic.cn"
 override val description: String = "Read data From oracle"
-override val inportList: List[String] = List(PortEnum.DefaultPort)
-override val outportList: List[String] = List(PortEnum.DefaultPort)
+override val inportList: List[String] = List(Port.DefaultPort)
+override val outportList: List[String] = List(Port.DefaultPort)
 var url:String = _
 var user:String = _

View File

@@ -4,7 +4,7 @@ import java.sql.{Connection, DriverManager, ResultSet, Statement}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.types.{StringType, StructField, StructType}
@@ -16,8 +16,8 @@ import scala.collection.mutable.ArrayBuffer
 class SelectImpala extends ConfigurableStop{
 override val authorEmail: String = "yangqidong@cnic.cn"
 override val description: String = "Get data from impala"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var url:String=_
 var user:String=_

View File

@@ -1,15 +1,15 @@
 package cn.piflow.bundle.json
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import org.apache.spark.sql.{DataFrame, SparkSession}
 class EvaluateJsonPath extends ConfigurableStop{
 override val authorEmail: String = "yangqidong@cnic.cn"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 override val description: String = "Parse multiple json files"

View File

@@ -5,7 +5,7 @@ import java.net.URI
 import cn.piflow.bundle.util.JsonUtil
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import org.apache.hadoop.conf.Configuration
@@ -20,8 +20,8 @@ import org.apache.spark.sql.{DataFrame, SQLContext}
 class FolderJsonParser extends ConfigurableStop{
 override val authorEmail: String = "yangqidong@cnic.cn"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 override val description: String ="Parse json folder"
 var FolderPath:String = _

View File

@@ -13,8 +13,8 @@ class JsonParser extends ConfigurableStop{
 val authorEmail: String = "xjzhu@cnic.cn"
 val description: String = "Parse json file"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var jsonPath: String = _
 var tag : String = _

View File

@@ -12,8 +12,8 @@ class JsonSave extends ConfigurableStop{
 val authorEmail: String = "xjzhu@cnic.cn"
 val description: String = "Save data into json file"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.NonePort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.NonePort.toString)
 var jsonSavePath: String = _

View File

@@ -1,7 +1,7 @@
 package cn.piflow.bundle.json
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import org.apache.spark.sql.SparkSession
@@ -9,8 +9,8 @@ import org.apache.spark.sql.SparkSession
 class JsonStringParser extends ConfigurableStop{
 val authorEmail: String = "xjzhu@cnic.cn"
 val description: String = "Parse json string"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var jsonString: String = _

View File

@@ -2,7 +2,7 @@ package cn.piflow.bundle.json
 import cn.piflow.bundle.util.JsonUtil
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import org.apache.spark.sql
@@ -12,8 +12,8 @@ import scala.util.control.Breaks.{break, breakable}
 class MultiFolderJsonParser extends ConfigurableStop{
 val authorEmail: String = "yangqidong@cnic.cn"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 val description: String = "Parse json folders"
 var jsonPathes: String = _

View File

@@ -20,8 +20,8 @@ import org.apache.kafka.common.serialization.StringSerializer
 class ReadFromKafka extends ConfigurableStop{
 val description: String = "Read data from kafka"
-val inportList: List[String] = List(PortEnum.NonePort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.NonePort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var kafka_host:String =_
 var topic:String=_
 var schema:String=_

View File

@@ -17,8 +17,8 @@ import scala.collection.mutable
 class WriteToKafka extends ConfigurableStop{
 val description: String = "Write data to kafka"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.NonePort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.NonePort.toString)
 var kafka_host:String =_
 var topic:String=_

View File

@@ -1,7 +1,7 @@
 package cn.piflow.bundle.memcached
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import com.danga.MemCached.{MemCachedClient, SockIOPool}
@@ -14,8 +14,8 @@ import scala.collection.mutable
 class ComplementByMemcache extends ConfigurableStop {
 override val authorEmail: String = "yangqidong@cnic.cn"
 override val description: String = "Complement by Memcache"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var servers:String=_ //Server address and port number,If you have multiple servers, use "," segmentation.
 var keyFile:String=_ //The field you want to use as a query condition

View File

@@ -1,7 +1,7 @@
 package cn.piflow.bundle.memcached
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import com.danga.MemCached.{MemCachedClient, SockIOPool}
@@ -16,8 +16,8 @@ import scala.collection.mutable.ArrayBuffer
 class GetMemcache extends ConfigurableStop{
 override val authorEmail: String = "yangqidong@cnic.cn"
 override val description: String = "Get data from memache"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var servers:String=_ //Server address and port number,If you have multiple servers, use "," segmentation.
 var keyFile:String=_ //The field you want to use as a query condition

View File

@@ -1,7 +1,7 @@
 package cn.piflow.bundle.memcached
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
 import com.danga.MemCached.{MemCachedClient, SockIOPool}
@@ -10,8 +10,8 @@ import org.apache.spark.sql.{DataFrame, Row, SparkSession}
 class PutMemcache extends ConfigurableStop{
 override val authorEmail: String = "yangqidong@cnic.cn"
 override val description: String = "Put data to memcache"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.NonePort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.NonePort.toString)
 var servers:String=_ //Server address and port number,If you have multiple servers, use "," segmentation.
 var keyFile:String=_ //You want to be used as a field for key.

View File

@@ -6,7 +6,7 @@ import java.util.regex.{Matcher, Pattern}
 import cn.piflow.bundle.microorganism.util.BioProject
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
@@ -17,8 +17,8 @@ import org.json.{JSONArray, JSONObject, XML}
 class BioProjetData extends ConfigurableStop{
 val authorEmail: String = "ygang@cnic.cn"
 val description: String = "Parse BioProjet data"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var cachePath:String = _

View File

@@ -5,7 +5,7 @@ import java.io._
 import cn.piflow.bundle.microorganism.util.BioProject
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
@@ -15,8 +15,8 @@ import org.json.{JSONArray, JSONObject, XML}
 class BioSample extends ConfigurableStop{
 val authorEmail: String = "ygang@cnic.cn"
 val description: String = "Parse BioSample data"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var cachePath:String = _

View File

@@ -5,7 +5,7 @@ import java.io._
 import cn.piflow.bundle.microorganism.util.{CustomIOTools, Process}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
@@ -16,8 +16,8 @@ import org.json.JSONObject
 class EmblData extends ConfigurableStop{
 override val authorEmail: String = "yangqidong@cnic.cn"
 override val description: String = "Parse EMBL data"
-override val inportList: List[String] =List(PortEnum.DefaultPort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] =List(Port.DefaultPort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 var cachePath:String = _

View File

@@ -5,7 +5,7 @@ import java.io._
 import cn.piflow.bundle.microorganism.util.ParserGff3Data
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
@@ -16,8 +16,8 @@ import org.json.JSONObject
 class Ensembl extends ConfigurableStop{
 override val authorEmail: String = "yangqidong@cnic.cn"
 override val description: String = "Parse ensembl data"
-override val inportList: List[String] =List(PortEnum.DefaultPort.toString)
-override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+override val inportList: List[String] =List(Port.DefaultPort.toString)
+override val outportList: List[String] = List(Port.DefaultPort.toString)
 var cachePath:String = _
 def setProperties(map: Map[String, Any]): Unit = {

View File

@@ -5,7 +5,7 @@ import java.io._
 import cn.piflow.bundle.microorganism.util.{CustomIOTools, Process}
 import cn.piflow.conf.bean.PropertyDescriptor
 import cn.piflow.conf.util.{ImageUtil, MapUtil}
-import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
+import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
 import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
@@ -16,8 +16,8 @@ import org.json.JSONObject
 class GenBankData extends ConfigurableStop{
 val authorEmail: String = "ygang@cnic.cn"
 val description: String = " Parse GenBank data"
-val inportList: List[String] = List(PortEnum.DefaultPort.toString)
-val outportList: List[String] = List(PortEnum.DefaultPort.toString)
+val inportList: List[String] = List(Port.DefaultPort.toString)
+val outportList: List[String] = List(Port.DefaultPort.toString)
 var cachePath:String = _

View File

@ -5,7 +5,7 @@ import java.text.SimpleDateFormat
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
@ -15,8 +15,8 @@ import org.json.JSONObject
class Gene extends ConfigurableStop{
override val authorEmail: String = "yangqidong@cnic.cn"
override val description: String = "Parse gene data"
override val inportList: List[String] =List(PortEnum.DefaultPort.toString)
override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
override val inportList: List[String] =List(Port.DefaultPort.toString)
override val outportList: List[String] = List(Port.DefaultPort.toString)
var cachePath:String = _
override def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {

View File

@ -6,7 +6,7 @@ import java.util.regex.{Matcher, Pattern}
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
@ -17,8 +17,8 @@ import org.json.JSONObject
class GoData extends ConfigurableStop{
val authorEmail: String = "ygang@cnic.cn"
val description: String = "Parse Go data"
val inportList: List[String] = List(PortEnum.DefaultPort.toString)
val outportList: List[String] = List(PortEnum.DefaultPort.toString)
val inportList: List[String] = List(Port.DefaultPort.toString)
val outportList: List[String] = List(Port.DefaultPort.toString)
var cachePath:String = _

View File

@ -3,7 +3,7 @@ package cn.piflow.bundle.microorganism
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.ImageUtil
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.spark.sql.SparkSession
@ -11,8 +11,8 @@ import org.apache.spark.sql.SparkSession
class GoldData extends ConfigurableStop{
val authorEmail: String = "ygang@cnic.cn"
val description: String = "Parse GoldData data"
val inportList: List[String] = List(PortEnum.DefaultPort.toString)
val outportList: List[String] = List(PortEnum.DefaultPort.toString)
val inportList: List[String] = List(Port.DefaultPort.toString)
val outportList: List[String] = List(Port.DefaultPort.toString)
def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {

View File

@ -4,7 +4,7 @@ import java.io._
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
@ -15,8 +15,8 @@ import org.json.{JSONObject, XML}
class InterproData extends ConfigurableStop{
val authorEmail: String = "ygang@cnic.cn"
val description: String = "Parse Interpro data"
val inportList: List[String] = List(PortEnum.DefaultPort.toString)
val outportList: List[String] = List(PortEnum.DefaultPort.toString)
val inportList: List[String] = List(Port.DefaultPort.toString)
val outportList: List[String] = List(Port.DefaultPort.toString)
var cachePath:String = _

View File

@ -5,7 +5,7 @@ import java.text.SimpleDateFormat
import java.util.Locale
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import org.apache.hadoop.conf.Configuration
@ -16,8 +16,8 @@ import org.json.{JSONObject, XML}
class MedlineData extends ConfigurableStop{
override val authorEmail: String = "yangqidong@cnic.cn"
override val description: String = "Parse Medline data"
override val inportList: List[String] =List(PortEnum.DefaultPort.toString)
override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
override val inportList: List[String] =List(Port.DefaultPort.toString)
override val outportList: List[String] = List(Port.DefaultPort.toString)
var cachePath:String = _

View File

@ -5,7 +5,7 @@ import java.io._
import cn.piflow.bundle.microorganism.util.{CustomIOTools, Process}
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
@ -18,8 +18,8 @@ import org.json.JSONObject
class MicrobeGenomeData extends ConfigurableStop{
override val authorEmail: String = "yangqidong@cnic.cn"
override val description: String = "Parse MicrobeGenome data"
override val inportList: List[String] =List(PortEnum.DefaultPort.toString)
override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
override val inportList: List[String] =List(Port.DefaultPort.toString)
override val outportList: List[String] = List(Port.DefaultPort.toString)
var cachePath:String = _
override def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {

View File

@ -5,7 +5,7 @@ import java.io._
import cn.piflow.bundle.microorganism.util.PDB
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
@ -16,8 +16,8 @@ import org.json.JSONObject
class PDBData extends ConfigurableStop{
override val authorEmail: String = "yangqidong@cnic.cn"
override val description: String = "Parse PDB data"
override val inportList: List[String] =List(PortEnum.DefaultPort.toString)
override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
override val inportList: List[String] =List(Port.DefaultPort.toString)
override val outportList: List[String] = List(Port.DefaultPort.toString)
var cachePath:String = _
override def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {

View File

@ -3,7 +3,7 @@ package cn.piflow.bundle.microorganism
import java.io.{BufferedReader, InputStreamReader, OutputStreamWriter}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import org.apache.hadoop.conf.Configuration
@ -15,8 +15,8 @@ import org.json.JSONObject
class Pathway extends ConfigurableStop{
override val authorEmail: String = "yangqidong@cnic.cn"
override val description: String = "Parse Pathway data"
override val inportList: List[String] =List(PortEnum.DefaultPort.toString)
override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
override val inportList: List[String] =List(Port.DefaultPort.toString)
override val outportList: List[String] = List(Port.DefaultPort.toString)
var cachePath:String = _

View File

@ -5,7 +5,7 @@ import java.io._
import cn.piflow.bundle.microorganism.util.Pfam
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
@ -16,8 +16,8 @@ import org.json.JSONObject
class PfamData extends ConfigurableStop{
override val authorEmail: String = "yangqidong@cnic.cn"
override val description: String = "Parse pfam data"
override val inportList: List[String] =List(PortEnum.DefaultPort.toString)
override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
override val inportList: List[String] =List(Port.DefaultPort.toString)
override val outportList: List[String] = List(Port.DefaultPort.toString)
var cachePath:String = _

View File

@ -5,7 +5,7 @@ import java.io._
import cn.piflow.bundle.microorganism.util.{CustomIOTools, Process}
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
@ -16,8 +16,8 @@ import org.json.JSONObject
class RefseqData extends ConfigurableStop{
override val authorEmail: String = "yangqidong@cnic.cn"
override val description: String = "Parse Refseq_genome data"
override val inportList: List[String] =List(PortEnum.DefaultPort.toString)
override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
override val inportList: List[String] =List(Port.DefaultPort.toString)
override val outportList: List[String] = List(Port.DefaultPort.toString)
var cachePath:String = _
override def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {

View File

@ -5,7 +5,7 @@ import java.io._
import cn.piflow.bundle.microorganism.util.{CustomIOTools, Process}
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
@ -16,8 +16,8 @@ import org.json.JSONObject
class SwissprotData extends ConfigurableStop{
override val authorEmail: String = "yangqidong@cnic.cn"
override val description: String = "Parse Swissprot_TrEMBL data"
override val inportList: List[String] =List(PortEnum.DefaultPort.toString)
override val outportList: List[String] = List(PortEnum.DefaultPort.toString)
override val inportList: List[String] =List(Port.DefaultPort.toString)
override val outportList: List[String] = List(Port.DefaultPort.toString)
var cachePath:String = _

View File

@ -5,7 +5,7 @@ import java.util.HashMap
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
@ -16,8 +16,8 @@ import org.json.JSONObject
class TaxonomyData extends ConfigurableStop{
val authorEmail: String = "ygang@cnic.cn"
val description: String = "Parse Taxonomy data"
val inportList: List[String] = List(PortEnum.DefaultPort.toString)
val outportList: List[String] = List(PortEnum.DefaultPort.toString)
val inportList: List[String] = List(Port.DefaultPort.toString)
val outportList: List[String] = List(Port.DefaultPort.toString)
var cachePath:String = _

View File

@ -2,7 +2,7 @@ package cn.piflow.bundle.ml_classification
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.spark.ml.classification.DecisionTreeClassificationModel
import org.apache.spark.sql.SparkSession
@ -10,8 +10,8 @@ import org.apache.spark.sql.SparkSession
class DecisionTreePrediction extends ConfigurableStop{
val authorEmail: String = "06whuxx@163.com"
val description: String = "Use an existing decision tree model to predict."
val inportList: List[String] = List(PortEnum.NonePort.toString)
val outportList: List[String] = List(PortEnum.DefaultPort.toString)
val inportList: List[String] = List(Port.NonePort.toString)
val outportList: List[String] = List(Port.DefaultPort.toString)
var test_data_path:String =_
var model_path:String=_

View File

@ -2,7 +2,7 @@ package cn.piflow.bundle.ml_classification
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.spark.ml.classification.DecisionTreeClassifier
import org.apache.spark.sql.SparkSession
@ -10,8 +10,8 @@ import org.apache.spark.sql.SparkSession
class DecisionTreeTraining extends ConfigurableStop{
val authorEmail: String = "06whuxx@163.com"
val description: String = "Train a decision tree model"
val inportList: List[String] = List(PortEnum.NonePort.toString)
val outportList: List[String] = List(PortEnum.DefaultPort.toString)
val inportList: List[String] = List(Port.NonePort.toString)
val outportList: List[String] = List(Port.DefaultPort.toString)
var training_data_path:String =_
var model_save_path:String=_
var maxBins:String=_

View File

@ -1,7 +1,7 @@
package cn.piflow.bundle.ml_classification
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import org.apache.spark.ml.classification.GBTClassificationModel
@ -10,8 +10,8 @@ import org.apache.spark.sql.SparkSession
class GBTPrediction extends ConfigurableStop{
val authorEmail: String = "06whuxx@163.com"
val description: String = "Use an existing GBT Model to predict"
val inportList: List[String] = List(PortEnum.NonePort.toString)
val outportList: List[String] = List(PortEnum.DefaultPort.toString)
val inportList: List[String] = List(Port.NonePort.toString)
val outportList: List[String] = List(Port.DefaultPort.toString)
var test_data_path:String =_
var model_path:String=_

View File

@ -1,7 +1,7 @@
package cn.piflow.bundle.ml_classification
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import org.apache.spark.ml.classification.GBTClassifier
@ -10,8 +10,8 @@ import org.apache.spark.sql.SparkSession
class GBTTraining extends ConfigurableStop{
val authorEmail: String = "06whuxx@163.com"
val description: String = "Train a GBT Model"
val inportList: List[String] = List(PortEnum.NonePort.toString)
val outportList: List[String] = List(PortEnum.DefaultPort.toString)
val inportList: List[String] = List(Port.NonePort.toString)
val outportList: List[String] = List(Port.DefaultPort.toString)
var training_data_path:String =_
var model_save_path:String=_
var maxBins:String=_

View File

@ -2,7 +2,7 @@ package cn.piflow.bundle.ml_classification
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.spark.ml.classification.LogisticRegressionModel
import org.apache.spark.sql.SparkSession
@ -10,8 +10,8 @@ import org.apache.spark.sql.SparkSession
class LogisticRegressionPrediction extends ConfigurableStop{
val authorEmail: String = "06whuxx@163.com"
val description: String = "Use an existing logistic regression model to predict"
val inportList: List[String] = List(PortEnum.NonePort.toString)
val outportList: List[String] = List(PortEnum.DefaultPort.toString)
val inportList: List[String] = List(Port.NonePort.toString)
val outportList: List[String] = List(Port.DefaultPort.toString)
var test_data_path:String =_
var model_path:String=_

View File

@ -2,7 +2,7 @@ package cn.piflow.bundle.ml_classification
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.spark.sql.SparkSession
import org.apache.spark.ml.classification.LogisticRegression
@ -10,8 +10,8 @@ import org.apache.spark.ml.classification.LogisticRegression
class LogisticRegressionTraining extends ConfigurableStop{
val authorEmail: String = "06whuxx@163.com"
val description: String = "Train a logistic regression model"
val inportList: List[String] = List(PortEnum.NonePort.toString)
val outportList: List[String] = List(PortEnum.DefaultPort.toString)
val inportList: List[String] = List(Port.NonePort.toString)
val outportList: List[String] = List(Port.DefaultPort.toString)
var training_data_path:String =_
var model_save_path:String=_
var maxIter:String=_

View File

@ -1,7 +1,7 @@
package cn.piflow.bundle.ml_classification
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
@ -10,8 +10,8 @@ import org.apache.spark.sql.SparkSession
class MultilayerPerceptronPrediction extends ConfigurableStop{
val authorEmail: String = "06whuxx@163.com"
val description: String = "Use an existing multilayer perceptron model to predict"
val inportList: List[String] = List(PortEnum.DefaultPort.toString)
val outportList: List[String] = List(PortEnum.DefaultPort.toString)
val inportList: List[String] = List(Port.DefaultPort.toString)
val outportList: List[String] = List(Port.DefaultPort.toString)
var test_data_path:String =_
var model_path:String=_

View File

@ -1,7 +1,7 @@
package cn.piflow.bundle.ml_classification
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import org.apache.spark.ml.classification.MultilayerPerceptronClassifier
@ -10,8 +10,8 @@ import org.apache.spark.sql.SparkSession
class MultilayerPerceptronTraining extends ConfigurableStop{
val authorEmail: String = "xiaoxiao@cnic.cn"
val description: String = "Train a multilayer perceptron model"
val inportList: List[String] = List(PortEnum.NonePort.toString)
val outportList: List[String] = List(PortEnum.DefaultPort.toString)
val inportList: List[String] = List(Port.NonePort.toString)
val outportList: List[String] = List(Port.DefaultPort.toString)
var training_data_path:String =_
var model_save_path:String=_
var layers:String=_

View File

@ -2,7 +2,7 @@ package cn.piflow.bundle.ml_classification
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, PortEnum, StopGroup}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.spark.ml.classification.NaiveBayesModel
import org.apache.spark.sql.SparkSession
@ -10,8 +10,8 @@ import org.apache.spark.sql.SparkSession
class NaiveBayesPrediction extends ConfigurableStop{
val authorEmail: String = "06whuxx@163.com"
val description: String = "Use an existing NaiveBayes model to predict"
val inportList: List[String] = List(PortEnum.NonePort.toString)
val outportList: List[String] = List(PortEnum.DefaultPort.toString)
val inportList: List[String] = List(Port.NonePort.toString)
val outportList: List[String] = List(Port.DefaultPort.toString)
var test_data_path:String =_
var model_path:String=_

Some files were not shown because too many files have changed in this diff.
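
Note: every hunk in this commit is the same mechanical substitution — `PortEnum.X.toString` becomes `Port.X.toString`, with the members `DefaultPort`, `AnyPort`, and `NonePort` untouched. The definition of `cn.piflow.conf.Port` itself is not among the hunks shown here, so the following is only a minimal, self-contained sketch of what the renamed call sites rely on; modeling `Port` as a plain Scala `Enumeration` is an assumption, and `PortRenameDemo` is a hypothetical driver, not piflow code.

// Hypothetical stand-in for cn.piflow.conf.Port; the real definition is not
// part of the hunks shown above. A Scala Enumeration reproduces the behavior
// the stops depend on: Port.DefaultPort.toString == "DefaultPort".
object Port extends Enumeration {
  val DefaultPort, AnyPort, NonePort = Value
}

object PortRenameDemo extends App {
  // Port declarations written exactly as the stops in this diff write them:
  val inportList: List[String]  = List(Port.AnyPort.toString)
  val outportList: List[String] = List(Port.DefaultPort.toString)
  println(inportList)  // List(AnyPort)
  println(outportList) // List(DefaultPort)
}

Because only the enclosing object is renamed and the member names are unchanged, the strings produced by `toString` ("DefaultPort", "AnyPort", "NonePort") are identical before and after this commit, so any flow configuration that matches ports by these string values keeps working without migration.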