forked from opensci/piflow
Add Elasticsearch stops: putEs, qurryEs (*), fetchEs (*)
This commit is contained in:
parent
97aed5b250
commit
2cab1d07d1
|
@ -90,6 +90,11 @@
|
|||
<artifactId>kafka-clients</artifactId>
|
||||
<version>0.11.0.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.elasticsearch</groupId>
|
||||
<artifactId>elasticsearch-spark-20_2.11</artifactId>
|
||||
<version>5.6.3</version>
|
||||
</dependency>
|
||||
|
||||
|
||||
</dependencies>
|
||||
|
|
|
@ -0,0 +1,39 @@
|
|||
{
|
||||
"flow":{
|
||||
"name":"test",
|
||||
"uuid":"1234",
|
||||
"stops":[
|
||||
{
|
||||
"uuid":"0000",
|
||||
"name":"SelectHiveQL",
|
||||
"bundle":"cn.piflow.bundle.hive.SelectHiveQL",
|
||||
"properties":{
|
||||
"hiveQL":"select * from sparktest.dblp_phdthesis"
|
||||
}
|
||||
|
||||
},
|
||||
{
|
||||
"uuid":"1111",
|
||||
"name":"esPut",
|
||||
"bundle":"cn.piflow.bundle.es.PutEs",
|
||||
"properties":{
|
||||
"es_nodes":"10.0.86.239",
|
||||
"port":"9200",
|
||||
"es_index":"json000",
|
||||
"es_type":"json_spark000",
|
||||
"schema":"author,pages"
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
],
|
||||
"paths":[
|
||||
{
|
||||
"from":"SelectHiveQL",
|
||||
"outport":"",
|
||||
"inport":"",
|
||||
"to":"esPut"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
{
|
||||
"flow":{
|
||||
"name":"test",
|
||||
"uuid":"1234",
|
||||
"stops":[
|
||||
{
|
||||
"uuid":"0000",
|
||||
"name":"JsonSave",
|
||||
"bundle":"cn.piflow.bundle.json.JsonSave",
|
||||
"properties":{
|
||||
"jsonSavePath":"hdfs://10.0.86.89:9000/yg/EsFetch.json"
|
||||
}
|
||||
|
||||
},
|
||||
{
|
||||
"uuid":"1111",
|
||||
"name":"esGet",
|
||||
"bundle":"cn.piflow.bundle.es.FetchEs",
|
||||
"properties":{
|
||||
"es_nodes":"10.0.86.239",
|
||||
"port":"9200",
|
||||
"es_index":"sparkToEs",
|
||||
"es_type":"toEs",
|
||||
"schema":"author,pages"
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
],
|
||||
"paths":[
|
||||
{
|
||||
"from":"esGet",
|
||||
"outport":"",
|
||||
"inport":"",
|
||||
"to":"JsonSave"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
{
|
||||
"flow":{
|
||||
"name":"test",
|
||||
"uuid":"1234",
|
||||
"stops":[
|
||||
{
|
||||
"uuid":"0000",
|
||||
"name":"JsonSave",
|
||||
"bundle":"cn.piflow.bundle.json.JsonSave",
|
||||
"properties":{
|
||||
"jsonSavePath":"hdfs://10.0.86.89:9000/yg/EsFetch.json"
|
||||
}
|
||||
|
||||
},
|
||||
{
|
||||
"uuid":"1111",
|
||||
"name":"esQurry",
|
||||
"bundle":"cn.piflow.bundle.es.QurryEs",
|
||||
"properties":{
|
||||
"es_nodes":"10.0.86.239",
|
||||
"port":"9200",
|
||||
"es_index":"sparkToEs",
|
||||
"es_type":"toEs",
|
||||
"schema":"author,pages"
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
],
|
||||
"paths":[
|
||||
{
|
||||
"from":"esQurry",
|
||||
"outport":"",
|
||||
"inport":"",
|
||||
"to":"JsonSave"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
|
@ -0,0 +1,86 @@
|
|||
package cn.piflow.bundle.es

import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.MapUtil
import cn.piflow.conf.{ConfigurableStop, StopGroupEnum}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.spark.sql.SparkSession

/**
  * Source stop that reads one Elasticsearch index/type into a DataFrame
  * via the elasticsearch-spark SQL connector and writes it to the out port.
  */
class FetchEs extends ConfigurableStop {

  val description: String = "Fetch data from Es."
  override val authorEmail: String = "xiaoxiao@cnic.cn"
  override val inportCount: Int = 0
  override val outportCount: Int = 1

  var es_nodes: String = _ // ES nodes, comma separated
  var port: Int = _        // ES HTTP port
  var es_index: String = _ // ES index to read
  var es_type: String = _  // ES type to read

  /**
    * Loads `es_index/es_type` from the configured cluster and emits it.
    *
    * Bug fix: the original ignored every configured property and hardcoded
    * the host ("10.0.86.239"), port ("9200") and resource ("test/test").
    */
  def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {
    val spark = pec.get[SparkSession]()
    val sqlContext = spark.sqlContext

    val options = Map(
      "es.index.auto.create" -> "true",
      "es.nodes" -> es_nodes,
      "es.port" -> port.toString)

    val df = sqlContext.read
      .format("org.elasticsearch.spark.sql")
      .options(options)
      .load(s"$es_index/$es_type")

    out.write(df)
  }

  /** Populates the four connection/resource properties from the flow config. */
  def setProperties(map: Map[String, Any]): Unit = {
    es_nodes = MapUtil.get(map, key = "es_nodes").asInstanceOf[String]
    port = Integer.parseInt(MapUtil.get(map, key = "port").toString)
    es_index = MapUtil.get(map, key = "es_index").asInstanceOf[String]
    es_type = MapUtil.get(map, key = "es_type").asInstanceOf[String]
  }

  override def getPropertyDescriptor(): List[PropertyDescriptor] = {
    var descriptor: List[PropertyDescriptor] = List()
    // Bug fix: displayName was "REDIS_HOST" (copy-paste from a Redis stop).
    val es_nodes = new PropertyDescriptor().name("es_nodes").displayName("ES_NODES").defaultValue("").required(true)
    val port = new PropertyDescriptor().name("port").displayName("PORT").defaultValue("").required(true)
    val es_index = new PropertyDescriptor().name("es_index").displayName("ES_INDEX").defaultValue("").required(true)
    val es_type = new PropertyDescriptor().name("es_type").displayName("ES_TYPE").defaultValue("").required(true)
    descriptor = es_nodes :: descriptor
    descriptor = port :: descriptor
    descriptor = es_index :: descriptor
    descriptor = es_type :: descriptor
    descriptor
  }

  override def getIcon(): Array[Byte] = ???

  override def getGroup(): List[String] = {
    List(StopGroupEnum.ESGroup.toString)
  }

  override def initialize(ctx: ProcessContext): Unit = {

  }

}
|
|
@ -0,0 +1,93 @@
|
|||
package cn.piflow.bundle.es

import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.MapUtil
import cn.piflow.conf.{ConfigurableStop, StopGroupEnum}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark.sql.EsSparkSQL

/**
  * Sink stop that writes the incoming DataFrame into an Elasticsearch
  * index/type using EsSparkSQL.saveToEs.
  */
class PutEs extends ConfigurableStop {

  val description: String = "Put data to Es."

  override val authorEmail: String = "xiaoxiao@cnic.cn"
  override val inportCount: Int = 0
  override val outportCount: Int = 1

  var es_nodes: String = _ // ES nodes, comma separated
  var port: Int = _        // ES HTTP port
  var es_index: String = _ // target ES index
  var es_type: String = _  // target ES type

  /**
    * Saves the upstream DataFrame to `es_index/es_type`.
    *
    * Bug fix: the original hardcoded the resource ("/test/test5") and passed
    * no connection settings, ignoring all four configured properties.
    */
  def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {
    val inDF = in.read()

    val esConfig = Map(
      "es.index.auto.create" -> "true",
      "es.nodes" -> es_nodes,
      "es.port" -> port.toString)

    EsSparkSQL.saveToEs(inDF, s"$es_index/$es_type", esConfig)
  }

  /** Populates the four connection/resource properties from the flow config. */
  def setProperties(map: Map[String, Any]): Unit = {
    es_nodes = MapUtil.get(map, key = "es_nodes").asInstanceOf[String]
    port = Integer.parseInt(MapUtil.get(map, key = "port").toString)
    es_index = MapUtil.get(map, key = "es_index").asInstanceOf[String]
    es_type = MapUtil.get(map, key = "es_type").asInstanceOf[String]
  }

  override def getPropertyDescriptor(): List[PropertyDescriptor] = {
    var descriptor: List[PropertyDescriptor] = List()
    // Bug fix: displayName was "REDIS_HOST" (copy-paste from a Redis stop).
    val es_nodes = new PropertyDescriptor().name("es_nodes").displayName("ES_NODES").defaultValue("").required(true)
    val port = new PropertyDescriptor().name("port").displayName("PORT").defaultValue("").required(true)
    val es_index = new PropertyDescriptor().name("es_index").displayName("ES_INDEX").defaultValue("").required(true)
    val es_type = new PropertyDescriptor().name("es_type").displayName("ES_TYPE").defaultValue("").required(true)
    descriptor = es_nodes :: descriptor
    descriptor = port :: descriptor
    descriptor = es_index :: descriptor
    descriptor = es_type :: descriptor
    descriptor
  }

  override def getIcon(): Array[Byte] = ???

  override def getGroup(): List[String] = {
    List(StopGroupEnum.ESGroup.toString)
  }

  override def initialize(ctx: ProcessContext): Unit = {

  }

}
|
|
@ -0,0 +1,92 @@
|
|||
package cn.piflow.bundle.es

import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.MapUtil
import cn.piflow.conf.{ConfigurableStop, StopGroupEnum}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark.sql.EsSparkSQL

/**
  * Source stop that runs a query against an Elasticsearch index/type and
  * emits the matching documents as a DataFrame.
  *
  * NOTE(review): the class name "QurryEs" is a misspelling of "QueryEs",
  * but it is referenced by bundle strings in flow JSON, so it cannot be
  * renamed here without breaking existing flows.
  */
class QurryEs extends ConfigurableStop {

  val description: String = "Qurry data from Es."

  override val authorEmail: String = "xiaoxiao@cnic.cn"
  override val inportCount: Int = 0
  override val outportCount: Int = 1

  var es_nodes: String = _ // ES nodes, comma separated
  var port: Int = _        // ES HTTP port
  var es_index: String = _ // ES index to query
  var es_type: String = _  // ES type to query

  /**
    * Executes the (currently fixed) match query against `es_index/es_type`
    * on the configured cluster and writes the result downstream.
    *
    * Bug fix: the original hardcoded the resource ("customer/doc") and the
    * connection, ignoring all four configured properties.
    */
  def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {
    val spark = pec.get[SparkSession]()
    val sqlContext = spark.sqlContext

    // TODO: expose the query as a configurable property instead of a constant.
    val qurry =
      """
        |{
        |  "query":{
        |    "match":{
        |      "id":2
        |    }
        |  }
        |}
      """.stripMargin

    val esConfig = Map(
      "es.index.auto.create" -> "true",
      "es.nodes" -> es_nodes,
      "es.port" -> port.toString)

    val df = EsSparkSQL.esDF(sqlContext, s"$es_index/$es_type", qurry, esConfig)
    out.write(df)
  }

  /** Populates the four connection/resource properties from the flow config. */
  def setProperties(map: Map[String, Any]): Unit = {
    es_nodes = MapUtil.get(map, key = "es_nodes").asInstanceOf[String]
    port = Integer.parseInt(MapUtil.get(map, key = "port").toString)
    es_index = MapUtil.get(map, key = "es_index").asInstanceOf[String]
    es_type = MapUtil.get(map, key = "es_type").asInstanceOf[String]
  }

  override def getPropertyDescriptor(): List[PropertyDescriptor] = {
    var descriptor: List[PropertyDescriptor] = List()
    // Bug fix: displayName was "REDIS_HOST" (copy-paste from a Redis stop).
    val es_nodes = new PropertyDescriptor().name("es_nodes").displayName("ES_NODES").defaultValue("").required(true)
    val port = new PropertyDescriptor().name("port").displayName("PORT").defaultValue("").required(true)
    val es_index = new PropertyDescriptor().name("es_index").displayName("ES_INDEX").defaultValue("").required(true)
    val es_type = new PropertyDescriptor().name("es_type").displayName("ES_TYPE").defaultValue("").required(true)
    descriptor = es_nodes :: descriptor
    descriptor = port :: descriptor
    descriptor = es_index :: descriptor
    descriptor = es_type :: descriptor
    descriptor
  }

  override def getIcon(): Array[Byte] = ???

  override def getGroup(): List[String] = {
    List(StopGroupEnum.ESGroup.toString)
  }

  override def initialize(ctx: ProcessContext): Unit = {

  }

}
|
|
@ -15,6 +15,7 @@ case object FileGroup extends StopGroup
|
|||
case object CleanGroup extends StopGroup
|
||||
case object RedisGroup extends StopGroup
|
||||
case object KafkaGroup extends StopGroup
|
||||
case object ESGroup extends StopGroup
|
||||
|
||||
|
||||
object StopGroup{
|
||||
|
|
|
@ -17,5 +17,6 @@ object StopGroupEnum extends Enumeration {
|
|||
val KafkaGroup = Value("kafkaGroup")
|
||||
val RedisGroup = Value("RedisGroup")
|
||||
val SolrGroup = Value("SolrGroup")
|
||||
val ESGroup = Value("ESGroup")
|
||||
|
||||
}
|
||||
|
|
|
@ -0,0 +1,122 @@
|
|||
package cn.piflow.bundle

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import org.apache.spark.sql.SparkSession
import org.junit.Test

import scala.util.parsing.json.JSON

/**
  * Integration tests for the Elasticsearch stops (PutEs / FetchEs / QurryEs).
  * Each test parses a flow JSON, builds the flow, and runs it on a remote
  * Spark cluster; they require live Spark and ES endpoints to pass.
  */
class EsTest {

  /**
    * Shared driver: parse the flow JSON at `file`, construct the flow, run it
    * on a SparkSession, and block until termination.
    *
    * @param file      path to the flow definition JSON
    * @param withEsConf when true, add the ES connection settings to the
    *                   session config (needed by the put/query flows, which
    *                   rely on session-level es.* settings)
    */
  private def runFlow(file: String, withEsConf: Boolean): Unit = {
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    // create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    // execute flow
    var builder = SparkSession.builder()
      .master("spark://10.0.86.89:7077")
      .appName("piflow-hive-bundle")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("spark.jars", "/opt/project/piflow-master/out/artifacts/piflow_bundle/piflow_bundle.jar")
    if (withEsConf) {
      builder = builder
        .config("es.index.auto.create", "true") // auto-create target index
        .config("es.nodes", "10.0.86.239")      // ES nodes
        .config("es.port", "9200")              // ES port
    }
    val spark = builder
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .start(flow)

    process.awaitTermination()
    spark.close()
  }

  @Test
  def testFetchEs(): Unit = {
    runFlow("src/main/resources/esGet.json", withEsConf = false)
  }

  @Test
  def testPutEs(): Unit = {
    runFlow("src/main/resources/es.json", withEsConf = true)
  }

  @Test
  def queryEs(): Unit = {
    runFlow("src/main/resources/esQurry.json", withEsConf = true)
  }

}
|
|
@ -1,182 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module org.jetbrains.idea.maven.project.MavenProjectsManager.isMavenModule="true" type="JAVA_MODULE" version="4">
|
||||
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8">
|
||||
<output url="file://$MODULE_DIR$/target/classes" />
|
||||
<output-test url="file://$MODULE_DIR$/target/test-classes" />
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<excludeFolder url="file://$MODULE_DIR$/target" />
|
||||
</content>
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
<orderEntry type="library" name="Maven: org.scala-lang:scala-library:2.11.8" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.scala-lang:scala-reflect:2.11.8" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.scala-lang:scala-compiler:2.11.8" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.scala-lang.modules:scala-xml_2.11:1.0.4" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.scala-lang.modules:scala-parser-combinators_2.11:1.0.4" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.11" level="project" />
|
||||
<orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.spark:spark-core_2.11:2.1.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.avro:avro-mapred:hadoop2:1.7.7" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.avro:avro-ipc:1.7.7" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.avro:avro-ipc:tests:1.7.7" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.codehaus.jackson:jackson-core-asl:1.9.13" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.twitter:chill_2.11:0.8.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.esotericsoftware:kryo-shaded:3.0.3" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.esotericsoftware:minlog:1.3.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.objenesis:objenesis:2.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.twitter:chill-java:0.8.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.xbean:xbean-asm5-shaded:4.4" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-client:2.2.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-common:2.2.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.commons:commons-math:2.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: xmlenc:xmlenc:0.52" level="project" />
|
||||
<orderEntry type="library" name="Maven: commons-configuration:commons-configuration:1.6" level="project" />
|
||||
<orderEntry type="library" name="Maven: commons-collections:commons-collections:3.2.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: commons-digester:commons-digester:1.8" level="project" />
|
||||
<orderEntry type="library" name="Maven: commons-beanutils:commons-beanutils:1.7.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: commons-beanutils:commons-beanutils-core:1.8.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-auth:2.2.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-hdfs:2.2.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.mortbay.jetty:jetty-util:6.1.26" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-mapreduce-client-app:2.2.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-mapreduce-client-common:2.2.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-mapreduce-client-shuffle:2.2.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-mapreduce-client-core:2.2.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-mapreduce-client-jobclient:2.2.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-annotations:2.2.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.spark:spark-launcher_2.11:2.1.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.spark:spark-network-common_2.11:2.1.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.fusesource.leveldbjni:leveldbjni-all:1.8" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-annotations:2.6.5" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.spark:spark-network-shuffle_2.11:2.1.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.spark:spark-unsafe_2.11:2.1.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: net.java.dev.jets3t:jets3t:0.7.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.curator:curator-recipes:2.4.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.curator:curator-framework:2.4.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.curator:curator-client:2.4.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.zookeeper:zookeeper:3.4.5" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.google.guava:guava:14.0.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: javax.servlet:javax.servlet-api:3.1.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.commons:commons-lang3:3.5" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.commons:commons-math3:3.4.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.google.code.findbugs:jsr305:1.3.9" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.slf4j:slf4j-api:1.7.16" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.slf4j:jul-to-slf4j:1.7.16" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.slf4j:jcl-over-slf4j:1.7.16" level="project" />
|
||||
<orderEntry type="library" name="Maven: log4j:log4j:1.2.17" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.slf4j:slf4j-log4j12:1.7.16" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.ning:compress-lzf:1.0.3" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.xerial.snappy:snappy-java:1.1.2.6" level="project" />
|
||||
<orderEntry type="library" name="Maven: net.jpountz.lz4:lz4:1.3.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.roaringbitmap:RoaringBitmap:0.5.11" level="project" />
|
||||
<orderEntry type="library" name="Maven: commons-net:commons-net:2.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.json4s:json4s-jackson_2.11:3.2.11" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.json4s:json4s-core_2.11:3.2.11" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.json4s:json4s-ast_2.11:3.2.11" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.scala-lang:scalap:2.11.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.glassfish.jersey.core:jersey-client:2.22.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: javax.ws.rs:javax.ws.rs-api:2.0.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.glassfish.hk2:hk2-api:2.4.0-b34" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.glassfish.hk2:hk2-utils:2.4.0-b34" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.glassfish.hk2.external:aopalliance-repackaged:2.4.0-b34" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.glassfish.hk2.external:javax.inject:2.4.0-b34" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.glassfish.hk2:hk2-locator:2.4.0-b34" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.javassist:javassist:3.18.1-GA" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.glassfish.jersey.core:jersey-common:2.22.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: javax.annotation:javax.annotation-api:1.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.glassfish.jersey.bundles.repackaged:jersey-guava:2.22.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.glassfish.hk2:osgi-resource-locator:1.0.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.glassfish.jersey.core:jersey-server:2.22.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.glassfish.jersey.media:jersey-media-jaxb:2.22.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: javax.validation:validation-api:1.1.0.Final" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.glassfish.jersey.containers:jersey-container-servlet:2.22.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.glassfish.jersey.containers:jersey-container-servlet-core:2.22.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: io.netty:netty-all:4.0.42.Final" level="project" />
|
||||
<orderEntry type="library" name="Maven: io.netty:netty:3.8.0.Final" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.clearspring.analytics:stream:2.7.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-core:3.1.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-jvm:3.1.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-json:3.1.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-graphite:3.1.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-databind:2.6.5" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-core:2.6.5" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.fasterxml.jackson.module:jackson-module-scala_2.11:2.6.5" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.fasterxml.jackson.module:jackson-module-paranamer:2.6.5" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.ivy:ivy:2.4.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: oro:oro:2.0.8" level="project" />
|
||||
<orderEntry type="library" name="Maven: net.razorvine:pyrolite:4.13" level="project" />
|
||||
<orderEntry type="library" name="Maven: net.sf.py4j:py4j:0.10.4" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.spark:spark-tags_2.11:2.1.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.scalatest:scalatest_2.11:2.2.6" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.commons:commons-crypto:1.0.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.spark-project.spark:unused:1.0.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.spark:spark-sql_2.11:2.1.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.univocity:univocity-parsers:2.2.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.spark:spark-sketch_2.11:2.1.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.spark:spark-catalyst_2.11:2.1.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.codehaus.janino:janino:3.0.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.codehaus.janino:commons-compiler:3.0.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.antlr:antlr4-runtime:4.5.3" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.parquet:parquet-column:1.8.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.parquet:parquet-common:1.8.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.parquet:parquet-encoding:1.8.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.parquet:parquet-hadoop:1.8.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.parquet:parquet-format:2.3.0-incubating" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.parquet:parquet-jackson:1.8.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.spark:spark-hive_2.11:2.1.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.twitter:parquet-hadoop-bundle:1.6.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.spark-project.hive:hive-exec:1.2.1.spark2" level="project" />
|
||||
<orderEntry type="library" name="Maven: commons-io:commons-io:2.4" level="project" />
|
||||
<orderEntry type="library" name="Maven: commons-lang:commons-lang:2.6" level="project" />
|
||||
<orderEntry type="library" name="Maven: javolution:javolution:5.5.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: log4j:apache-log4j-extras:1.2.17" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.antlr:antlr-runtime:3.4" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.antlr:stringtemplate:3.2.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: antlr:antlr:2.7.7" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.antlr:ST4:4.0.4" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.commons:commons-compress:1.4.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.tukaani:xz:1.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.googlecode.javaewah:JavaEWAH:0.3.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.iq80.snappy:snappy:0.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: stax:stax-api:1.0.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: net.sf.opencsv:opencsv:2.3" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.spark-project.hive:hive-metastore:1.2.1.spark2" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.jolbox:bonecp:0.8.0.RELEASE" level="project" />
|
||||
<orderEntry type="library" name="Maven: commons-cli:commons-cli:1.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: commons-logging:commons-logging:1.1.3" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.derby:derby:10.10.2.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.datanucleus:datanucleus-api-jdo:3.2.6" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.datanucleus:datanucleus-rdbms:3.2.9" level="project" />
|
||||
<orderEntry type="library" name="Maven: commons-pool:commons-pool:1.5.4" level="project" />
|
||||
<orderEntry type="library" name="Maven: commons-dbcp:commons-dbcp:1.4" level="project" />
|
||||
<orderEntry type="library" name="Maven: javax.jdo:jdo-api:3.0.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: javax.transaction:jta:1.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.avro:avro:1.7.7" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.thoughtworks.paranamer:paranamer:2.3" level="project" />
|
||||
<orderEntry type="library" name="Maven: commons-httpclient:commons-httpclient:3.1" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.calcite:calcite-avatica:1.2.0-incubating" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.calcite:calcite-core:1.2.0-incubating" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.calcite:calcite-linq4j:1.2.0-incubating" level="project" />
|
||||
<orderEntry type="library" name="Maven: net.hydromatic:eigenbase-properties:1.1.5" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.httpcomponents:httpclient:4.5.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.httpcomponents:httpcore:4.4.4" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.9.13" level="project" />
|
||||
<orderEntry type="library" name="Maven: commons-codec:commons-codec:1.10" level="project" />
|
||||
<orderEntry type="library" name="Maven: joda-time:joda-time:2.9.3" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.jodd:jodd-core:3.5.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.datanucleus:datanucleus-core:3.2.10" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.thrift:libthrift:0.9.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.thrift:libfb303:0.9.2" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.spark:spark-yarn_2.11:2.1.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-yarn-api:2.2.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.google.inject.extensions:guice-servlet:3.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.google.protobuf:protobuf-java:2.5.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: com.google.inject:guice:3.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: javax.inject:javax.inject:1" level="project" />
|
||||
<orderEntry type="library" name="Maven: aopalliance:aopalliance:1.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-yarn-common:2.2.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-yarn-server-web-proxy:2.2.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-yarn-server-common:2.2.0" level="project" />
|
||||
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-yarn-client:2.2.0" level="project" />
|
||||
</component>
|
||||
</module>
|
Loading…
Reference in New Issue