forked from opensci/piflow
mv all *.java to java package
parent 7d9133c025
commit 3d8dd3720c
@@ -1,6 +1,7 @@
 package cn.piflow.bundle.microorganism.util;

 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.log4j.Logger;
 import org.biojava.nbio.structure.*;
@@ -8,13 +9,14 @@ import org.biojava.nbio.structure.io.PDBFileReader;
 import org.json.JSONArray;
 import org.json.JSONObject;

 import java.io.*;
 import java.io.BufferedReader;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.text.DateFormat;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.*;
 import java.util.zip.GZIPInputStream;
 import org.apache.hadoop.fs.FileSystem;

 public class PDB {
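The imports above (hadoop FileSystem, GZIPInputStream, biojava PDBFileReader) outline how the PDB class reads structures: open a gzipped entry out of HDFS and hand the stream to BioJava. A minimal sketch of that flow; the HDFS URL and file path are hypothetical placeholders, neither appears in this diff:

```scala
import java.util.zip.GZIPInputStream
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.biojava.nbio.structure.Structure
import org.biojava.nbio.structure.io.PDBFileReader

object PdbFromHdfs {
  def main(args: Array[String]): Unit = {
    val conf = new Configuration()
    conf.set("fs.defaultFS", "hdfs://localhost:9000") // hypothetical cluster URL
    val fs = FileSystem.get(conf)
    // PDB archive entries are normally gzipped, e.g. pdb1a4w.ent.gz
    val in = new GZIPInputStream(fs.open(new Path("/data/pdb/pdb1a4w.ent.gz")))
    try {
      val structure: Structure = new PDBFileReader().getStructure(in)
      println(structure.getPDBCode + ": " + structure.getChains.size + " chains")
    } finally in.close()
  }
}
```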
@@ -4,8 +4,13 @@ package cn.piflow.bundle.microorganism.util;
 import org.biojava.bio.seq.io.ParseException;
 import org.json.JSONArray;
 import org.json.JSONObject;
 import java.io.*;
 import java.util.*;

 import java.io.BufferedReader;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
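The Pattern/Matcher and JSONObject imports in this second util class suggest line-oriented parsing of flat-file records into JSON. A hedged sketch of that pattern; the field regex and sample input are invented for illustration, not taken from the file:

```scala
import java.io.{BufferedReader, StringReader}
import java.util.regex.Pattern
import org.json.JSONObject

object FlatFileSketch {
  // Hypothetical "KEY   value" line format, as used by flat-file biology records.
  private val field = Pattern.compile("^(\\S+)\\s+(.+)$")

  def parse(reader: BufferedReader): JSONObject = {
    val doc = new JSONObject()
    Iterator.continually(reader.readLine()).takeWhile(_ != null).foreach { line =>
      val m = field.matcher(line)
      if (m.matches()) doc.put(m.group(1), m.group(2))
    }
    doc
  }

  def main(args: Array[String]): Unit =
    println(parse(new BufferedReader(new StringReader("ID  ABC123\nDE  demo record"))))
}
```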
@@ -7,7 +7,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.poi.hssf.usermodel.HSSFDateUtil;
 import org.apache.poi.hssf.usermodel.HSSFWorkbook;
 import org.apache.poi.openxml4j.exceptions.InvalidFormatException;
 import org.apache.poi.poifs.filesystem.POIFSFileSystem;
 import org.apache.poi.ss.usermodel.Cell;
 import org.apache.poi.ss.usermodel.Row;

@@ -15,9 +14,6 @@ import org.apache.poi.ss.usermodel.Sheet;
 import org.apache.poi.ss.usermodel.Workbook;
 import org.apache.poi.xssf.usermodel.XSSFWorkbook;

 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
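The POI imports being pruned here belong to an Excel reader that handles both .xls (HSSF) and .xlsx (XSSF) plus date-formatted numeric cells. A minimal sketch of that combination; the file name is a placeholder, and WorkbookFactory stands in for whatever explicit HSSF/XSSF branching the file itself does:

```scala
import java.io.File
import java.text.SimpleDateFormat
import scala.collection.JavaConverters._
import org.apache.poi.hssf.usermodel.HSSFDateUtil
import org.apache.poi.ss.usermodel.{CellType, WorkbookFactory}

object ExcelSketch {
  def main(args: Array[String]): Unit = {
    // WorkbookFactory returns an HSSFWorkbook or XSSFWorkbook by sniffing the file.
    val wb = WorkbookFactory.create(new File("input.xlsx")) // placeholder path
    val fmt = new SimpleDateFormat("yyyy-MM-dd")
    for (row <- wb.getSheetAt(0).iterator().asScala) {
      val cells = row.iterator().asScala.map { cell =>
        cell.getCellType match {
          case CellType.NUMERIC if HSSFDateUtil.isCellDateFormatted(cell) =>
            fmt.format(cell.getDateCellValue) // Excel stores dates as numbers
          case CellType.NUMERIC => cell.getNumericCellValue.toString
          case _                => cell.toString
        }
      }
      println(cells.mkString("\t"))
    }
    wb.close()
  }
}
```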
@@ -8,7 +8,7 @@ import java.io.IOException;
 import java.io.ObjectStreamException;
 import java.io.Serializable;

-public class JedisClusterImplSer implements Serializable {
+public class JedisClusterImplSer implements Serializable {

     private static final long serialVersionUID = -51L;
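JedisCluster holds live connections, which is why a Serializable wrapper like JedisClusterImplSer exists at all: the client has to be dropped during serialization and rebuilt afterwards. A sketch of that idea under assumptions; the real field and method names are not visible in this hunk:

```scala
import java.io.{IOException, ObjectInputStream}
import redis.clients.jedis.{HostAndPort, JedisCluster}

@SerialVersionUID(-51L)
class JedisClusterSketch(val node: HostAndPort) extends Serializable {
  // Live sockets cannot cross serialization: keep the client transient
  // and reconnect on the receiving side.
  @transient private var cluster: JedisCluster = new JedisCluster(node)

  def getJedisCluster: JedisCluster = cluster

  @throws[IOException]
  private def readObject(in: ObjectInputStream): Unit = {
    in.defaultReadObject()           // restores the HostAndPort address
    cluster = new JedisCluster(node) // rebuild the transient client
  }
}
```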
@@ -29,14 +29,14 @@ class PutHdfs extends ConfigurableStop{
     config.set("fs.defaultFS",hdfsUrl)
     val fs = FileSystem.get(config)

-    if (types=="json"){
-      inDF.repartition(partition).write.json(hdfsUrl+hdfsPath)
-    } else if (types=="csv"){
-      inDF.repartition(partition).write.csv(hdfsUrl+hdfsPath)
-    } else {
-      //parquet
-      inDF.repartition(partition).write.save(hdfsUrl+hdfsPath)
-    }
+    if (types=="json"){
+      inDF.repartition(partition).write.json(hdfsUrl+hdfsPath)
+    } else if (types=="csv"){
+      inDF.repartition(partition).write.csv(hdfsUrl+hdfsPath)
+    } else {
+      //parquet
+      inDF.repartition(partition).write.save(hdfsUrl+hdfsPath)
+    }

   }
   override def setProperties(map: Map[String, Any]): Unit = {
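Since json, csv, and parquet are all built-in Spark data sources, the three write branches above could equally be collapsed into a single format() call. A hedged alternative sketch; the SaveMode choice is an assumption, as the stop's actual mode is not shown in this diff:

```scala
import org.apache.spark.sql.{DataFrame, SaveMode}

def writeToHdfs(inDF: DataFrame, hdfsUrl: String, hdfsPath: String,
                types: String, partition: Int): Unit = {
  // "json", "csv" and "parquet" are all DataSource short names, so the
  // property value can be passed straight to format(); anything else
  // falls back to parquet, matching the else branch above.
  val fmt = if (Set("json", "csv").contains(types)) types else "parquet"
  inDF.repartition(partition)
    .write
    .format(fmt)
    .mode(SaveMode.ErrorIfExists) // assumption: no overwrite, Spark's default
    .save(hdfsUrl + hdfsPath)
}
```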
@@ -50,8 +50,10 @@ class PutHdfs extends ConfigurableStop{
     var descriptor : List[PropertyDescriptor] = List()
     val hdfsPath = new PropertyDescriptor().name("hdfsPath").displayName("hdfsPath").defaultValue("").required(true)
     val hdfsUrl = new PropertyDescriptor().name("hdfsUrl").displayName("hdfsUrl").defaultValue("").required(true)
-    val types = new PropertyDescriptor().name("types").displayName("json,csv,parquet").defaultValue("").required(true)
-    val partition = new PropertyDescriptor().name("partition").displayName("repartition").defaultValue("").required(true)
+    val types = new PropertyDescriptor().name("types").displayName("json,csv,parquet").description("json,csv,parquet")
+      .defaultValue("csv").allowableValues(Set("json","csv","parquet")).required(true)
+
+    val partition = new PropertyDescriptor().name("partition").displayName("repartition").description("partition").defaultValue("").required(true)
     descriptor = partition :: descriptor
     descriptor = types :: descriptor
     descriptor = hdfsPath :: descriptor
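On the consuming side, setProperties reads these descriptors back out of the incoming map. A minimal sketch of that counterpart; plain map access is assumed here, since the helper piflow actually uses is not part of this hunk:

```scala
class PutHdfsPropsSketch {
  var types: String = "csv"
  var partition: Int = 1

  // Mirrors the descriptors declared above: "types" defaults to "csv" and is
  // restricted to json/csv/parquet; "partition" arrives as a string.
  def setProperties(map: Map[String, Any]): Unit = {
    types = map.getOrElse("types", "csv").asInstanceOf[String]
    require(Set("json", "csv", "parquet").contains(types), s"unsupported types: $types")
    partition = map.getOrElse("partition", "1").asInstanceOf[String].toInt
  }
}
```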