mv all *.java to java package

yanggang 2019-04-09 19:08:54 +08:00
parent 7d9133c025
commit 3d8dd3720c
14 changed files with 25 additions and 20 deletions

View File

@@ -1,6 +1,7 @@
 package cn.piflow.bundle.microorganism.util;
 import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.log4j.Logger;
 import org.biojava.nbio.structure.*;
@@ -8,13 +9,14 @@ import org.biojava.nbio.structure.io.PDBFileReader;
 import org.json.JSONArray;
 import org.json.JSONObject;
-import java.io.*;
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStreamReader;
 import java.text.DateFormat;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.*;
-import org.apache.hadoop.fs.FileSystem;
+import java.util.zip.GZIPInputStream;
 public class PDB {
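
The reworked import list above (FileSystem, FSDataInputStream, GZIPInputStream, BufferedReader/InputStreamReader, plus BioJava's PDBFileReader) points at PDB entries being streamed out of HDFS as gzip-compressed text. As a rough illustration only, not code from this commit, the pieces typically combine like the following sketch (the object name and paths are made up):

// Illustrative sketch, not part of the commit: open a gzipped PDB file on HDFS
// and hand the decompressed stream to BioJava's parser.
import java.util.zip.GZIPInputStream
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.biojava.nbio.structure.Structure
import org.biojava.nbio.structure.io.PDBFileReader

object PdbFromHdfsSketch {
  def readGzippedPdb(hdfsUrl: String, pdbPath: String): Structure = {
    val conf = new Configuration()
    conf.set("fs.defaultFS", hdfsUrl)
    val fs = FileSystem.get(conf)
    // fs.open returns an FSDataInputStream, which GZIPInputStream can wrap directly.
    val in = new GZIPInputStream(fs.open(new Path(pdbPath)))
    try new PDBFileReader().getStructure(in)
    finally in.close()
  }
}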

View File

@@ -4,8 +4,13 @@ package cn.piflow.bundle.microorganism.util;
 import org.biojava.bio.seq.io.ParseException;
 import org.json.JSONArray;
 import org.json.JSONObject;
-import java.io.*;
-import java.util.*;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;

View File

@@ -7,7 +7,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.poi.hssf.usermodel.HSSFDateUtil;
 import org.apache.poi.hssf.usermodel.HSSFWorkbook;
-import org.apache.poi.openxml4j.exceptions.InvalidFormatException;
 import org.apache.poi.poifs.filesystem.POIFSFileSystem;
 import org.apache.poi.ss.usermodel.Cell;
 import org.apache.poi.ss.usermodel.Row;
@@ -15,9 +14,6 @@ import org.apache.poi.ss.usermodel.Sheet;
 import org.apache.poi.ss.usermodel.Workbook;
 import org.apache.poi.xssf.usermodel.XSSFWorkbook;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
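
Dropping java.io.File, FileInputStream, FileNotFoundException and InvalidFormatException while keeping the Hadoop FileSystem/Path and the HSSF/XSSF workbook classes suggests the Excel reader now works on an HDFS input stream rather than a local file. A hedged sketch of that stream-based pattern follows; the method name and the extension check are assumptions, not taken from the commit:

// Illustrative sketch, not part of the commit: load a POI workbook from an HDFS
// stream, choosing the implementation by file extension (.xls vs .xlsx).
import java.io.InputStream
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.poi.hssf.usermodel.HSSFWorkbook
import org.apache.poi.ss.usermodel.Workbook
import org.apache.poi.xssf.usermodel.XSSFWorkbook

object ExcelFromHdfsSketch {
  def openWorkbook(fs: FileSystem, path: String): Workbook = {
    val in: InputStream = fs.open(new Path(path))
    if (path.toLowerCase.endsWith(".xls")) new HSSFWorkbook(in)  // legacy binary format
    else new XSSFWorkbook(in)                                    // OOXML (.xlsx)
  }
}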

View File

@@ -8,7 +8,7 @@ import java.io.IOException;
 import java.io.ObjectStreamException;
 import java.io.Serializable;
 public class JedisClusterImplSer implements Serializable {
     private static final long serialVersionUID = -51L;
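
The JedisClusterImplSer hunk only shows the class declaration and serialVersionUID, but the Serializable and ObjectStreamException imports match the common pattern of keeping the live JedisCluster transient and reconnecting after deserialization. A hedged sketch of that idiom is below; the class, field and method names are illustrative, not copied from the source:

// Illustrative sketch of a serializable JedisCluster wrapper; not the actual
// JedisClusterImplSer source, just the idiom its imports hint at.
import java.io.ObjectStreamException
import redis.clients.jedis.{HostAndPort, JedisCluster}

@SerialVersionUID(-51L)
class JedisClusterWrapperSketch(host: String, port: Int) extends Serializable {
  // The connection-holding client cannot be serialized, so it is transient
  // and rebuilt on whichever JVM the object lands in.
  @transient private var cluster: JedisCluster = _

  def getJedisCluster: JedisCluster = {
    if (cluster == null) cluster = new JedisCluster(new HostAndPort(host, port))
    cluster
  }

  // readResolve hook (the reason ObjectStreamException would be imported):
  // reconnect eagerly right after deserialization instead of on first use.
  @throws[ObjectStreamException]
  private def readResolve(): AnyRef = { getJedisCluster; this }
}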

View File

@@ -29,14 +29,14 @@ class PutHdfs extends ConfigurableStop{
     config.set("fs.defaultFS",hdfsUrl)
     val fs = FileSystem.get(config)
     if (types=="json"){
       inDF.repartition(partition).write.json(hdfsUrl+hdfsPath)
     } else if (types=="csv"){
       inDF.repartition(partition).write.csv(hdfsUrl+hdfsPath)
     } else {
       //parquet
       inDF.repartition(partition).write.save(hdfsUrl+hdfsPath)
     }
   }
   override def setProperties(map: Map[String, Any]): Unit = {
@@ -50,8 +50,10 @@
     var descriptor : List[PropertyDescriptor] = List()
     val hdfsPath = new PropertyDescriptor().name("hdfsPath").displayName("hdfsPath").defaultValue("").required(true)
     val hdfsUrl = new PropertyDescriptor().name("hdfsUrl").displayName("hdfsUrl").defaultValue("").required(true)
-    val types = new PropertyDescriptor().name("types").displayName("json,csv,parquet").defaultValue("").required(true)
-    val partition = new PropertyDescriptor().name("partition").displayName("repartition").defaultValue("").required(true)
+    val types = new PropertyDescriptor().name("types").displayName("json,csv,parquet").description("json,csv,parquet")
+      .defaultValue("csv").allowableValues(Set("json","csv","parquet")).required(true)
+    val partition = new PropertyDescriptor().name("partition").displayName("repartition").description("partition").defaultValue("").required(true)
     descriptor = partition :: descriptor
     descriptor = types :: descriptor
     descriptor = hdfsPath :: descriptor
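
The new descriptor constrains types to json/csv/parquet and defaults it to csv, which lines up with the write branch in the earlier PutHdfs hunk. As an illustration only (not code from the commit), the dispatch that constrained value feeds into can be read as:

// Illustrative sketch, not part of the commit: with "types" limited to
// json/csv/parquet, the perform() branch maps one value to one writer call.
// Parameter names mirror the fields shown in the diff above.
import org.apache.spark.sql.DataFrame

def writeToHdfs(inDF: DataFrame, partition: Int, types: String,
                hdfsUrl: String, hdfsPath: String): Unit = {
  val writer = inDF.repartition(partition).write
  types match {
    case "json" => writer.json(hdfsUrl + hdfsPath)
    case "csv"  => writer.csv(hdfsUrl + hdfsPath)
    case _      => writer.save(hdfsUrl + hdfsPath)  // parquet, Spark's default format
  }
}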