The example code is as follows:
package h5.all.demo;
import java.io.File;
import java.io.FileReader;
import java.io.LineNumberReader;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
public class Example {
private static String FILENAME = "G:/txt_write_hdf5/market_20090104.h5";// the .h5 file is written to this path
private static String PATH="/";// root group to iterate
private static int count = 0 ;
private static int file_id = -1;
private static int dataset_id = -1;
public static final long DIM0 = 100000;// initial number of dataset rows
private static int DIMC=100000;
private static int DIME=0;
private static long[] dims = {DIMC};// initial size
private static long[] extdims = { DIME };
private static final int CHUNK_X = 4;
private static final int CHUNK_Y = 4;
private static final int NDIMS = 1;
private static final int RANK = 1;// dataset rank (number of dimensions)
protected static final int INTEGERSIZE = 4;
protected static final int LONGSIZE=8;
protected static final int FLOATSIZE=4;
protected static final int DOUBLESIZE = 8;
protected final static int MAXSTRINGSIZE = 80;
private static byte[] dset_data;
private static ByteBuffer outBuf;
static class Sensor_Datatype {
static int numberMembers = 5;// number of compound members (columns)
static int[] memberDims = { 1, 1, 1, 1 , 1 };
static String[] memberNames = {
"trading_day", "updatetime", "instrument_id", "gap_number", "reserve"
};// field names of the table columns
static long[] memberMemTypes = {
HDF5Constants.H5T_NATIVE_INT,HDF5Constants.H5T_NATIVE_LONG, HDF5Constants.H5T_NATIVE_FLOAT, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_C_S1
};// in-memory type of each member; make sure the chosen type is wide enough for the stored value, otherwise the data will overflow
static long[] memberFileTypes = {
HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_STD_I64BE, HDF5Constants.H5T_IEEE_F32BE, HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_C_S1
};// on-disk (file) type of each member
static int[] memberStorage = { INTEGERSIZE, LONGSIZE, FLOATSIZE, DOUBLESIZE, MAXSTRINGSIZE };// storage size in bytes of each member
// Data size is the storage size for the members.
static long getTotalDataSize() {
long data_size = 0;
for (int indx = 0; indx < numberMembers; indx++)
data_size += memberStorage[indx] * memberDims[indx];
return count * data_size;
}
static long getDataSize() {
long data_size = 0;
for (int indx = 0; indx < numberMembers; indx++)
data_size += memberStorage[indx] * memberDims[indx];
return data_size;
}
static int getOffset(int memberItem) {
int data_offset = 0;
for (int indx = 0; indx < memberItem; indx++)
data_offset += memberStorage[indx];
return data_offset;
}
// Count the number of data lines in the .txt file
public static int readTxtLineNum(String path) {
int dataCount=-1;// start at -1 so the header line is not counted
try {
File file=new File(path);
if(file.exists()) {
FileReader fr=new FileReader(file);
LineNumberReader lnr=new LineNumberReader(fr);
while(null != lnr.readLine()) {
dataCount++;
}
lnr.close();
}else {
System.out.println("文件不存在!");
}
} catch (Exception e) {
e.printStackTrace();
}
return dataCount;
}
// Read the current number of rows in a dataset of the .h5 file
public static long readH5LineNum(String FILENAME,String DATASETNAME) {
int file_id = -1;
int dataspace_id = -1;
int dataset_id = -1;
long[] dims = { DIM0 };
// Open an existing file.
try {
file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
try {
if (file_id >= 0)
dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
// Get dataspace and allocate memory for read buffer.
try {
if (dataset_id >= 0)
dataspace_id = H5.H5Dget_space(dataset_id);
}
catch (Exception e) {
e.printStackTrace();
}
try {
if (dataspace_id >= 0)
H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
}
catch (Exception e) {
e.printStackTrace();
}
try {
if (dataset_id >= 0)
H5.H5Dclose(dataset_id);
} catch (Exception e) {
e.printStackTrace();
}
// Terminate access to the data space.
try {
if (dataspace_id >= 0)
H5.H5Sclose(dataspace_id);
} catch (Exception e){
e.printStackTrace();
}
// Close the file.
try {
if (file_id >= 0)
H5.H5Fclose(file_id);
} catch (Exception e) {
e.printStackTrace();
}
return dims[0];
}
}
static class Sensor {
public Integer trading_day;
public Long updatetime;
public Float instrument_id;
public Double gap_number;
public String reserve;
public Sensor() {}
public Sensor(Integer trading_day, Long updatetime, Float instrument_id, Double gap_number, String reserve) {
super();
this.trading_day = trading_day;
this.updatetime = updatetime;
this.instrument_id = instrument_id;
this.gap_number = gap_number;
this.reserve = reserve;
}
public Integer getTrading_day() {
return trading_day;
}
public void setTrading_day(Integer trading_day) {
this.trading_day = trading_day;
}
public Long getUpdatetime() {
return updatetime;
}
public void setUpdatetime(Long updatetime) {
this.updatetime = updatetime;
}
public Float getInstrument_id() {
return instrument_id;
}
public void setInstrument_id(Float instrument_id) {
this.instrument_id = instrument_id;
}
public Double getGap_number() {
return gap_number;
}
public void setGap_number(Double gap_number) {
this.gap_number = gap_number;
}
public String getReserve() {
return reserve;
}
public void setReserve(String reserve) {
this.reserve = reserve;
}
// Iterate over all objects under the given group of the .h5 file and return their names as an array
public static String[] do_iterate(String FILENAME,String PATH) {
int file_id = -1;
// Open a file using default properties.
try {
file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
// Begin iteration.
// System.out.println("Objects in root group:");//----
try {
if (file_id >= 0) {
int count = (int) H5.H5Gn_members(file_id, PATH);
String[] oname = new String[count];
int[] otype = new int[count];
int[] ltype = new int[count];
long[] orefs = new long[count];
H5.H5Gget_obj_info_all(file_id, PATH, oname, otype, ltype, orefs, HDF5Constants.H5_INDEX_NAME);
// Get type of the object and display its name and type.
for (int indx = 0; indx < otype.length; indx++) {
switch (H5O_type.get(otype[indx])) {
case H5O_TYPE_GROUP:
System.out.print(" Group: " + oname[indx]+","+oname.length+" ");
break;
case H5O_TYPE_DATASET:
// System.out.print(" Dataset: " + oname[indx]+","+oname.length+" ");//----
break;
case H5O_TYPE_NAMED_DATATYPE:
System.out.print(" Datatype: " + oname[indx]+","+oname.length+" ");
break;
default:
System.out.print(" Unknown: " + oname[indx]+","+oname.length+" ");
}
}
// System.out.println();//----
// Close the file.
try {
if (file_id >= 0)
H5.H5Fclose(file_id);
}
catch (Exception e) {
e.printStackTrace();
}
return oname;
}
}
catch (Exception e) {
e.printStackTrace();
}
return new String[]{"数据集遍历出错"};
}
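// Serialize this record into the shared byte buffer at byte offset dbposition, using the member offsets defined in Sensor_Datatype; the string member is written as fixed-length UTF-8 bytes.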
void writeBuffer(ByteBuffer databuf, int dbposition) {
//0
databuf.putInt(dbposition + Sensor_Datatype.getOffset(0), this.trading_day);
//1
databuf.putLong(dbposition + Sensor_Datatype.getOffset(1), this.updatetime);
//2
databuf.putFloat(dbposition + Sensor_Datatype.getOffset(2), this.instrument_id);
//3
databuf.putDouble(dbposition + Sensor_Datatype.getOffset(3), this.gap_number);
//4
byte[] t_reserve = this.reserve.getBytes(Charset.forName("UTF-8"));
int a_reserve = (t_reserve.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : t_reserve.length;
for (int ndx = 0; ndx < a_reserve; ndx++)
databuf.put(dbposition + Sensor_Datatype.getOffset(4) + ndx, t_reserve[ndx]);
for (int ndx = a_reserve; ndx < MAXSTRINGSIZE; ndx++)
databuf.put(dbposition + Sensor_Datatype.getOffset(4) + ndx, (byte) 0);// pad the remainder of the string field with zero bytes
}
// void readBuffer(ByteBuffer databuf, int dbposition) {
// //0
// this.trading_day = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
// //1
// this.updatetime = databuf.getLong(dbposition + Sensor_Datatype.getOffset(1));
// //2
// this.instrument_id = databuf.getFloat(dbposition + Sensor_Datatype.getOffset(2));
// //3
// this.gap_number = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
// //64
// ByteBuffer stringbuf_reserve = databuf.duplicate();
// stringbuf_reserve.position(dbposition + Sensor_Datatype.getOffset(4));
// stringbuf_reserve.limit(dbposition + Sensor_Datatype.getOffset(4) + MAXSTRINGSIZE);
// byte[] bytearr_reserve = new byte[stringbuf_reserve.remaining()];
// stringbuf_reserve.get(bytearr_reserve);
// this.reserve = new String(bytearr_reserve, Charset.forName("UTF-8")).trim();
// }
@Override
public String toString() {
return "Sensor [trading_day=" + trading_day + ", updatetime=" + updatetime + ", instrument_id="
+ instrument_id + ", gap_number=" + gap_number + ", reserve=" + reserve + "]";
}
}
enum H5O_type {
H5O_TYPE_UNKNOWN(-1), // Unknown object type
H5O_TYPE_GROUP(0), // Object is a group
H5O_TYPE_DATASET(1), // Object is a dataset
H5O_TYPE_NAMED_DATATYPE(2), // Object is a named data type
H5O_TYPE_NTYPES(3); // Number of different object types
private static final Map<Integer, H5O_type> lookup = new HashMap<Integer, H5O_type>();
static {
for (H5O_type s : EnumSet.allOf(H5O_type.class))
lookup.put(s.getCode(), s);
}
private int code;
H5O_type(int layout_type) {
this.code = layout_type;
}
public int getCode() {
return this.code;
}
public static H5O_type get(int code) {
return lookup.get(code);
}
}
// Create the .h5 file / dataset if needed and write one record into it
public static void CreateDataset(Sensor object_data,String DATASETNAME,int timeCount,boolean flag,boolean flagDset) {
// System.out.println("本次输出的对象值为:"+object_data+"数组下标>:"+timeCount);//----
int strtype_id = -1;
int memtype_id = -1;
int filetype_id = -1;
int dataspace_id = -1;
int dcpl_id = -1;
long[] chunk_dims = { CHUNK_X, CHUNK_Y };// only the first element is used because NDIMS is 1
long[] maxdims = { HDF5Constants.H5S_UNLIMITED };
// Initialize data: the record read from the text file is passed in as the object_data parameter.
try {
if(flag || file_id >= 0) {// the file already exists: open it directly
file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
}else {// the file does not exist: create it
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
HDF5Constants.H5P_DEFAULT);
}
} catch (Exception e) {
e.printStackTrace();
}
// Create string datatype.
try {
strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
if (strtype_id >= 0)
H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
}
catch (Exception e) {
e.printStackTrace();
}
// Create the compound datatype for memory.
try {
memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
if (memtype_id >= 0) {
for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
int type_id = (int) Sensor_Datatype.memberMemTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
type_id);
}
}
}
catch (Exception e) {
e.printStackTrace();
}
// Create the compound datatype for the file. Because the standard
// types we are using for the file may have different sizes than
// the corresponding native types, we must manually calculate the
// offset of each member.
try {
filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
if (filetype_id >= 0) {
for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
int type_id = (int) Sensor_Datatype.memberFileTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
type_id);
}
}
}
catch (Exception e) {
e.printStackTrace();
}
// Create dataspace. Setting maximum size to NULL sets the maximum
// size to be the current size.
try {
dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);// the third argument (maxdims) must be set for the dataset to be extendible
}
catch (Exception e) {
e.printStackTrace();
}
// Create the dataset creation property list.
try {
dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
}
catch (Exception e) {
e.printStackTrace();
}
// Set the chunk size.
try {
if (dcpl_id >= 0)
H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
}
catch (Exception e) {
e.printStackTrace();
}
// Create the unlimited dataset.
try {
if(flagDset || dataset_id >= 0) {// the dataset already exists: open it
dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
}else if((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0) && (dataset_id < 0)){// the dataset does not exist: create it
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
dcpl_id, HDF5Constants.H5P_DEFAULT);
}
}
catch (Exception e) {
e.printStackTrace();
}
object_data.writeBuffer(outBuf, timeCount * (int)Sensor_Datatype.getDataSize());
try {
if ((dataset_id >= 0) && (memtype_id >= 0))
H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
HDF5Constants.H5P_DEFAULT, dset_data);
}
catch (Exception e) {
e.printStackTrace();
}
// End access to the dataset and release resources used by it.
try {
if (dataset_id >= 0)
H5.H5Dclose(dataset_id);
}
catch (Exception e) {
e.printStackTrace();
}
// Terminate access to the data space.
try {
if (dataspace_id >= 0)
H5.H5Sclose(dataspace_id);
}
catch (Exception e) {
e.printStackTrace();
}
// Terminate access to the file type.
try {
if (filetype_id >= 0)
H5.H5Tclose(filetype_id);
}
catch (Exception e) {
e.printStackTrace();
}
// Terminate access to the mem type.
try {
if (memtype_id >= 0)
H5.H5Tclose(memtype_id);
}
catch (Exception e) {
e.printStackTrace();
}
try {
if (strtype_id >= 0)
H5.H5Tclose(strtype_id);
}
catch (Exception e) {
e.printStackTrace();
}
// Close the file.
try {
if (file_id >= 0)
H5.H5Fclose(file_id);
}
catch (Exception e) {
e.printStackTrace();
}
}
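// Extend the existing unlimited dataset by one row (extdims) and write the newly parsed record into the extended region.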
private static void extendUnlimited(Sensor object_data,String DATASETNAME,int timeCount,boolean flag,boolean flagDset,int h5Line) {
// System.out.println("每次传入的数据参数为:"+object_data+"数组下标>:"+timeCount);//----
int strtype_id = -1;
int memtype_id = -1;
int filetype_id = -1;
int dataspace_id = -1;
long[] start = { 0, 0 };
long[] count = new long[2];
// Open an existing file.
try {
file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
// Open an existing dataset.
try {
if (file_id >= 0)
dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
}
catch (Exception e) {
e.printStackTrace();
}
// Create string datatype.
try {
strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
if (strtype_id >= 0)
H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
}
catch (Exception e) {
e.printStackTrace();
}
// Create the compound datatype for memory.
try {
memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
if (memtype_id >= 0) {
for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {// for each compound member (column)
int type_id = (int) Sensor_Datatype.memberMemTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
type_id);
}
}
}
catch (Exception e) {
e.printStackTrace();
}
// Create the compound datatype for the file. Because the standard
// types we are using for the file may have different sizes than
// the corresponding native types, we must manually calculate the
// offset of each member.
try {
filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
if (filetype_id >= 0) {
for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {// for each compound member (column)
int type_id = (int) Sensor_Datatype.memberFileTypes[indx];
if (type_id == HDF5Constants.H5T_C_S1)
type_id = strtype_id;
H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
type_id);
}
}
}
catch (Exception e) {
e.printStackTrace();
}
// Extend the dataset.
try {
if (dataset_id >= 0)
H5.H5Dset_extent(dataset_id, extdims);// may fail if the dataset is not chunked/extendible
}
catch (Exception e) {
e.printStackTrace();
}
// Retrieve the dataspace for the newly extended dataset.
try {
if (dataset_id >= 0)
dataspace_id = H5.H5Dget_space(dataset_id);
}
catch (Exception e) {
e.printStackTrace();
}
// Select the entire dataspace.
try {
if (dataspace_id >= 0) {
H5.H5Sselect_all(dataspace_id);
// Subtract a hyperslab reflecting the original dimensions from the
// selection. The selection now contains only the newly extended
// portions of the dataset.
count[0] = dims[0];
count[1] = 65;
H5.H5Sselect_hyperslab(dataspace_id, HDF5Constants.H5S_SELECT_NOTB, start, null, count, null);
object_data.writeBuffer(outBuf, timeCount * (int)Sensor_Datatype.getDataSize());
// Write the data to the selected portion of the dataset.
if (dataset_id >= 0)
H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, dataspace_id,
HDF5Constants.H5P_DEFAULT, dset_data);
}
}
catch (Exception e) {
e.printStackTrace();
}
// End access to the dataset and release resources used by it.
try {
if (dataset_id >= 0)
H5.H5Dclose(dataset_id);
}
catch (Exception e) {
e.printStackTrace();
}
try {
if (dataspace_id >= 0)
H5.H5Sclose(dataspace_id);
}
catch (Exception e) {
e.printStackTrace();
}
// Close the file.
try {
if (file_id >= 0)
H5.H5Fclose(file_id);
}
catch (Exception e) {
e.printStackTrace();
}
}
// Iterate over the locally stored text files and read their data
public static void readLocalExcel() {
String url="G:/hdf5_write_txt";
File file=new File(url);
File[] files=file.listFiles();
String dsetString="";
if(null != files) {
for(File f:files) {// iterate to get each text file path
String path="";
String filePath=f.getPath();// full path of the current file
String[] fpStr=filePath.split("\\\\");// split on backslashes so the path can be rebuilt with forward slashes
for(int i=0;i<fpStr.length;i++) {
dsetString=""+fpStr[fpStr.length-1].split("\\.")[0];
if(i != fpStr.length -1) {// rebuild the path
path+=fpStr[i]+"/";
}else {
path+=fpStr[i];
}
}
Example.readAllPath(path,dsetString);
}
}
}
// Read data from a text file, e.g. G:/hdf5_write_txt/a0901.txt
public static int readAllPath(String path,String dsetString) {
// Check whether the target .h5 file exists at the given path; if it does, also check whether the dataset name already exists
boolean flagE=Example.judgeH5File(FILENAME);// true if it exists, false otherwise
boolean flagDsetE=false;
if(flagE) {
flagDsetE=Example.judgeH5Dset(dsetString);
}
// Read the data
String line_record=null;
count=0;// reset the counter before each file is read
int h5Line=0;
boolean first=false;
try {
RandomAccessFile raf=new RandomAccessFile(path, "r");
int txtLine=Sensor_Datatype.readTxtLineNum(path);// number of data lines in the text file
if(flagE && flagDsetE) {// both the file and the dataset already exist
h5Line=(int)Sensor_Datatype.readH5LineNum(FILENAME, dsetString);
}
while(null != (line_record=raf.readLine())) {// read one line
if(count != 0) {// skip the header line (count == 0)
line_record=new String(line_record.getBytes("UTF-8"),"GBK");
Sensor sen=Example.parseRecord(line_record);// parse each line into a Sensor object
int timeCount=count-1;
boolean flag=Example.judgeH5File(FILENAME);// true if it exists, false otherwise
boolean flagDset=false;
if(flag) {
flagDset=Example.judgeH5Dset(dsetString);
}
// if the file or the dataset does not exist, create it
if(!flag || !flagDset) {
dims[0]=1;
dset_data = new byte[(int)dims[0] * (int)Sensor_Datatype.getDataSize()];
outBuf = ByteBuffer.wrap(dset_data).order(ByteOrder.nativeOrder());
Example.CreateDataset(sen,dsetString,timeCount,flag,flagDset);
first=true;
}
// if the file and the dataset exist, append
if(flag && flagDset) {// the text file contains rows that are not yet in the .h5 dataset, so write them
if(first) {
h5Line=(int)Sensor_Datatype.readH5LineNum(FILENAME, dsetString);
}
// h5Line=(int)Sensor_Datatype.readH5LineNum(FILENAME, dsetString);
if((h5Line < txtLine) && (DIME <= h5Line)) {// one more row still needs to be appended (or nothing left to append)
DIME+=1;
extdims[0]=h5Line+1;
dims[0]=h5Line;
dset_data = new byte[(int)extdims[0] * (int)Sensor_Datatype.getDataSize()];
outBuf = ByteBuffer.wrap(dset_data).order(ByteOrder.nativeOrder());
Example.extendUnlimited(sen, dsetString, timeCount, flag, flagDset,h5Line);// every call to this method grows the dataset by one row
if(DIME > h5Line && h5Line <= txtLine) {
h5Line=(int)Sensor_Datatype.readH5LineNum(FILENAME, dsetString);
}
}
}
}
count+=1;
}
dataset_id = -1;
count-=1;
DIME=0;
h5Line=0;
System.out.println("本次共读出数据"+count+"条");
raf.close();
} catch (Exception e) {
e.printStackTrace();
}
return count;// number of data records contained in the text file
}
// Check whether the .h5 file already exists at the target path
public static boolean judgeH5File(String path) {
boolean flag=false;// defaults to false (file does not exist)
String[] pathStr=path.split("/");
String fileH5Name=pathStr[pathStr.length-1];// file name of the .h5 file to create
String url="";// parent directory path
for(int i=0;i<pathStr.length -1;i++) {// drop the trailing file name and rebuild the directory path
if(i != pathStr.length -2) {
url+=pathStr[i]+"/";
}else {
url+=pathStr[i];
}
}
File file=new File(url);
String[] fileName=file.list();
for(int i=0;i<fileName.length;i++) {// if the directory contains no matching file, flag keeps its default value of false
if(fileName[i].equals(fileH5Name)) {// the file exists: set the flag to true
flag=true;
}
}
return flag;
}
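// Check whether the given dataset name already exists under the root group of the .h5 file.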
public static boolean judgeH5Dset(String dsetString) {
boolean flagDset=false;// defaults to false (dataset does not exist)
String[] dsetName=Sensor.do_iterate(FILENAME, PATH);
for(int i=0;i<dsetName.length;i++) {
if(dsetString.equals(dsetName[i])) {// the dataset exists
flagDset=true;
}
}
return flagDset;
}
// Split one line of text and populate a Sensor object
public static Sensor parseRecord(String line_record) {
Sensor sen=new Sensor();
String[] fields=line_record.split(",");//按格式进行拆分
sen.setTrading_day(Integer.parseInt(fields[0].trim()));
sen.setUpdatetime(Long.parseLong(fields[1].trim()));
sen.setInstrument_id(Float.parseFloat(fields[2].trim()));
sen.setGap_number(Double.parseDouble(fields[3].trim()));
sen.setReserve(fields[64].trim());
return sen;
}
public static void main(String[] args) {
long timeS=System.currentTimeMillis();
Example.readLocalExcel();
long timeE=System.currentTimeMillis();
System.out.println("最终执行时间为:"+(timeS-timeE));
}
}
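For reference, below is a minimal sketch of a generator for an input text file in the shape this example expects. It is an assumption inferred from readLocalExcel() (which scans G:/hdf5_write_txt), readTxtLineNum() (which skips one header line) and parseRecord() (which reads comma-separated fields 0 to 3 plus field index 64); the file name a0901.txt, the 65-column layout, and all sample values are hypothetical, not part of the original example.

import java.io.File;
import java.io.PrintWriter;

public class SampleInputGenerator {
    public static void main(String[] args) throws Exception {
        // Hypothetical input location; readLocalExcel() scans this directory for .txt files.
        File out = new File("G:/hdf5_write_txt/a0901.txt");
        out.getParentFile().mkdirs();
        try (PrintWriter pw = new PrintWriter(out, "UTF-8")) {
            pw.println("header");                        // first line is treated as a header and skipped
            for (int i = 0; i < 3; i++) {
                StringBuilder sb = new StringBuilder();
                sb.append(20090104).append(',');         // field 0: trading_day (int)
                sb.append(93000000L + i).append(',');    // field 1: updatetime (long)
                sb.append(1.0f + i).append(',');         // field 2: instrument_id (float)
                sb.append(0.5 * i);                      // field 3: gap_number (double)
                for (int col = 4; col < 64; col++)       // filler columns 4..63, ignored by parseRecord()
                    sb.append(",0");
                sb.append(",reserve_").append(i);        // field 64: reserve (string)
                pw.println(sb);
            }
        }
    }
}

If this generator is run first and Example.main() afterwards, the writer should create market_20090104.h5 with a dataset named a0901 (the file name without its extension) containing the three sample rows.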