package day1;
/**
* Created by hadoop on 2017/10/15.
*/
import java.io.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
public class XlsMain {
public List list = new ArrayList();
public List list1 = new ArrayList();
public static void main( String[] args) throws IOException {
XlsMain xlsMain = new XlsMain();
xlsMain.readXls();
// List a = list[1];
}
private void readXls() throws IOException{
InputStream is = new FileInputStream( “C:\Users\Administrator\Desktop\test\worktime.xls”);
HSSFWorkbook hssfWorkbook = new HSSFWorkbook( is);
// 循环工作表Sheet
for(int numSheet = 0; numSheet < hssfWorkbook.getNumberOfSheets(); numSheet++){
HSSFSheet hssfSheet = hssfWorkbook.getSheetAt( numSheet);
if(hssfSheet == null){
continue;
}
// 循环行Row
for(int rowNum = 1; rowNum <= hssfSheet.getLastRowNum(); rowNum++){
HSSFRow hssfRow = hssfSheet.getRow(rowNum);
if(hssfRow == null){
continue;
}
// 循环列Cell
for(int cellNum = 0; cellNum <= hssfRow.getLastCellNum(); cellNum++){
HSSFCell hssfCell = hssfRow.getCell(cellNum);
if(hssfCell == null){
continue;
}
if(hssfRow.getCell(cellNum)!=null){//处理函数单元格问题,先将单元格内容转为String
hssfRow.getCell(cellNum).setCellType(hssfCell.CELL_TYPE_STRING);
}
// PrintStream mytxt=new PrintStream(“C:\Users\Administrator\Desktop\test\output.txt”);
System.out.print( " "+ getValue(hssfCell));
// PrintStream out=System.out;
// System.setOut(mytxt);
}
//System.out.println(list.toArray(new String[0]));
// String[] time = list.toArray(new String[0]);
// int date = time.toString().indexOf(“-“);
// int sec = time.toString().lastIndexOf(” “);
// String[] num = list.toArray(new String[1]);
System.out.println(” “);
}
}
}
@SuppressWarnings("static-access")
private String getValue(HSSFCell hssfCell){
if(hssfCell.getCellType() == hssfCell.CELL_TYPE_BOOLEAN){
return String.valueOf( hssfCell.getBooleanCellValue());
}else if(hssfCell.getCellType() == hssfCell.CELL_TYPE_NUMERIC){
return String.valueOf( hssfCell.getNumericCellValue());
}else if (hssfCell.getCellType() ==hssfCell.CELL_TYPE_STRING){
return String.valueOf(hssfCell.getStringCellValue());
}else{
return String.valueOf( hssfCell.getStringCellValue());
}
}
}
package day1;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
/**
* Created by hadoop on 2017/10/16.
*/
public class timecount716 {
    /**
     * Reads the entire content of a text file.
     *
     * @param file the file to read
     * @return the file's lines joined with the platform line separator;
     *         an empty string (or partial content) if reading fails
     */
    public static String txt2String(File file) {
        StringBuilder result = new StringBuilder();
        // try-with-resources guarantees the reader is closed even if readLine
        // throws (the original leaked the BufferedReader on that path).
        try (BufferedReader br = new BufferedReader(new FileReader(file))) {
            String s;
            boolean first = true;
            // Read one line at a time; put the separator only BETWEEN lines,
            // fixing the original's spurious leading newline.
            while ((s = br.readLine()) != null) {
                if (!first) {
                    result.append(System.lineSeparator());
                }
                result.append(s);
                first = false;
            }
        } catch (Exception e) {
            // Best-effort: report the failure and return whatever was read so far,
            // preserving the original's non-throwing contract.
            e.printStackTrace();
        }
        return result.toString();
    }

    public static void main(String[] args) {
        File file = new File("C:\\Users\\Administrator\\Desktop\\test\\716.txt");
        // Split the content on single spaces and print each token on its own line.
        String[] aa = txt2String(file).split("\\ ");
        for (int i = 0; i < aa.length; i++) {
            System.out.println("--" + aa[i]);
        }
    }
}