导出数据到CSV文件有很多种方法。我要导出的表中数据有几百兆,如果全部放在内存中处理,会对执行程序的客户端提出比较高的内存要求。
/// <summary>
/// Exports the rows matching the given query to a CSV file. Rows are streamed
/// through SqlDataReader and StreamWriter one at a time, so memory usage stays
/// low even for result sets of hundreds of megabytes.
/// </summary>
/// <param name="sql">SELECT statement whose result set is exported.
/// NOTE(review): the text is executed as-is — callers must never build it from
/// untrusted input.</param>
/// <param name="FileName">Path of the CSV file to create (overwritten if it exists).</param>
/// <param name="splitChar">Field separator written between columns.</param>
/// <returns>true when the export completed; exceptions propagate on failure.</returns>
public static bool WriteCSVFlie(string sql, string FileName, string splitChar)
{
    // gb2312 so Chinese text opens correctly in Excel on zh-CN systems.
    Encoding encode = Encoding.GetEncoding("gb2312");

    // using-statements guarantee the file, connection, command and reader are
    // released even when an exception occurs mid-export; the original leaked
    // all of them on error and rethrew with "throw ex", losing the stack trace.
    using (StreamWriter sw = new StreamWriter(new FileStream(FileName, FileMode.Create), encode))
    using (SqlConnection connection = new SqlConnection(ConfigurationManager.AppSettings["DBConnectionString"]))
    using (SqlCommand command = new SqlCommand(sql, connection))
    {
        connection.Open();
        using (SqlDataReader reader = command.ExecuteReader())
        {
            // Header row with the column names. Written before the row loop so
            // the file carries its schema even when the result set is empty
            // (the original skipped the header for empty results).
            string[] fields = new string[reader.FieldCount];
            for (int j = 0; j < reader.FieldCount; j++)
            {
                fields[j] = reader.GetName(j);
            }
            sw.WriteLine(string.Join(splitChar, fields));

            while (reader.Read())
            {
                // The separator may occur inside the data itself, so replace it
                // with the sentinel "-_-"; the import side swaps it back.
                for (int j = 0; j < reader.FieldCount; j++)
                {
                    fields[j] = reader[j].ToString().Trim().Replace(splitChar, "-_-");
                }
                sw.WriteLine(string.Join(splitChar, fields));
            }
        }
    }
    return true;
}
/// <summary>
/// Exports the rows matching the given query to a CSV file. Rows are streamed
/// through SqlDataReader and StreamWriter one at a time, so memory usage stays
/// low even for result sets of hundreds of megabytes.
/// </summary>
/// <param name="sql">SELECT statement whose result set is exported.
/// NOTE(review): the text is executed as-is — callers must never build it from
/// untrusted input.</param>
/// <param name="FileName">Path of the CSV file to create (overwritten if it exists).</param>
/// <param name="splitChar">Field separator written between columns.</param>
/// <returns>true when the export completed; exceptions propagate on failure.</returns>
public static bool WriteCSVFlie(string sql, string FileName, string splitChar)
{
    // gb2312 so Chinese text opens correctly in Excel on zh-CN systems.
    Encoding encode = Encoding.GetEncoding("gb2312");

    // using-statements guarantee the file, connection, command and reader are
    // released even when an exception occurs mid-export; the original leaked
    // all of them on error and rethrew with "throw ex", losing the stack trace.
    using (StreamWriter sw = new StreamWriter(new FileStream(FileName, FileMode.Create), encode))
    using (SqlConnection connection = new SqlConnection(ConfigurationManager.AppSettings["DBConnectionString"]))
    using (SqlCommand command = new SqlCommand(sql, connection))
    {
        connection.Open();
        using (SqlDataReader reader = command.ExecuteReader())
        {
            // Header row with the column names. Written before the row loop so
            // the file carries its schema even when the result set is empty
            // (the original skipped the header for empty results).
            string[] fields = new string[reader.FieldCount];
            for (int j = 0; j < reader.FieldCount; j++)
            {
                fields[j] = reader.GetName(j);
            }
            sw.WriteLine(string.Join(splitChar, fields));

            while (reader.Read())
            {
                // The separator may occur inside the data itself, so replace it
                // with the sentinel "-_-"; the import side swaps it back.
                for (int j = 0; j < reader.FieldCount; j++)
                {
                    fields[j] = reader[j].ToString().Trim().Replace(splitChar, "-_-");
                }
                sw.WriteLine(string.Join(splitChar, fields));
            }
        }
    }
    return true;
}
下面介绍把CSV文件导入数据库的代码,采用SqlBulkCopy批量导入数据比较方便。
/// <summary>
/// Bulk-copies the rows of a DataTable into the destination database table.
/// The target table is truncated first, so afterwards it contains exactly the
/// DataTable's rows.
/// </summary>
/// <param name="datatable">Source rows; column layout must match the target table.</param>
/// <param name="conn">Database connection string.</param>
/// <param name="destTable">Name of the destination table.</param>
public static void BCPInForDatatable(DataTable datatable, string conn, string destTable)
{
    // Empty the target table first so the copy is a full refresh.
    GetSQL.TruncateTable(destTable);

    // The using-statement disposes the SqlBulkCopy; the original additionally
    // closed it in a finally block with an empty catch — both redundant — and
    // rethrew with "throw ex", which destroys the stack trace.
    using (SqlBulkCopy bulkCopy = new SqlBulkCopy(conn))
    {
        // Rows are sent to the server in batches of this size.
        bulkCopy.BatchSize = 1000;
        // Seconds allowed for the whole copy; on timeout the transaction is not
        // committed, the data is rolled back, and copied rows are removed.
        bulkCopy.BulkCopyTimeout = 60;
        // Raise SqlRowsCopied every 10000 rows so progress can be reported.
        bulkCopy.NotifyAfter = 10000;
        bulkCopy.SqlRowsCopied += new SqlRowsCopiedEventHandler(OnSqlRowsCopied);
        bulkCopy.DestinationTableName = destTable;
        Console.WriteLine("Import To DB!");
        bulkCopy.WriteToServer(datatable);
    }
}
/// <summary>
/// SqlBulkCopy progress callback: prints the running copied-row count.
/// </summary>
private static void OnSqlRowsCopied(object sender, SqlRowsCopiedEventArgs e)
{
    Console.WriteLine($" Current Insert Row Number: {e.RowsCopied}");
}
/// <summary>
/// Empties the given table with TRUNCATE TABLE.
/// </summary>
/// <param name="tablename">Table to truncate; must be a plain, optionally
/// schema-qualified identifier (letters, digits, '_' and '.').</param>
/// <returns>The value returned by ExecuteNonQuery.</returns>
public static int TruncateTable(string tablename)
{
    // TRUNCATE TABLE cannot take the table name as a query parameter, so the
    // name must be concatenated into the statement. Validate it as a simple
    // identifier first so a crafted name cannot inject arbitrary SQL.
    if (string.IsNullOrEmpty(tablename))
    {
        throw new ArgumentException("Table name must not be empty.", "tablename");
    }
    foreach (char c in tablename)
    {
        if (!char.IsLetterOrDigit(c) && c != '_' && c != '.')
        {
            throw new ArgumentException("Invalid table name: " + tablename, "tablename");
        }
    }
    string sql = "truncate table " + tablename;
    return SqlHelper.ExecuteNonQuery(SqlHelper.CenterConnStr, CommandType.Text, sql, null);
}
/// <summary>
/// Bulk-copies the rows of a DataTable into the destination database table.
/// The target table is truncated first, so afterwards it contains exactly the
/// DataTable's rows.
/// </summary>
/// <param name="datatable">Source rows; column layout must match the target table.</param>
/// <param name="conn">Database connection string.</param>
/// <param name="destTable">Name of the destination table.</param>
public static void BCPInForDatatable(DataTable datatable, string conn, string destTable)
{
    // Empty the target table first so the copy is a full refresh.
    GetSQL.TruncateTable(destTable);

    // The using-statement disposes the SqlBulkCopy; the original additionally
    // closed it in a finally block with an empty catch — both redundant — and
    // rethrew with "throw ex", which destroys the stack trace.
    using (SqlBulkCopy bulkCopy = new SqlBulkCopy(conn))
    {
        // Rows are sent to the server in batches of this size.
        bulkCopy.BatchSize = 1000;
        // Seconds allowed for the whole copy; on timeout the transaction is not
        // committed, the data is rolled back, and copied rows are removed.
        bulkCopy.BulkCopyTimeout = 60;
        // Raise SqlRowsCopied every 10000 rows so progress can be reported.
        bulkCopy.NotifyAfter = 10000;
        bulkCopy.SqlRowsCopied += new SqlRowsCopiedEventHandler(OnSqlRowsCopied);
        bulkCopy.DestinationTableName = destTable;
        Console.WriteLine("Import To DB!");
        bulkCopy.WriteToServer(datatable);
    }
}
/// <summary>
/// SqlBulkCopy progress callback: prints the running copied-row count.
/// </summary>
private static void OnSqlRowsCopied(object sender, SqlRowsCopiedEventArgs e)
{
    Console.WriteLine($" Current Insert Row Number: {e.RowsCopied}");
}
/// <summary>
/// Empties the given table with TRUNCATE TABLE.
/// </summary>
/// <param name="tablename">Table to truncate; must be a plain, optionally
/// schema-qualified identifier (letters, digits, '_' and '.').</param>
/// <returns>The value returned by ExecuteNonQuery.</returns>
public static int TruncateTable(string tablename)
{
    // TRUNCATE TABLE cannot take the table name as a query parameter, so the
    // name must be concatenated into the statement. Validate it as a simple
    // identifier first so a crafted name cannot inject arbitrary SQL.
    if (string.IsNullOrEmpty(tablename))
    {
        throw new ArgumentException("Table name must not be empty.", "tablename");
    }
    foreach (char c in tablename)
    {
        if (!char.IsLetterOrDigit(c) && c != '_' && c != '.')
        {
            throw new ArgumentException("Invalid table name: " + tablename, "tablename");
        }
    }
    string sql = "truncate table " + tablename;
    return SqlHelper.ExecuteNonQuery(SqlHelper.CenterConnStr, CommandType.Text, sql, null);
}