我这里的场景是把 DataTable 里面的数据插入数据库,但数据量大时批量插入会提示超时,所以将 DataTable 的数据分批写入数据库。
// Bulk-insert the rows of `dt` into SQL Server in fixed-size batches so that a
// single huge SqlBulkCopy.WriteToServer call cannot hit the command timeout.
using (SqlConnection connection = new SqlConnection(SQLServerDAL.Public.PubConstant.ConnectionString))
{
    connection.Open();
    int pageSize = 100000; // rows per batch; one giant write is what was timing out

    // Ceiling division: includes a trailing partial page exactly once and runs
    // zero iterations for an empty table. The original `i <= Rows.Count / pageSize`
    // executed one extra pass whenever Rows.Count was an exact multiple of
    // pageSize, and CopyToDataTable() throws InvalidOperationException on an
    // empty source — crashing precisely on "round" row counts.
    int pageCount = (dt.Rows.Count + pageSize - 1) / pageSize;

    for (int i = 0; i < pageCount; i++)
    {
        // Materialize the current page of rows into its own DataTable.
        DataTable tmpDt = dt.AsEnumerable().Skip(i * pageSize).Take(pageSize).CopyToDataTable();

        using (SqlBulkCopy bulkCopy = new SqlBulkCopy(connection))
        {
            bulkCopy.DestinationTableName = "表名";
            // Default BulkCopyTimeout is 30 s — the very limit this batching works
            // around. 0 means no per-batch timeout; tune down if runaway writes
            // must be bounded.
            bulkCopy.BulkCopyTimeout = 0;
            bulkCopy.BatchSize = tmpDt.Rows.Count;
            // ColumnMappings.Add(source DataTable column name, destination table column name)
            bulkCopy.ColumnMappings.Add("门店名称", "shopid");
            bulkCopy.ColumnMappings.Add("货号", "goodsid");
            bulkCopy.ColumnMappings.Add("尺码", "sizeid");
            bulkCopy.ColumnMappings.Add("销售价", "price");
            bulkCopy.ColumnMappings.Add("全国库存", "stock");
            bulkCopy.WriteToServer(tmpDt);
        }
        // Note: the original wrapped this in `catch (Exception ex) { throw ex; }`,
        // which only destroyed the stack trace; letting exceptions propagate
        // unchanged is strictly better.
    }
}