在SQL Server 中插入一條數據使用Insert語句,但是如果想要批量插入一堆數據的話,循環使用Insert不僅效率低,而且會導致一系列的SQL系統性能問題。下面介紹SQL Server支持的兩種批量數據插入方法:Bulk和表值參數(Table-Valued Parameters),高效插入數據。
新建數據庫:
--Create DataBase
create database BulkTestDB; go
use BulkTestDB; go
--Create Table
Create table BulkTestTable( Id int primary key, UserName nvarchar(32), Pwd varchar(16)) go
一.傳統的INSERT方式
先看下傳統的INSERT方式:一條一條的插入(性能消耗越來越大,速度越來越慢)
//使用簡單的Insert方法一條條插入 [慢]
#region [ simpleInsert ]
static void simpleInsert() { Console.WriteLine("使用簡單的Insert方法一條條插入"); Stopwatch sw = new Stopwatch(); SqlConnection sqlconn = new SqlConnection("server=.;database=BulkTestDB;user=sa;password=123456;"); SqlCommand sqlcmd = new SqlCommand(); sqlcmd.CommandText = string.Format("insert into BulkTestTable(Id,UserName,Pwd)values(@p0,@p1,@p2)"); sqlcmd.Parameters.Add("@p0", SqlDbType.Int); sqlcmd.Parameters.Add("@p1", SqlDbType.NVarChar); sqlcmd.Parameters.Add("@p2", SqlDbType.NVarChar); sqlcmd.CommandType = CommandType.Text; sqlcmd.Connection = sqlconn; sqlconn.Open(); try { //循環插入1000條數據,每次插入100條,插入10次。
for (int multiply = 0; multiply < 10; multiply++) { for (int count = multiply * 100; count < (multiply + 1) * 100; count++) { sqlcmd.Parameters["@p0"].Value = count; sqlcmd.Parameters["@p1"].Value = string.Format("User-{0}", count * multiply); sqlcmd.Parameters["@p2"].Value = string.Format("Pwd-{0}", count * multiply); sw.Start(); sqlcmd.ExecuteNonQuery(); sw.Stop(); } //每插入10萬條數據后,顯示此次插入所用時間
Console.WriteLine(string.Format("Elapsed Time is {0} Milliseconds", sw.ElapsedMilliseconds)); } Console.ReadKey(); } catch (Exception ex) { Console.WriteLine(ex.Message); } } #endregion
循環插入1000條數據,每次插入100條,插入10次,效率是越來越慢。
二.較快速的Bulk插入方式:
使用Bulk插入[ 較快 ]
//使用Bulk插入的情況 [ 較快 ]
#region [ 使用Bulk插入的情況 ]
static void BulkToDB(DataTable dt) { Stopwatch sw = new Stopwatch(); SqlConnection sqlconn = new SqlConnection("server=.;database=BulkTestDB;user=sa;password=123456;"); SqlBulkCopy bulkCopy = new SqlBulkCopy(sqlconn); bulkCopy.DestinationTableName = "BulkTestTable"; bulkCopy.BatchSize = dt.Rows.Count; try { sqlconn.Open(); if (dt != null && dt.Rows.Count != 0) { bulkCopy.WriteToServer(dt); } } catch (Exception ex) { Console.WriteLine(ex.Message); } finally { sqlconn.Close(); if (bulkCopy != null) { bulkCopy.Close(); } } } static DataTable GetTableSchema() { DataTable dt = new DataTable(); dt.Columns.AddRange(new DataColumn[] { new DataColumn("Id",typeof(int)), new DataColumn("UserName",typeof(string)), new DataColumn("Pwd",typeof(string)) }); return dt; } static void BulkInsert() { Console.WriteLine("使用簡單的Bulk插入的情況"); Stopwatch sw = new Stopwatch(); for (int multiply = 0; multiply < 10; multiply++) { DataTable dt = GetTableSchema(); for (int count = multiply * 100; count < (multiply + 1) * 100; count++) { DataRow r = dt.NewRow(); r[0] = count; r[1] = string.Format("User-{0}", count * multiply); r[2] = string.Format("Pwd-{0}", count * multiply); dt.Rows.Add(r); } sw.Start(); BulkToDB(dt); sw.Stop(); Console.WriteLine(string.Format("Elapsed Time is {0} Milliseconds", sw.ElapsedMilliseconds)); } } #endregion
循環插入1000條數據,每次插入100條,插入10次,效率快了很多。
三.使用簡稱TVPs插入數據
打開SQL Server,執行以下腳本:
--Create Table Valued CREATE TYPE BulkUdt AS TABLE (Id int, UserName nvarchar(32), Pwd varchar(16)
)
成功後可在數據庫的「可編程性 → 類型 → 用戶定義表類型」下看到新增的BulkUdt表類型。
使用簡稱TVPs插入數據
//使用簡稱TVPs插入數據 [最快]
#region [ 使用簡稱TVPs插入數據 ]
static void TbaleValuedToDB(DataTable dt) { Stopwatch sw = new Stopwatch(); SqlConnection sqlconn = new SqlConnection("server=.;database=BulkTestDB;user=sa;password=123456;"); const string TSqlStatement =
"insert into BulkTestTable (Id,UserName,Pwd)" +
" SELECT nc.Id, nc.UserName,nc.Pwd" +
" FROM @NewBulkTestTvp AS nc"; SqlCommand cmd = new SqlCommand(TSqlStatement, sqlconn); SqlParameter catParam = cmd.Parameters.AddWithValue("@NewBulkTestTvp", dt); catParam.SqlDbType = SqlDbType.Structured; catParam.TypeName = "dbo.BulkUdt"; try { sqlconn.Open(); if (dt != null && dt.Rows.Count != 0) { cmd.ExecuteNonQuery(); } } catch (Exception ex) { Console.WriteLine("error>" + ex.Message); } finally { sqlconn.Close(); } } static void TVPsInsert() { Console.WriteLine("使用簡稱TVPs插入數據"); Stopwatch sw = new Stopwatch(); for (int multiply = 0; multiply < 10; multiply++) { DataTable dt = GetTableSchema(); for (int count = multiply * 100; count < (multiply + 1) * 100; count++) { DataRow r = dt.NewRow(); r[0] = count; r[1] = string.Format("User-{0}", count * multiply); r[2] = string.Format("Pwd-{0}", count * multiply); dt.Rows.Add(r); } sw.Start(); TbaleValuedToDB(dt); sw.Stop(); Console.WriteLine(string.Format("Elapsed Time is {0} Milliseconds", sw.ElapsedMilliseconds)); } Console.ReadLine(); } #endregion
循環插入1000條數據,每次插入100條,插入10次,效率比前兩種方式更高;後面測試將每次插入的數據量增大,會更明顯地體現TVPs插入的效率優勢。
PS:
使用新特性TVPs插入100w數據 只需2秒,實例如下:
--創建表 CREATE TABLE BulkCategorySubscriber ( [category_id] [int] NOT NULL, [subscriber_id] [int] NOT NULL, [added_date] [datetime] NOT NULL DEFAULT (getdate()) ) --創建 type --Create Table Valued CREATE TYPE BulkCategorySubscriberType AS TABLE ( [category_id] [int] NOT NULL, [subscriber_id] [int] NOT NULL, [added_date] [datetime] NOT NULL DEFAULT (getdate()) )
--1、定於 dbo.BulkCategorySubscriberType 類型變量
declare @data dbo.BulkCategorySubscriberType --2、將100w數據插入到@data中
insert into @data(category_id,subscriber_id,added_date) select top 1000000 category_id,subscriber_id,added_date from NewsLetterSystem_CategorySubscriber --3、最后將@data中數據插入到目標數據表中
insert into BulkCategorySubscriber (category_id,subscriber_id,added_date) select category_id,subscriber_id,added_date from @data