I'm looking for the fastest way of inserting into Entity Framework.
I'm asking this because of a scenario where you have an active TransactionScope and the insertion is huge (4000+ records). It can potentially last more than 10 minutes (the default timeout of transactions), and this will lead to an incomplete transaction.
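As background on the timeout mentioned above: the scope's timeout can be raised explicitly. This is a minimal sketch, not part of any answer below; the 10-minute value and the DoBulkInsert call are illustrative assumptions, and values above TransactionManager.MaximumTimeout (10 minutes by default) additionally require a machine.config change.

// requires a reference to System.Transactions
private void InsertWithLongerTimeout()
{
    var options = new TransactionOptions
    {
        IsolationLevel = IsolationLevel.ReadCommitted,
        // Per-scope timeout; anything above TransactionManager.MaximumTimeout
        // is still capped unless machine.config is changed.
        Timeout = TimeSpan.FromMinutes(10)
    };
    using (var transactionScope = new TransactionScope(TransactionScopeOption.Required, options))
    {
        DoBulkInsert(); // hypothetical placeholder for whichever bulk approach you choose
        transactionScope.Complete();
    }
}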
Current answer
SqlBulkCopy is lightning fast.
This is my implementation:
// at some point in my calling code, I will call:
var myDataTable = CreateMyDataTable();
myDataTable.Rows.Add(Guid.NewGuid(), tableHeaderId, theName, theValue); // e.g. - need this call for each row to insert

var efConnectionString = ConfigurationManager.ConnectionStrings["MyWebConfigEfConnection"].ConnectionString;
var efConnectionStringBuilder = new EntityConnectionStringBuilder(efConnectionString);
var connectionString = efConnectionStringBuilder.ProviderConnectionString;

BulkInsert(connectionString, myDataTable);
private DataTable CreateMyDataTable()
{
    var myDataTable = new DataTable { TableName = "MyTable" };
    // this table has an identity column - don't need to specify that
    myDataTable.Columns.Add("MyTableRecordGuid", typeof(Guid));
    myDataTable.Columns.Add("MyTableHeaderId", typeof(int));
    myDataTable.Columns.Add("ColumnName", typeof(string));
    myDataTable.Columns.Add("ColumnValue", typeof(string));
    return myDataTable;
}
private void BulkInsert(string connectionString, DataTable dataTable)
{
    using (var connection = new SqlConnection(connectionString))
    {
        connection.Open();
        SqlTransaction transaction = null;
        try
        {
            transaction = connection.BeginTransaction();
            using (var sqlBulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.TableLock, transaction))
            {
                sqlBulkCopy.DestinationTableName = dataTable.TableName;
                foreach (DataColumn column in dataTable.Columns)
                {
                    sqlBulkCopy.ColumnMappings.Add(column.ColumnName, column.ColumnName);
                }
                sqlBulkCopy.WriteToServer(dataTable);
            }
            transaction.Commit();
        }
        catch (Exception)
        {
            transaction?.Rollback();
            throw;
        }
    }
}
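For very large batches, SqlBulkCopy also exposes a few tuning knobs that slot straight into the implementation above; the values below are illustrative assumptions rather than measured recommendations:

using (var sqlBulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.TableLock, transaction))
{
    sqlBulkCopy.DestinationTableName = dataTable.TableName;
    sqlBulkCopy.BatchSize = 5000;      // rows per round trip; 0 (the default) sends everything as one batch
    sqlBulkCopy.BulkCopyTimeout = 600; // seconds before the copy times out (default is 30)
    sqlBulkCopy.WriteToServer(dataTable);
}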
Other answers
Here is a performance comparison between using Entity Framework and using the SqlBulkCopy class, on a realistic example: How to Bulk Insert Complex Objects into SQL Server Database.
As others have already emphasized, ORMs are not meant to be used for bulk operations. They offer flexibility, separation of concerns and other benefits, but bulk operations (except bulk reading) are not one of them.
However, for more than 4000 inserts I recommend using a stored procedure. For reference on elapsed time: I inserted 11,788 rows in 20 seconds.
That's the code:
public void InsertDataBase(MyEntity entity)
{
    repository.Database.ExecuteSqlCommand("sp_mystored " +
        "@param1, @param2",
        new SqlParameter("@param1", entity.property1),
        new SqlParameter("@param2", entity.property2));
}
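Calling the procedure once per entity still pays a round trip per row. If the goal is a single call for thousands of rows, a table-valued parameter is one way to do it; the sketch below assumes a user-defined table type dbo.MyEntityType and a procedure dbo.sp_mystored_bulk that accepts it (both names are hypothetical, and property1/property2 are assumed to be strings):

public void InsertDataBaseBulk(IEnumerable<MyEntity> entities)
{
    // Shape the rows to match the hypothetical dbo.MyEntityType table type.
    var table = new DataTable();
    table.Columns.Add("property1", typeof(string));
    table.Columns.Add("property2", typeof(string));
    foreach (var entity in entities)
    {
        table.Rows.Add(entity.property1, entity.property2);
    }

    var rows = new SqlParameter("@rows", SqlDbType.Structured)
    {
        TypeName = "dbo.MyEntityType",
        Value = table
    };

    // One round trip for the whole batch.
    repository.Database.ExecuteSqlCommand("EXEC dbo.sp_mystored_bulk @rows", rows);
}

The stored procedure itself would then just INSERT ... SELECT from @rows into the destination table.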
I agree with Adam Rackis. SqlBulkCopy is the fastest way of transferring bulk records from one data source to another. I used this to copy 20K records and it took less than 3 seconds. Have a look at the example below.
public static void InsertIntoMembers(DataTable dataTable)
{
    using (var connection = new SqlConnection(@"data source=;persist security info=True;user id=;password=;initial catalog=;MultipleActiveResultSets=True;App=EntityFramework"))
    {
        SqlTransaction transaction = null;
        connection.Open();
        try
        {
            transaction = connection.BeginTransaction();
            using (var sqlBulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.TableLock, transaction))
            {
                sqlBulkCopy.DestinationTableName = "Members";
                sqlBulkCopy.ColumnMappings.Add("Firstname", "Firstname");
                sqlBulkCopy.ColumnMappings.Add("Lastname", "Lastname");
                sqlBulkCopy.ColumnMappings.Add("DOB", "DOB");
                sqlBulkCopy.ColumnMappings.Add("Gender", "Gender");
                sqlBulkCopy.ColumnMappings.Add("Email", "Email");
                sqlBulkCopy.ColumnMappings.Add("Address1", "Address1");
                sqlBulkCopy.ColumnMappings.Add("Address2", "Address2");
                sqlBulkCopy.ColumnMappings.Add("Address3", "Address3");
                sqlBulkCopy.ColumnMappings.Add("Address4", "Address4");
                sqlBulkCopy.ColumnMappings.Add("Postcode", "Postcode");
                sqlBulkCopy.ColumnMappings.Add("MobileNumber", "MobileNumber");
                sqlBulkCopy.ColumnMappings.Add("TelephoneNumber", "TelephoneNumber");
                sqlBulkCopy.ColumnMappings.Add("Deleted", "Deleted");
                sqlBulkCopy.WriteToServer(dataTable);
            }
            transaction.Commit();
        }
        catch (Exception)
        {
            transaction?.Rollback();
            throw; // rethrow so callers are not left with a silently failed insert
        }
    }
}
I made a generic extension of @Slauma's example above:
public static class DataExtensions
{
    public static DbContext AddToContext<T>(this DbContext context, object entity, int count, int commitCount, bool recreateContext, Func<DbContext> contextCreator)
    {
        context.Set(typeof(T)).Add((T)entity);

        if (count % commitCount == 0)
        {
            context.SaveChanges();
            if (recreateContext)
            {
                context.Dispose();
                context = contextCreator.Invoke();
                context.Configuration.AutoDetectChangesEnabled = false;
            }
        }

        return context;
    }
}
Usage:
public void AddEntities(List<YourEntity> entities)
{
    using (var transactionScope = new TransactionScope())
    {
        DbContext context = new YourContext();
        context.Configuration.AutoDetectChangesEnabled = false; // match what the extension does after each recreation
        int count = 0;
        foreach (var entity in entities)
        {
            ++count;
            context = context.AddToContext<YourEntity>(entity, count, 100, true,
                () => new YourContext());
        }
        context.SaveChanges();
        context.Dispose();
        transactionScope.Complete();
    }
}
Using SqlBulkCopy:
void BulkInsert(GpsReceiverTrack[] gpsReceiverTracks)
{
    if (gpsReceiverTracks == null)
    {
        throw new ArgumentNullException(nameof(gpsReceiverTracks));
    }

    DataTable dataTable = new DataTable("GpsReceiverTracks");
    dataTable.Columns.Add("ID", typeof(int));
    dataTable.Columns.Add("DownloadedTrackID", typeof(int));
    dataTable.Columns.Add("Time", typeof(TimeSpan));
    dataTable.Columns.Add("Latitude", typeof(double));
    dataTable.Columns.Add("Longitude", typeof(double));
    dataTable.Columns.Add("Altitude", typeof(double));

    for (int i = 0; i < gpsReceiverTracks.Length; i++)
    {
        dataTable.Rows.Add
        (
            new object[]
            {
                gpsReceiverTracks[i].ID,
                gpsReceiverTracks[i].DownloadedTrackID,
                gpsReceiverTracks[i].Time,
                gpsReceiverTracks[i].Latitude,
                gpsReceiverTracks[i].Longitude,
                gpsReceiverTracks[i].Altitude
            }
        );
    }

    string connectionString = (new TeamTrackerEntities()).Database.Connection.ConnectionString;
    using (var connection = new SqlConnection(connectionString))
    {
        connection.Open();
        using (var transaction = connection.BeginTransaction())
        {
            using (var sqlBulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.TableLock, transaction))
            {
                sqlBulkCopy.DestinationTableName = dataTable.TableName;
                foreach (DataColumn column in dataTable.Columns)
                {
                    sqlBulkCopy.ColumnMappings.Add(column.ColumnName, column.ColumnName);
                }
                sqlBulkCopy.WriteToServer(dataTable);
            }
            transaction.Commit();
        }
    }
}
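Several of the examples above build the DataTable column by column by hand. As a generic alternative, a reflection-based builder can do the same in one place; this is only a sketch (ToDataTable is a hypothetical helper), and it assumes every public readable property of T maps 1:1 by name to a column of the destination table:

public static DataTable ToDataTable<T>(IEnumerable<T> items)
{
    var table = new DataTable(typeof(T).Name);
    var properties = typeof(T).GetProperties()
        .Where(p => p.CanRead)
        .ToArray();

    foreach (var property in properties)
    {
        // Unwrap Nullable<T> so the DataTable column gets the underlying type.
        var columnType = Nullable.GetUnderlyingType(property.PropertyType) ?? property.PropertyType;
        table.Columns.Add(property.Name, columnType);
    }

    foreach (var item in items)
    {
        var values = properties.Select(p => p.GetValue(item) ?? DBNull.Value).ToArray();
        table.Rows.Add(values);
    }

    return table;
}

The resulting DataTable can then be handed to any of the SqlBulkCopy snippets above; since they map columns by name, the column order does not matter.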