U4-9107 Change BulkInsertRecords to use BulkCopy or TableDirect (SQLCE)

This commit is contained in:
Shannon
2016-10-25 12:29:17 +02:00
parent 74a5bf5503
commit dadcbc8b31
17 changed files with 4901 additions and 284 deletions

View File

@@ -174,7 +174,7 @@ namespace Umbraco.Core
/// </summary>
public void ConfigureEmbeddedDatabaseConnection()
{
const string providerName = "System.Data.SqlServerCe.4.0";
const string providerName = Constants.DatabaseProviders.SqlCe;
var connectionString = GetEmbeddedDatabaseConnectionString();
SaveConnectionString(connectionString, providerName);

File diff suppressed because it is too large Load Diff

View File

@@ -9,7 +9,7 @@ namespace Umbraco.Core.Persistence.DatabaseModelDefinitions
{
internal static class DefinitionFactory
{
public static TableDefinition GetTableDefinition(Type modelType)
public static TableDefinition GetTableDefinition(ISqlSyntaxProvider syntaxProvider, Type modelType)
{
//Looks for PetaPoco's TableNameAtribute for the name of the table
//If no attribute is set we use the name of the Type as the default convention
@@ -32,7 +32,7 @@ namespace Umbraco.Core.Persistence.DatabaseModelDefinitions
//Otherwise use the name of the property itself as the default convention
var columnAttribute = propertyInfo.FirstAttribute<ColumnAttribute>();
string columnName = columnAttribute != null ? columnAttribute.Name : propertyInfo.Name;
var columnDefinition = GetColumnDefinition(modelType, propertyInfo, columnName, tableName);
var columnDefinition = GetColumnDefinition(syntaxProvider, modelType, propertyInfo, columnName, tableName);
tableDefinition.Columns.Add(columnDefinition);
//Creates a foreignkey definition and adds it to the collection on the table definition
@@ -58,7 +58,7 @@ namespace Umbraco.Core.Persistence.DatabaseModelDefinitions
return tableDefinition;
}
public static ColumnDefinition GetColumnDefinition(Type modelType, PropertyInfo propertyInfo, string columnName, string tableName)
public static ColumnDefinition GetColumnDefinition(ISqlSyntaxProvider syntaxProvider, Type modelType, PropertyInfo propertyInfo, string columnName, string tableName)
{
var definition = new ColumnDefinition{ Name = columnName, TableName = tableName, ModificationType = ModificationType.Create };
@@ -110,7 +110,7 @@ namespace Umbraco.Core.Persistence.DatabaseModelDefinitions
{
//Special case for MySQL as it can't have multiple default DateTime values, which
//is what the umbracoServer table definition is trying to create
if (SqlSyntaxContext.SqlSyntaxProvider is MySqlSyntaxProvider && definition.TableName == "umbracoServer" &&
if (syntaxProvider is MySqlSyntaxProvider && definition.TableName == "umbracoServer" &&
definition.TableName.ToLowerInvariant() == "lastNotifiedDate".ToLowerInvariant())
return definition;

View File

@@ -99,7 +99,7 @@ namespace Umbraco.Core.Persistence
public void CreateTable(bool overwrite, Type modelType)
{
var tableDefinition = DefinitionFactory.GetTableDefinition(modelType);
var tableDefinition = DefinitionFactory.GetTableDefinition(_syntaxProvider, modelType);
var tableName = tableDefinition.Name;
string createSql = _syntaxProvider.Format(tableDefinition);

View File

@@ -159,7 +159,7 @@ namespace Umbraco.Core.Persistence.Migrations.Initial
foreach (var item in OrderedTables.OrderBy(x => x.Key))
{
var tableDefinition = DefinitionFactory.GetTableDefinition(item.Value);
var tableDefinition = DefinitionFactory.GetTableDefinition(_sqlSyntaxProvider, item.Value);
result.TableDefinitions.Add(tableDefinition);
}

View File

@@ -3,9 +3,13 @@ using System.Collections.Generic;
using System.Data;
using System.Data.Common;
using System.Data.SqlClient;
using System.Data.SqlServerCe;
using System.Linq;
using System.Text.RegularExpressions;
using MySql.Data.MySqlClient;
using StackExchange.Profiling.Data;
using Umbraco.Core.Logging;
using Umbraco.Core.Persistence.DatabaseModelDefinitions;
using Umbraco.Core.Persistence.Querying;
using Umbraco.Core.Persistence.SqlSyntax;
@@ -150,7 +154,7 @@ namespace Umbraco.Core.Persistence
{
//this fancy regex will only match a single @ not a double, etc...
var regex = new Regex("(?<!@)@(?!@)");
return regex.Replace(value, "@@");
return regex.Replace(value, "@@");
}
return value;
@@ -172,6 +176,13 @@ namespace Umbraco.Core.Persistence
creator.CreateTable<T>(overwrite);
}
/// <summary>
/// Performs the bulk insertion in the context of a current transaction with an optional parameter to complete the transaction
/// when finished
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="db"></param>
/// <param name="collection"></param>
public static void BulkInsertRecords<T>(this Database db, IEnumerable<T> collection)
{
//don't do anything if there are no records.
@@ -180,10 +191,97 @@ namespace Umbraco.Core.Persistence
using (var tr = db.GetTransaction())
{
db.BulkInsertRecords(collection, tr, true);
db.BulkInsertRecords(collection, tr, SqlSyntaxContext.SqlSyntaxProvider, true);
}
}
/// <summary>
/// Performs the bulk insertion in the context of a current transaction with an optional parameter to complete the transaction
/// when finished
/// </summary>
/// <typeparam name="T">The record (POCO) type being inserted</typeparam>
/// <param name="db">The PetaPoco database to insert into</param>
/// <param name="collection">The records to insert</param>
/// <param name="tr">The current transaction</param>
/// <param name="syntaxProvider">The SQL syntax provider for the current database</param>
/// <param name="useNativeSqlPlatformBulkInsert">
/// If this is false this will try to just generate bulk insert statements instead of using the current SQL platform's bulk
/// insert logic. For SQLCE, bulk insert statements do not work so if this is false it will insert one at a time.
/// </param>
/// <param name="commitTrans">If true, completes the transaction on success and disposes it on failure</param>
/// <returns>The number of items inserted</returns>
public static int BulkInsertRecords<T>(this Database db,
    IEnumerable<T> collection,
    Transaction tr,
    ISqlSyntaxProvider syntaxProvider,
    bool useNativeSqlPlatformBulkInsert = true,
    bool commitTrans = false)
{
    //don't do anything if there are no records.
    if (collection.Any() == false)
    {
        return 0;
    }

    var pd = Database.PocoData.ForType(typeof(T));
    if (pd == null) throw new InvalidOperationException("Could not find PocoData for " + typeof(T));

    try
    {
        int processed = 0;

        //try the platform specific bulk insert first (BulkCopy for SQL Server, TableDirect for SQLCE)
        var usedNativeSqlPlatformInserts = useNativeSqlPlatformBulkInsert
            && NativeSqlPlatformBulkInsertRecords(db, syntaxProvider, pd, collection, out processed);

        if (usedNativeSqlPlatformInserts == false)
        {
            //if it is sql ce or it is a sql server version less than 2008, we need to do individual inserts.
            var sqlServerSyntax = syntaxProvider as SqlServerSyntaxProvider;
            if ((sqlServerSyntax != null && (int) sqlServerSyntax.GetVersionName(db) < (int) SqlServerVersionName.V2008)
                || syntaxProvider is SqlCeSyntaxProvider)
            {
                //SqlCe doesn't support bulk insert statements!
                foreach (var poco in collection)
                {
                    db.Insert(poco);
                    //FIX: this path previously never incremented the count and the method returned 0
                    //even though every record was inserted
                    processed++;
                }
            }
            else
            {
                //we'll need to generate insert statements instead
                string[] sqlStatements;
                var cmds = db.GenerateBulkInsertCommand(pd, collection, out sqlStatements);
                for (var i = 0; i < sqlStatements.Length; i++)
                {
                    using (var cmd = cmds[i])
                    {
                        cmd.CommandText = sqlStatements[i];
                        //FIX: count the rows each batched INSERT affects (ExecuteNonQuery returns the
                        //affected row count) instead of counting statements, so the return value matches
                        //the documented "number of items inserted"
                        processed += cmd.ExecuteNonQuery();
                    }
                }
            }
        }

        if (commitTrans)
        {
            tr.Complete();
        }
        return processed;
    }
    catch
    {
        //we are managing the transaction for the caller, so tear it down on failure before rethrowing
        if (commitTrans)
        {
            tr.Dispose();
        }
        throw;
    }
}
/// <summary>
/// Performs the bulk insertion in the context of a current transaction with an optional parameter to complete the transaction
/// when finished
@@ -193,64 +291,10 @@ namespace Umbraco.Core.Persistence
/// <param name="collection"></param>
/// <param name="tr"></param>
/// <param name="commitTrans"></param>
[Obsolete("Use the method that specifies an SqlSyntaxContext instance instead")]
public static void BulkInsertRecords<T>(this Database db, IEnumerable<T> collection, Transaction tr, bool commitTrans = false)
{
//TODO: We should change this to use BulkCopy, as an example see:
// https://ayende.com/blog/4137/nhibernate-perf-tricks
// Even though this just generates lots of raw sql INSERT statements BulkCopy is the fastest it can possibly be
// and we should be able to do this using the current connection from the PetaPoco db instance (and would probably be much cleaner)
//
// BulkCopy is available for SQL Server and MySqlBulkLoader is available for MySql, pretty sure BulkCopy works for SQLCE so
// we should be covered and of course could fallback to this method if that is not our database. But we would get huge perf
// increases for this.
//don't do anything if there are no records.
if (collection.Any() == false)
return;
try
{
//if it is sql ce or it is a sql server version less than 2008, we need to do individual inserts.
var sqlServerSyntax = SqlSyntaxContext.SqlSyntaxProvider as SqlServerSyntaxProvider;
if ((sqlServerSyntax != null && (int)sqlServerSyntax.GetVersionName(db) < (int)SqlServerVersionName.V2008)
|| SqlSyntaxContext.SqlSyntaxProvider is SqlCeSyntaxProvider)
{
//SqlCe doesn't support bulk insert statements!
foreach (var poco in collection)
{
db.Insert(poco);
}
}
else
{
string[] sqlStatements;
var cmds = db.GenerateBulkInsertCommand(collection, db.Connection, out sqlStatements);
for (var i = 0; i < sqlStatements.Length; i++)
{
using (var cmd = cmds[i])
{
cmd.CommandText = sqlStatements[i];
cmd.ExecuteNonQuery();
}
}
}
if (commitTrans)
{
tr.Complete();
}
}
catch
{
if (commitTrans)
{
tr.Dispose();
}
throw;
}
db.BulkInsertRecords<T>(collection, tr, SqlSyntaxContext.SqlSyntaxProvider, commitTrans);
}
/// <summary>
@@ -259,8 +303,8 @@ namespace Umbraco.Core.Persistence
/// <typeparam name="T"></typeparam>
/// <param name="db"></param>
/// <param name="collection"></param>
/// <param name="connection"></param>
/// <param name="sql"></param>
/// <param name="pd"></param>
/// <returns>Sql commands with populated command parameters required to execute the sql statement</returns>
/// <remarks>
/// The limits for number of parameters are 2100 (in sql server, I think there's many more allowed in mysql). So
@@ -269,33 +313,24 @@ namespace Umbraco.Core.Persistence
/// that is max. I've reduced it to 2000 anyways.
/// </remarks>
internal static IDbCommand[] GenerateBulkInsertCommand<T>(
this Database db,
IEnumerable<T> collection,
IDbConnection connection,
this Database db,
Database.PocoData pd,
IEnumerable<T> collection,
out string[] sql)
{
//A filter used below a few times to get all columns except result cols and not the primary key if it is auto-incremental
Func<Database.PocoData, KeyValuePair<string, Database.PocoColumn>, bool> includeColumn = (data, column) =>
{
if (column.Value.ResultColumn) return false;
if (data.TableInfo.AutoIncrement && column.Key == data.TableInfo.PrimaryKey) return false;
return true;
};
var pd = Database.PocoData.ForType(typeof(T));
var tableName = db.EscapeTableName(pd.TableInfo.TableName);
//get all columns to include and format for sql
var cols = string.Join(", ",
var cols = string.Join(", ",
pd.Columns
.Where(c => includeColumn(pd, c))
.Where(c => IncludeColumn(pd, c))
.Select(c => tableName + "." + db.EscapeSqlIdentifier(c.Key)).ToArray());
var itemArray = collection.ToArray();
//calculate number of parameters per item
var paramsPerItem = pd.Columns.Count(i => includeColumn(pd, i));
var paramsPerItem = pd.Columns.Count(i => IncludeColumn(pd, i));
//Example calc:
// Given: we have 4168 items in the itemArray, each item contains 8 command parameters (values to be inserted)
// 2100 / 8 = 262.5
@@ -316,14 +351,14 @@ namespace Umbraco.Core.Persistence
.Skip(tIndex * (int)itemsPerTrans)
.Take((int)itemsPerTrans);
var cmd = db.CreateCommand(connection, "");
var cmd = db.CreateCommand(db.Connection, string.Empty);
var pocoValues = new List<string>();
var index = 0;
foreach (var poco in itemsForTrans)
{
var values = new List<string>();
//get all columns except result cols and not the primary key if it is auto-incremental
foreach (var i in pd.Columns.Where(x => includeColumn(pd, x)))
foreach (var i in pd.Columns.Where(x => IncludeColumn(pd, x)))
{
db.AddParam(cmd, i.Value.GetValue(poco), "@");
values.Add(string.Format("{0}{1}", "@", index++));
@@ -331,14 +366,211 @@ namespace Umbraco.Core.Persistence
pocoValues.Add("(" + string.Join(",", values.ToArray()) + ")");
}
var sqlResult = string.Format("INSERT INTO {0} ({1}) VALUES {2}", tableName, cols, string.Join(", ", pocoValues));
var sqlResult = string.Format("INSERT INTO {0} ({1}) VALUES {2}", tableName, cols, string.Join(", ", pocoValues));
sqlQueries.Add(sqlResult);
commands.Add(cmd);
}
sql = sqlQueries.ToArray();
return commands.ToArray();
return commands.ToArray();
}
/// <summary>
/// A filter used below a few times to get all columns except result cols and not the primary key if it is auto-incremental
/// </summary>
/// <param name="data">The PocoData describing the table the column belongs to</param>
/// <param name="column">The column name/PocoColumn pair to test</param>
/// <returns>True when the column should take part in the insert</returns>
private static bool IncludeColumn(Database.PocoData data, KeyValuePair<string, Database.PocoColumn> column)
{
    //an auto-increment primary key is generated by the database so it is never written by us
    var isAutoIncrementPrimaryKey = data.TableInfo.AutoIncrement && column.Key == data.TableInfo.PrimaryKey;
    return column.Value.ResultColumn == false && isAutoIncrementPrimaryKey == false;
}
/// <summary>
/// Bulk insert records with Sql BulkCopy or TableDirect or whatever sql platform specific bulk insert records should be used
/// </summary>
/// <typeparam name="T">The record (POCO) type being inserted</typeparam>
/// <param name="db">The PetaPoco database to insert into</param>
/// <param name="syntaxProvider">The syntax provider configured for the current database</param>
/// <param name="pd">The PocoData describing the table/columns for <typeparamref name="T"/></param>
/// <param name="collection">The records to insert</param>
/// <param name="processed">The number of records inserted</param>
/// <returns>True when a native bulk insert was performed, false when the caller must fall back to generated statements</returns>
private static bool NativeSqlPlatformBulkInsertRecords<T>(Database db, ISqlSyntaxProvider syntaxProvider, Database.PocoData pd, IEnumerable<T> collection, out int processed)
{
    var dbConnection = db.Connection;

    //unwrap the profiled connection if there is one
    var profiledConnection = dbConnection as ProfiledDbConnection;
    if (profiledConnection != null)
    {
        dbConnection = profiledConnection.InnerConnection;
    }

    //check if it's SQL or SqlCe
    var sqlConnection = dbConnection as SqlConnection;
    if (sqlConnection != null)
    {
        //FIX: use a safe cast instead of a hard cast - if the connection is a SqlConnection but the
        //configured syntax provider is not a SqlServerSyntaxProvider (i.e. misconfiguration) we fall
        //back to the non-native path instead of throwing an InvalidCastException here.
        var sqlServerSyntaxProvider = syntaxProvider as SqlServerSyntaxProvider;
        if (sqlServerSyntaxProvider != null)
        {
            processed = BulkInsertRecordsSqlServer(db, sqlServerSyntaxProvider, pd, collection);
            return true;
        }
    }

    var sqlCeConnection = dbConnection as SqlCeConnection;
    if (sqlCeConnection != null)
    {
        processed = BulkInsertRecordsSqlCe(db, pd, collection);
        return true;
    }

    //could not use the SQL server's specific bulk insert operations
    processed = 0;
    return false;
}
/// <summary>
/// Logic used to perform bulk inserts with SqlCe's TableDirect
/// </summary>
/// <typeparam name="T">The record (POCO) type being inserted</typeparam>
/// <param name="db">The PetaPoco database; its Connection must be a SqlCeConnection (possibly profiler-wrapped)</param>
/// <param name="pd">The PocoData describing the table/columns for <typeparamref name="T"/></param>
/// <param name="collection">The records to insert</param>
/// <returns>The number of records inserted</returns>
internal static int BulkInsertRecordsSqlCe<T>(Database db,
    Database.PocoData pd,
    IEnumerable<T> collection)
{
    var cols = pd.Columns.ToArray();

    using (var cmd = db.CreateCommand(db.Connection, string.Empty))
    {
        //TableDirect opens the table itself rather than executing a SQL statement, so the
        //"command text" is simply the table name
        cmd.CommandText = pd.TableInfo.TableName;
        cmd.CommandType = CommandType.TableDirect;
        //cmd.Transaction = GetTypedTransaction<SqlCeTransaction>(db.Connection.);

        //get the real command - unwrap the mini profiler wrapper if there is one
        using (var sqlCeCommand = GetTypedCommand<SqlCeCommand>(cmd))
        {
            // This seems to cause problems, I think this is primarily used for retrieval, not
            // inserting. see: https://msdn.microsoft.com/en-us/library/system.data.sqlserverce.sqlcecommand.indexname%28v=vs.100%29.aspx?f=255&MSPPError=-2147217396
            //sqlCeCommand.IndexName = pd.TableInfo.PrimaryKey;

            var count = 0;
            using (var rs = sqlCeCommand.ExecuteResultSet(ResultSetOptions.Updatable))
            {
                //a single record buffer is created once and re-used; each iteration overwrites its values
                var rec = rs.CreateRecord();
                foreach (var item in collection)
                {
                    for (var i = 0; i < cols.Length; i++)
                    {
                        //skip the index if this shouldn't be included (i.e. PK)
                        if (IncludeColumn(pd, cols[i]))
                        {
                            var val = cols[i].Value.GetValue(item);
                            //NOTE(review): SetValue(i, ...) assumes the PocoData column order matches the
                            //physical table column ordinals - confirm; otherwise values land in the wrong columns
                            rec.SetValue(i, val);
                        }
                    }
                    rs.Insert(rec);
                    count++;
                }
            }
            return count;
        }
    }
}
/// <summary>
/// Logic used to perform bulk inserts with SqlServer's BulkCopy
/// </summary>
/// <typeparam name="T">The record (POCO) type being inserted</typeparam>
/// <param name="db">The PetaPoco database; its Connection must be a SqlConnection (possibly profiler-wrapped)</param>
/// <param name="sqlSyntaxProvider">The SQL Server syntax provider used by the data reader to resolve column types</param>
/// <param name="pd">The PocoData describing the table/columns for <typeparamref name="T"/></param>
/// <param name="collection">The records to insert</param>
/// <returns>The number of records streamed to the server</returns>
internal static int BulkInsertRecordsSqlServer<T>(Database db, SqlServerSyntaxProvider sqlSyntaxProvider,
    Database.PocoData pd, IEnumerable<T> collection)
{
    //NOTE: We need to use the original db.Connection here to create the command, but we need to pass in the typed
    // connection below to the SqlBulkCopy
    using (var cmd = db.CreateCommand(db.Connection, string.Empty))
    {
        using (var copy = new SqlBulkCopy(
            GetTypedConnection<SqlConnection>(db.Connection),
            SqlBulkCopyOptions.Default,
            GetTypedTransaction<SqlTransaction>(cmd.Transaction))
        {
            //10000 seconds - effectively "don't time out" for very large rebuilds
            BulkCopyTimeout = 10000,
            //NOTE(review): the destination table name is not escaped/bracketed here - confirm table
            //names never require quoting, otherwise escape via the syntax provider
            DestinationTableName = pd.TableInfo.TableName
        })
        {
            //stream the collection through a custom data reader instead of materializing it in memory
            using (var bulkReader = new PocoDataDataReader<T, SqlServerSyntaxProvider>(collection, pd, sqlSyntaxProvider))
            {
                copy.WriteToServer(bulkReader);
                //RecordsAffected is the number of rows the reader advanced through (see PocoDataDataReader.Read)
                return bulkReader.RecordsAffected;
            }
        }
    }
}
/// <summary>
/// Returns the underlying connection as a typed connection - this is used to unwrap the profiled mini profiler stuff
/// </summary>
/// <typeparam name="TConnection">The concrete connection type to unwrap to</typeparam>
/// <param name="connection">The (possibly profiled) connection</param>
/// <returns>The unwrapped connection as <typeparamref name="TConnection"/>, or null when it is not that type</returns>
private static TConnection GetTypedConnection<TConnection>(IDbConnection connection)
    where TConnection : class, IDbConnection
{
    //when mini profiler wraps the connection, the real connection is exposed on InnerConnection
    var profiledDbConnection = connection as ProfiledDbConnection;
    var actualConnection = profiledDbConnection == null ? connection : profiledDbConnection.InnerConnection;
    return actualConnection as TConnection;
}
/// <summary>
/// Returns the underlying transaction as a typed transaction - this is used to unwrap the profiled mini profiler stuff
/// </summary>
/// <typeparam name="TTransaction">The concrete transaction type to unwrap to</typeparam>
/// <param name="transaction">The (possibly profiled) transaction</param>
/// <returns>The unwrapped transaction as <typeparamref name="TTransaction"/>, or null when it is not that type</returns>
private static TTransaction GetTypedTransaction<TTransaction>(IDbTransaction transaction)
    where TTransaction : class, IDbTransaction
{
    //FIX: the parameter was previously named "connection" and the XML docs described a connection,
    //but this method operates on a transaction; private static, all call sites are positional
    var profiled = transaction as ProfiledDbTransaction;
    if (profiled != null)
    {
        return profiled.WrappedTransaction as TTransaction;
    }
    return transaction as TTransaction;
}
/// <summary>
/// Returns the underlying command as a typed command - this is used to unwrap the profiled mini profiler stuff
/// </summary>
/// <typeparam name="TCommand">The concrete command type to unwrap to</typeparam>
/// <param name="command">The (possibly profiled) command</param>
/// <returns>The unwrapped command as <typeparamref name="TCommand"/>, or null when it is not that type</returns>
private static TCommand GetTypedCommand<TCommand>(IDbCommand command)
    where TCommand : class, IDbCommand
{
    //when mini profiler wraps the command, the real command is exposed on InternalCommand
    var profiled = command as ProfiledDbCommand;
    if (profiled != null)
    {
        return profiled.InternalCommand as TCommand;
    }
    return command as TCommand;
}
[Obsolete("Use the DatabaseSchemaHelper instead")]
@@ -415,8 +647,8 @@ namespace Umbraco.Core.Persistence
return ApplicationContext.Current.DatabaseContext.DatabaseProvider;
}
}
}

View File

@@ -0,0 +1,159 @@
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using Umbraco.Core.Persistence.DatabaseAnnotations;
using Umbraco.Core.Persistence.DatabaseModelDefinitions;
using Umbraco.Core.Persistence.SqlSyntax;
namespace Umbraco.Core.Persistence
{
/// <summary>
/// A data reader used for reading collections of PocoData entity types
/// </summary>
/// <remarks>
/// We are using a custom data reader so that tons of memory is not consumed when rebuilding this table, previously
/// we'd generate SQL insert statements, but we'd have to put all of the XML structures into memory first. Alternatively
/// we can use .net's DataTable, but this also requires putting everything into memory. By using a DataReader we don't have to
/// store every content item and its XML structure in memory to get it into the DB, we can stream it into the db with this
/// reader.
/// </remarks>
internal class PocoDataDataReader<T, TSyntax> : BulkDataReader
    where TSyntax : ISqlSyntaxProvider
{
    //syntax provider used to resolve each column's SqlDbType in AddSchemaTableRows
    private readonly MicrosoftSqlSyntaxProviderBase<TSyntax> _sqlSyntaxProvider;
    //table definition generated from the POCO's type via DefinitionFactory
    private readonly TableDefinition _tableDefinition;
    //PocoColumns in PocoData order; indexed by ordinal in GetValue
    private readonly Database.PocoColumn[] _readerColumns;
    //enumerator over the source collection; advanced by Read(), disposed in Dispose()
    private readonly IEnumerator<T> _enumerator;
    //column definitions in TableDefinition order; used to build the schema rows
    private readonly ColumnDefinition[] _columnDefinitions;
    //-1 until the first successful Read(); afterwards the count of rows read so far
    private int _recordsAffected = -1;

    public PocoDataDataReader(
        IEnumerable<T> dataSource,
        Database.PocoData pd,
        MicrosoftSqlSyntaxProviderBase<TSyntax> sqlSyntaxProvider)
    {
        if (dataSource == null) throw new ArgumentNullException("dataSource");
        if (sqlSyntaxProvider == null) throw new ArgumentNullException("sqlSyntaxProvider");
        _tableDefinition = DefinitionFactory.GetTableDefinition(sqlSyntaxProvider, pd.type);
        if (_tableDefinition == null) throw new InvalidOperationException("No table definition found for type " + pd.type);
        _readerColumns = pd.Columns.Select(x => x.Value).ToArray();
        _sqlSyntaxProvider = sqlSyntaxProvider;
        _enumerator = dataSource.GetEnumerator();
        _columnDefinitions = _tableDefinition.Columns.ToArray();
    }

    // Schema name of the destination table, taken from the generated table definition.
    protected override string SchemaName
    {
        get { return _tableDefinition.SchemaName; }
    }

    // Name of the destination table, taken from the generated table definition.
    protected override string TableName
    {
        get { return _tableDefinition.Name; }
    }

    // Number of rows the reader has advanced through; -1 when nothing has been read yet.
    public override int RecordsAffected
    {
        get { return _recordsAffected <= 0 ? -1 : _recordsAffected; }
    }

    /// <summary>
    /// This will automatically add the schema rows based on the Poco table definition and the columns passed in
    /// </summary>
    protected override void AddSchemaTableRows()
    {
        //NOTE(review): schema rows are emitted in _columnDefinitions order while GetValue indexes into
        //_readerColumns (PocoData order) - confirm the two orderings always agree for these DTOs
        foreach (var col in _columnDefinitions)
        {
            var sqlDbType = SqlDbType.NVarChar;
            if (col.HasSpecialDbType)
            {
                //get the SqlDbType from the 'special type'
                switch (col.DbType)
                {
                    case SpecialDbTypes.NTEXT:
                        sqlDbType = SqlDbType.NText;
                        break;
                    case SpecialDbTypes.NCHAR:
                        sqlDbType = SqlDbType.NChar;
                        break;
                    default:
                        throw new ArgumentOutOfRangeException();
                }
            }
            else if (col.Type.HasValue)
            {
                //get the SqlDbType from the DbType
                sqlDbType = _sqlSyntaxProvider.GetSqlDbType(col.Type.Value);
            }
            else
            {
                //get the SqlDbType from the clr type
                sqlDbType = _sqlSyntaxProvider.GetSqlDbType(col.PropertyType);
            }
            AddSchemaTableRow(
                col.Name,
                col.Size > 0 ? (int?)col.Size : null,
                col.Precision > 0 ? (short?)col.Precision : null,
                null, col.IsUnique, col.IsIdentity, col.IsNullable, sqlDbType,
                null, null, null, null, null);
        }
    }

    /// <summary>
    /// Get the value from the column index for the current object
    /// </summary>
    /// <param name="i">The zero-based column ordinal (into the PocoData column array)</param>
    /// <returns>The column value of the current record, or null when there is no current record</returns>
    public override object GetValue(int i)
    {
        if (_enumerator.Current != null)
        {
            return _readerColumns[i].GetValue(_enumerator.Current);
        }
        return null;
        //TODO: Or throw ?
    }

    /// <summary>
    /// Advance the cursor
    /// </summary>
    /// <returns>True while there are more records in the source collection</returns>
    public override bool Read()
    {
        var result = _enumerator.MoveNext();
        if (result)
        {
            //switch from the "nothing read" sentinel (-1) to a real count on the first row
            if (_recordsAffected == -1)
            {
                _recordsAffected = 0;
            }
            _recordsAffected++;
        }
        return result;
    }

    /// <summary>
    /// Ensure the enumerator is disposed
    /// </summary>
    /// <param name="disposing">True when called from Dispose() rather than a finalizer</param>
    protected override void Dispose(bool disposing)
    {
        base.Dispose(disposing);
        if (disposing)
        {
            _enumerator.Dispose();
        }
    }
}
}

View File

@@ -248,12 +248,12 @@ namespace Umbraco.Core.Persistence.Repositories
var xmlItems = (from descendant in descendants
let xml = serializer(descendant)
select new ContentXmlDto { NodeId = descendant.Id, Xml = xml.ToDataString() }).ToArray();
select new ContentXmlDto { NodeId = descendant.Id, Xml = xml.ToDataString() });
//bulk insert it into the database
Database.BulkInsertRecords(xmlItems, tr);
var count = Database.BulkInsertRecords(xmlItems, tr, SqlSyntax);
processed += xmlItems.Length;
processed += count;
pageIndex++;
} while (processed < total);

View File

@@ -4,6 +4,7 @@ using System.Data;
namespace Umbraco.Core.Persistence.SqlSyntax
{
//TODO: TSyntax should be removed, it's not used/needed here
public class DbTypes<TSyntax>
where TSyntax : ISqlSyntaxProvider
{

View File

@@ -1,4 +1,6 @@
using System;
using System.Data;
using System.Linq;
using Umbraco.Core.Persistence.Querying;
namespace Umbraco.Core.Persistence.SqlSyntax
@@ -133,5 +135,111 @@ namespace Umbraco.Core.Persistence.SqlSyntax
throw new ArgumentOutOfRangeException("columnType");
}
}
/// <summary>
/// This uses the DbTypeMap created and custom mapping to resolve the SqlDbType
/// </summary>
/// <param name="clrType">The CLR type to map (e.g. typeof(int))</param>
/// <returns>The SqlDbType mapped from the CLR type</returns>
/// <exception cref="NotSupportedException">Thrown when no mapping exists for the CLR type</exception>
public virtual SqlDbType GetSqlDbType(Type clrType)
{
    //FIX: First() would throw an unhelpful "Sequence contains no matching element" for an unmapped
    //type; look the mapping up explicitly and surface a meaningful error instead.
    var dbType = DbTypeMap.ColumnDbTypeMap
        .Where(x => x.Key == clrType)
        .Select(x => (DbType?) x.Value)
        .FirstOrDefault();
    if (dbType.HasValue == false)
        throw new NotSupportedException("There is no DbType mapping defined for the CLR type " + clrType);
    return GetSqlDbType(dbType.Value);
}
/// <summary>
/// Returns the mapped SqlDbType for the DbType specified
/// </summary>
/// <param name="dbType">The DbType to map</param>
/// <returns>The equivalent SqlDbType</returns>
/// <exception cref="NotSupportedException">Thrown for DbTypes with no SqlDbType equivalent (SByte, UInt16, UInt32, UInt64, VarNumeric)</exception>
/// <exception cref="ArgumentOutOfRangeException">Thrown for unrecognized DbType values</exception>
public virtual SqlDbType GetSqlDbType(DbType dbType)
{
    var sqlDbType = SqlDbType.NVarChar;
    //SEE: https://msdn.microsoft.com/en-us/library/cc716729(v=vs.110).aspx
    // and https://msdn.microsoft.com/en-us/library/yy6y35y8%28v=vs.110%29.aspx?f=255&MSPPError=-2147217396
    switch (dbType)
    {
        case DbType.AnsiString:
            sqlDbType = SqlDbType.VarChar;
            break;
        case DbType.Binary:
            sqlDbType = SqlDbType.VarBinary;
            break;
        case DbType.Byte:
            sqlDbType = SqlDbType.TinyInt;
            break;
        case DbType.Boolean:
            sqlDbType = SqlDbType.Bit;
            break;
        case DbType.Currency:
            sqlDbType = SqlDbType.Money;
            break;
        case DbType.Date:
            sqlDbType = SqlDbType.Date;
            break;
        case DbType.DateTime:
            sqlDbType = SqlDbType.DateTime;
            break;
        case DbType.Decimal:
            sqlDbType = SqlDbType.Decimal;
            break;
        case DbType.Double:
            sqlDbType = SqlDbType.Float;
            break;
        case DbType.Guid:
            sqlDbType = SqlDbType.UniqueIdentifier;
            break;
        case DbType.Int16:
            sqlDbType = SqlDbType.SmallInt;
            break;
        case DbType.Int32:
            sqlDbType = SqlDbType.Int;
            break;
        case DbType.Int64:
            sqlDbType = SqlDbType.BigInt;
            break;
        case DbType.Object:
            sqlDbType = SqlDbType.Variant;
            break;
        case DbType.SByte:
            throw new NotSupportedException("Inferring a SqlDbType from SByte is not supported.");
        case DbType.Single:
            sqlDbType = SqlDbType.Real;
            break;
        case DbType.String:
            sqlDbType = SqlDbType.NVarChar;
            break;
        case DbType.Time:
            sqlDbType = SqlDbType.Time;
            break;
        case DbType.UInt16:
            throw new NotSupportedException("Inferring a SqlDbType from UInt16 is not supported.");
        case DbType.UInt32:
            throw new NotSupportedException("Inferring a SqlDbType from UInt32 is not supported.");
        case DbType.UInt64:
            throw new NotSupportedException("Inferring a SqlDbType from UInt64 is not supported.");
        case DbType.VarNumeric:
            //FIX: the message previously read "Inferring a VarNumeric from UInt64 is not supported."
            //which named the wrong source and target types
            throw new NotSupportedException("Inferring a SqlDbType from VarNumeric is not supported.");
        case DbType.AnsiStringFixedLength:
            sqlDbType = SqlDbType.Char;
            break;
        case DbType.StringFixedLength:
            sqlDbType = SqlDbType.NChar;
            break;
        case DbType.Xml:
            sqlDbType = SqlDbType.Xml;
            break;
        case DbType.DateTime2:
            sqlDbType = SqlDbType.DateTime2;
            break;
        case DbType.DateTimeOffset:
            sqlDbType = SqlDbType.DateTimeOffset;
            break;
        default:
            //FIX: include the parameter name and value so the failure is diagnosable
            throw new ArgumentOutOfRangeException("dbType", dbType, "No SqlDbType mapping exists for this DbType.");
    }
    return sqlDbType;
}
}
}

View File

@@ -415,6 +415,7 @@
<Compile Include="Models\UmbracoDomain.cs" />
<Compile Include="Models\DoNotCloneAttribute.cs" />
<Compile Include="Models\IDomain.cs" />
<Compile Include="Persistence\BulkDataReader.cs" />
<Compile Include="Persistence\DatabaseNodeLockExtensions.cs" />
<Compile Include="Persistence\Factories\ExternalLoginFactory.cs" />
<Compile Include="Persistence\Factories\MigrationEntryFactory.cs" />
@@ -472,6 +473,7 @@
<Compile Include="Media\Exif\TIFFStrip.cs" />
<Compile Include="Media\Exif\Utility.cs" />
<Compile Include="Persistence\Migrations\Upgrades\TargetVersionSevenThreeOne\UpdateUserLanguagesToIsoCode.cs" />
<Compile Include="Persistence\PocoDataDataReader.cs" />
<Compile Include="Persistence\RecordPersistenceType.cs" />
<Compile Include="Persistence\Relators\AccessRulesRelator.cs" />
<Compile Include="Persistence\Repositories\AuditRepository.cs" />

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,198 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using NUnit.Framework;
using Umbraco.Core.Models;
using Umbraco.Core.Persistence;
using Umbraco.Core.Services;
using Umbraco.Tests.Services;
using Umbraco.Tests.TestHelpers;
using Umbraco.Tests.TestHelpers.Entities;
namespace Umbraco.Tests.Persistence
{
[DatabaseTestBehavior(DatabaseBehavior.NewDbFileAndSchemaPerTest)]
[TestFixture, NUnit.Framework.Ignore]
public class PetaPocoCachesTest : BaseServiceTest
{
#if DEBUG
    /// <summary>
    /// This tests the peta poco caches
    /// </summary>
    /// <remarks>
    /// This test WILL fail. This is because we cannot stop PetaPoco from creating more cached items for queries such as
    /// ContentTypeRepository.GetAll(1,2,3,4);
    /// when combined with other GetAll queries that pass in an array of Ids, each query generated for different length
    /// arrays will produce a unique query which then gets added to the cache.
    ///
    /// This test confirms this, if you analyze the DIFFERENCE output below you can see why the cached queries grow.
    /// </remarks>
    [Test]
    public void Check_Peta_Poco_Caches()
    {
        //each tuple holds: total cache bytes, number of cached delegates, and the cache keys for one iteration
        var result = new List<Tuple<double, int, IEnumerable<string>>>();
        Database.PocoData.UseLongKeys = true;
        //run the same create+query workload twice and snapshot the cache after each pass
        for (int i = 0; i < 2; i++)
        {
            int id1, id2, id3;
            string alias;
            CreateStuff(out id1, out id2, out id3, out alias);
            QueryStuff(id1, id2, id3, alias);
            double totalBytes1;
            IEnumerable<string> keys;
            Debug.Print(Database.PocoData.PrintDebugCacheReport(out totalBytes1, out keys));
            result.Add(new Tuple<double, int, IEnumerable<string>>(totalBytes1, keys.Count(), keys));
        }
        //print the cache keys that appeared in each pass but not the previous one
        for (int index = 0; index < result.Count; index++)
        {
            var tuple = result[index];
            Debug.Print("Bytes: {0}, Delegates: {1}", tuple.Item1, tuple.Item2);
            if (index != 0)
            {
                Debug.Print("----------------DIFFERENCE---------------------");
                var diff = tuple.Item3.Except(result[index - 1].Item3);
                foreach (var d in diff)
                {
                    Debug.Print(d);
                }
            }
        }
        //if the cache were stable, every pass would report identical size and key count
        var allByteResults = result.Select(x => x.Item1).Distinct();
        var totalKeys = result.Select(x => x.Item2).Distinct();
        Assert.AreEqual(1, allByteResults.Count());
        Assert.AreEqual(1, totalKeys.Count());
    }

    /// <summary>
    /// Verifies that the PocoData managed cache sheds entries after the sliding expiration elapses
    /// </summary>
    [Test]
    public void Verify_Memory_Expires()
    {
        //2 second sliding expiration so the cache shrinks within the 10 second sleep below
        Database.PocoData.SlidingExpirationSeconds = 2;
        var managedCache = new Database.ManagedCache();
        int id1, id2, id3;
        string alias;
        CreateStuff(out id1, out id2, out id3, out alias);
        QueryStuff(id1, id2, id3, alias);
        var count1 = managedCache.GetCache().GetCount();
        Debug.Print("Keys = " + count1);
        Assert.Greater(count1, 0);
        //wait past the sliding expiration so cached entries can be evicted
        Thread.Sleep(10000);
        var count2 = managedCache.GetCache().GetCount();
        Debug.Print("Keys = " + count2);
        Assert.Less(count2, count1);
    }

    // Exercises a broad mix of service queries so PetaPoco generates and caches many distinct delegates/queries.
    private void QueryStuff(int id1, int id2, int id3, string alias1)
    {
        var contentService = ServiceContext.ContentService;
        ServiceContext.TagService.GetTagsForEntity(id1);
        ServiceContext.TagService.GetAllContentTags();
        ServiceContext.TagService.GetTagsForEntity(id2);
        ServiceContext.TagService.GetTagsForEntity(id3);
        contentService.CountDescendants(id3);
        contentService.CountChildren(id3);
        contentService.Count(contentTypeAlias: alias1);
        contentService.Count();
        contentService.GetById(Guid.NewGuid());
        contentService.GetByLevel(2);
        contentService.GetChildren(id1);
        contentService.GetDescendants(id2);
        contentService.GetVersions(id3);
        contentService.GetRootContent();
        contentService.GetContentForExpiration();
        contentService.GetContentForRelease();
        contentService.GetContentInRecycleBin();
        ((ContentService)contentService).GetPublishedDescendants(new Content("Test", -1, new ContentType(-1))
        {
            Id = id1,
            Path = "-1," + id1
        });
        contentService.GetByVersion(Guid.NewGuid());
    }

    // Creates content types and a tree of content items; returns three content ids and the content type alias
    // that QueryStuff uses to query against.
    private void CreateStuff(out int id1, out int id2, out int id3, out string alias)
    {
        var contentService = ServiceContext.ContentService;
        //unique alias per call so repeated iterations don't collide on content type names
        var ctAlias = "umbTextpage" + Guid.NewGuid().ToString("N");
        alias = ctAlias;
        for (int i = 0; i < 20; i++)
        {
            contentService.CreateContentWithIdentity("Test", -1, "umbTextpage", 0);
        }
        var contentTypeService = ServiceContext.ContentTypeService;
        var contentType = MockedContentTypes.CreateSimpleContentType(ctAlias, "test Doc Type");
        contentTypeService.Save(contentType);
        for (int i = 0; i < 20; i++)
        {
            contentService.CreateContentWithIdentity("Test", -1, ctAlias, 0);
        }
        var parent = contentService.CreateContentWithIdentity("Test", -1, ctAlias, 0);
        id1 = parent.Id;
        //20 siblings under the parent
        for (int i = 0; i < 20; i++)
        {
            contentService.CreateContentWithIdentity("Test", parent, ctAlias);
        }
        //a 20-deep chain of descendants
        IContent current = parent;
        for (int i = 0; i < 20; i++)
        {
            current = contentService.CreateContentWithIdentity("Test", current, ctAlias);
        }
        contentType = MockedContentTypes.CreateSimpleContentType("umbMandatory" + Guid.NewGuid().ToString("N"), "Mandatory Doc Type", true);
        contentType.PropertyGroups.First().PropertyTypes.Add(
            new PropertyType("test", DataTypeDatabaseType.Ntext, "tags")
            {
                DataTypeDefinitionId = 1041
            });
        contentTypeService.Save(contentType);
        var content1 = MockedContent.CreateSimpleContent(contentType, "Tagged content 1", -1);
        content1.SetTags("tags", new[] { "hello", "world", "some", "tags" }, true);
        contentService.Publish(content1);
        id2 = content1.Id;
        var content2 = MockedContent.CreateSimpleContent(contentType, "Tagged content 2", -1);
        content2.SetTags("tags", new[] { "hello", "world", "some", "tags" }, true);
        contentService.Publish(content2);
        id3 = content2.Id;
        //one tagged item goes to the recycle bin so queries cover trashed content too
        contentService.MoveToRecycleBin(content1);
    }
#endif
}
}

View File

@@ -1,201 +1,16 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading;
using NUnit.Framework;
using Umbraco.Core;
using Umbraco.Core.Models;
using Umbraco.Core.Logging;
using Umbraco.Core.Models.Rdbms;
using Umbraco.Core.Persistence;
using Umbraco.Core.Services;
using Umbraco.Tests.Services;
using Umbraco.Core.Persistence.SqlSyntax;
using Umbraco.Tests.TestHelpers;
using Umbraco.Tests.TestHelpers.Entities;
namespace Umbraco.Tests.Persistence
{
[DatabaseTestBehavior(DatabaseBehavior.NewDbFileAndSchemaPerTest)]
[TestFixture, NUnit.Framework.Ignore]
public class PetaPocoCachesTest : BaseServiceTest
{
    /// <summary>
    /// This tests the peta poco caches
    /// </summary>
    /// <remarks>
    /// This test WILL fail. This is because we cannot stop PetaPoco from creating more cached items for queries such as
    /// ContentTypeRepository.GetAll(1,2,3,4);
    /// when combined with other GetAll queries that pass in an array of Ids, each query generated for different length
    /// arrays will produce a unique query which then gets added to the cache.
    ///
    /// This test confirms this, if you analyze the DIFFERENCE output below you can see why the cached queries grow.
    /// </remarks>
    [Test]
    public void Check_Peta_Poco_Caches()
    {
        var result = new List<Tuple<double, int, IEnumerable<string>>>();

        // Use full (long) cache keys so the debug report below shows the complete query text.
        Database.PocoData.UseLongKeys = true;

        // Run the same create + query workload twice; if the caches were stable
        // the second pass would add no new entries.
        for (int i = 0; i < 2; i++)
        {
            int id1, id2, id3;
            string alias;
            CreateStuff(out id1, out id2, out id3, out alias);
            QueryStuff(id1, id2, id3, alias);

            double totalBytes1;
            IEnumerable<string> keys;
            Debug.Print(Database.PocoData.PrintDebugCacheReport(out totalBytes1, out keys));

            // Capture (total bytes, key count, keys) per pass for the comparison below.
            result.Add(new Tuple<double, int, IEnumerable<string>>(totalBytes1, keys.Count(), keys));
        }

        // Print each pass and, from the second pass onwards, the cache keys added
        // since the previous pass (the DIFFERENCE referred to in the remarks).
        for (int index = 0; index < result.Count; index++)
        {
            var tuple = result[index];
            Debug.Print("Bytes: {0}, Delegates: {1}", tuple.Item1, tuple.Item2);
            if (index != 0)
            {
                Debug.Print("----------------DIFFERENCE---------------------");
                var diff = tuple.Item3.Except(result[index - 1].Item3);
                foreach (var d in diff)
                {
                    Debug.Print(d);
                }
            }
        }

        // Both passes should report identical byte totals and key counts.
        // Per the remarks above, this is expected to fail.
        var allByteResults = result.Select(x => x.Item1).Distinct();
        var totalKeys = result.Select(x => x.Item2).Distinct();
        Assert.AreEqual(1, allByteResults.Count());
        Assert.AreEqual(1, totalKeys.Count());
    }

    /// <summary>
    /// Verifies that PetaPoco's managed cache entries expire: after the sliding
    /// expiration window passes with no activity, the entry count drops.
    /// </summary>
    [Test]
    public void Verify_Memory_Expires()
    {
        // Short sliding expiration so the test does not have to wait long.
        Database.PocoData.SlidingExpirationSeconds = 2;

        var managedCache = new Database.ManagedCache();

        // Populate the caches with a realistic workload.
        int id1, id2, id3;
        string alias;
        CreateStuff(out id1, out id2, out id3, out alias);
        QueryStuff(id1, id2, id3, alias);

        var count1 = managedCache.GetCache().GetCount();
        Debug.Print("Keys = " + count1);
        Assert.Greater(count1, 0);

        // Sleep well past the 2 second sliding expiration so entries can be evicted.
        Thread.Sleep(10000);

        var count2 = managedCache.GetCache().GetCount();
        Debug.Print("Keys = " + count2);
        Assert.Less(count2, count1);
    }

    /// <summary>
    /// Runs a broad mix of tag and content service queries against the entities
    /// created by <see cref="CreateStuff"/>, so that every distinct repository
    /// query shape is executed at least once (each shape adds an entry to
    /// PetaPoco's delegate cache).
    /// </summary>
    /// <param name="id1">Id of the parent node (has children and a descendant chain).</param>
    /// <param name="id2">Id of the first tagged, published node.</param>
    /// <param name="id3">Id of the second tagged, published node.</param>
    /// <param name="alias1">Alias of the generated content type.</param>
    private void QueryStuff(int id1, int id2, int id3, string alias1)
    {
        var contentService = ServiceContext.ContentService;

        // Tag lookups.
        ServiceContext.TagService.GetTagsForEntity(id1);
        ServiceContext.TagService.GetAllContentTags();
        ServiceContext.TagService.GetTagsForEntity(id2);
        ServiceContext.TagService.GetTagsForEntity(id3);

        // Count queries.
        contentService.CountDescendants(id3);
        contentService.CountChildren(id3);
        contentService.Count(contentTypeAlias: alias1);
        contentService.Count();

        // Retrieval queries. The random Guid lookups are expected misses but
        // still exercise the lookup-by-key query shape.
        contentService.GetById(Guid.NewGuid());
        contentService.GetByLevel(2);
        contentService.GetChildren(id1);
        contentService.GetDescendants(id2);
        contentService.GetVersions(id3);
        contentService.GetRootContent();
        contentService.GetContentForExpiration();
        contentService.GetContentForRelease();
        contentService.GetContentInRecycleBin();

        // Published-descendants query requires a content item with an explicit path.
        ((ContentService)contentService).GetPublishedDescendants(new Content("Test", -1, new ContentType(-1))
        {
            Id = id1,
            Path = "-1," + id1
        });
        contentService.GetByVersion(Guid.NewGuid());
    }

    /// <summary>
    /// Creates a batch of content types and content (root items, children, a deep
    /// descendant chain, plus tagged/published/trashed items) and returns the
    /// ids/alias that <see cref="QueryStuff"/> queries against.
    /// </summary>
    /// <param name="id1">Set to the id of a parent node with 20 children and a 20-deep descendant chain.</param>
    /// <param name="id2">Set to the id of a published, tagged node that is then moved to the recycle bin.</param>
    /// <param name="id3">Set to the id of a second published, tagged node.</param>
    /// <param name="alias">Set to the unique alias of the generated content type.</param>
    private void CreateStuff(out int id1, out int id2, out int id3, out string alias)
    {
        var contentService = ServiceContext.ContentService;

        // Unique alias per run so every run creates a brand new content type.
        var ctAlias = "umbTextpage" + Guid.NewGuid().ToString("N");
        alias = ctAlias;

        // 20 root items of the pre-existing "umbTextpage" type.
        for (int i = 0; i < 20; i++)
        {
            contentService.CreateContentWithIdentity("Test", -1, "umbTextpage", 0);
        }

        var contentTypeService = ServiceContext.ContentTypeService;
        var contentType = MockedContentTypes.CreateSimpleContentType(ctAlias, "test Doc Type");
        contentTypeService.Save(contentType);

        // 20 root items of the newly created type.
        for (int i = 0; i < 20; i++)
        {
            contentService.CreateContentWithIdentity("Test", -1, ctAlias, 0);
        }

        var parent = contentService.CreateContentWithIdentity("Test", -1, ctAlias, 0);
        id1 = parent.Id;

        // 20 direct children under the parent.
        for (int i = 0; i < 20; i++)
        {
            contentService.CreateContentWithIdentity("Test", parent, ctAlias);
        }

        // A 20-level-deep chain of descendants starting at the parent.
        IContent current = parent;
        for (int i = 0; i < 20; i++)
        {
            current = contentService.CreateContentWithIdentity("Test", current, ctAlias);
        }

        // A mandatory doc type with a "tags" property.
        // NOTE(review): 1041 is presumably the built-in tags data type id - confirm.
        contentType = MockedContentTypes.CreateSimpleContentType("umbMandatory" + Guid.NewGuid().ToString("N"), "Mandatory Doc Type", true);
        contentType.PropertyGroups.First().PropertyTypes.Add(
            new PropertyType("test", DataTypeDatabaseType.Ntext, "tags")
            {
                DataTypeDefinitionId = 1041
            });
        contentTypeService.Save(contentType);

        // Two tagged, published items...
        var content1 = MockedContent.CreateSimpleContent(contentType, "Tagged content 1", -1);
        content1.SetTags("tags", new[] { "hello", "world", "some", "tags" }, true);
        contentService.Publish(content1);
        id2 = content1.Id;

        var content2 = MockedContent.CreateSimpleContent(contentType, "Tagged content 2", -1);
        content2.SetTags("tags", new[] { "hello", "world", "some", "tags" }, true);
        contentService.Publish(content2);
        id3 = content2.Id;

        // ...one of which ends up in the recycle bin.
        contentService.MoveToRecycleBin(content1);
    }
}
[DatabaseTestBehavior(DatabaseBehavior.NewDbFileAndSchemaPerTest)]
[TestFixture]
public class PetaPocoExtensionsTest : BaseDatabaseFactoryTest
@@ -213,7 +28,7 @@ namespace Umbraco.Tests.Persistence
}
[Test]
public void Can_Bulk_Insert()
public void Can_Bulk_Insert_One_By_One()
{
// Arrange
var db = DatabaseContext.Database;
@@ -234,13 +49,168 @@ namespace Umbraco.Tests.Persistence
// Act
using (ProfilingLogger.TraceDuration<PetaPocoExtensionsTest>("starting insert", "finished insert"))
{
db.BulkInsertRecords(servers);
using (var tr = db.GetTransaction())
{
db.BulkInsertRecords(servers, tr, SqlSyntax, useNativeSqlPlatformBulkInsert:false);
tr.Complete();
}
}
// Assert
Assert.That(db.ExecuteScalar<int>("SELECT COUNT(*) FROM umbracoServer"), Is.EqualTo(1000));
}
/// <summary>
/// Verifies that one-by-one bulk inserts participate in the ambient transaction:
/// when the transaction is never completed, disposing it rolls everything back
/// and no rows are persisted.
/// </summary>
[Test]
public void Can_Bulk_Insert_One_By_One_Transaction_Rollback()
{
    // Arrange: build 1000 server registration rows.
    var db = DatabaseContext.Database;
    var rows = Enumerable.Range(0, 1000)
        .Select(i => new ServerRegistrationDto
        {
            ServerAddress = "address" + i,
            ServerIdentity = "computer" + i,
            DateRegistered = DateTime.Now,
            IsActive = true,
            DateAccessed = DateTime.Now
        })
        .ToList();

    // Act: insert inside a transaction that is deliberately never completed.
    using (ProfilingLogger.TraceDuration<PetaPocoExtensionsTest>("starting insert", "finished insert"))
    using (var transaction = db.GetTransaction())
    {
        db.BulkInsertRecords(rows, transaction, SqlSyntax, useNativeSqlPlatformBulkInsert: false);
        // no transaction.Complete() here - disposing the transaction rolls it back
    }

    // Assert: nothing was persisted.
    Assert.That(db.ExecuteScalar<int>("SELECT COUNT(*) FROM umbracoServer"), Is.EqualTo(0));
}
/// <summary>
/// Verifies that the native SQL Server bulk insert path (SqlBulkCopy) inserts
/// all records inside a completed transaction. Requires a manually configured
/// SQL Server instance, so it is ignored by default.
/// </summary>
[NUnit.Framework.Ignore("Ignored because you need to configure your own SQL Server to test this with")]
[Test]
public void Can_Bulk_Insert_Native_Sql_Server_Bulk_Inserts()
{
    // Create the db - update the connection string below before un-ignoring this test.
    var dbSqlServer = new UmbracoDatabase(
        "server=.\\SQLExpress;database=YOURDB;user id=YOURUSER;password=YOURPASSWORD",
        Constants.DatabaseProviders.SqlServer,
        new DebugDiagnosticsLogger());

    // Drop the table (assumes it already exists in the target database).
    dbSqlServer.Execute("DROP TABLE [umbracoServer]");

    // Re-create it with the known umbracoServer schema.
    dbSqlServer.Execute(@"CREATE TABLE [umbracoServer](
[id] [int] IDENTITY(1,1) NOT NULL,
[address] [nvarchar](500) NOT NULL,
[computerName] [nvarchar](255) NOT NULL,
[registeredDate] [datetime] NOT NULL CONSTRAINT [DF_umbracoServer_registeredDate] DEFAULT (getdate()),
[lastNotifiedDate] [datetime] NOT NULL,
[isActive] [bit] NOT NULL,
[isMaster] [bit] NOT NULL,
CONSTRAINT [PK_umbracoServer] PRIMARY KEY CLUSTERED
(
[id] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
)");

    // Build 1000 server registration rows.
    var data = new List<ServerRegistrationDto>();
    for (var i = 0; i < 1000; i++)
    {
        data.Add(new ServerRegistrationDto
        {
            ServerAddress = "address" + i,
            ServerIdentity = "computer" + i,
            DateRegistered = DateTime.Now,
            IsActive = true,
            DateAccessed = DateTime.Now
        });
    }

    // Insert using the native SQL Server bulk insert path and commit.
    var sqlServerSyntax = new SqlServerSyntaxProvider();
    using (var tr = dbSqlServer.GetTransaction())
    {
        dbSqlServer.BulkInsertRecords(data, tr, sqlServerSyntax, useNativeSqlPlatformBulkInsert: true);
        tr.Complete();
    }

    // Assert: all 1000 rows were persisted.
    Assert.That(dbSqlServer.ExecuteScalar<int>("SELECT COUNT(*) FROM umbracoServer"), Is.EqualTo(1000));
}
/// <summary>
/// Verifies that the native platform bulk insert path persists all records
/// when the transaction is completed.
/// </summary>
[Test]
public void Can_Bulk_Insert_Native_Sql_Bulk_Inserts()
{
    // Arrange: build 1000 server registration rows.
    var db = DatabaseContext.Database;
    var rows = Enumerable.Range(0, 1000)
        .Select(i => new ServerRegistrationDto
        {
            ServerAddress = "address" + i,
            ServerIdentity = "computer" + i,
            DateRegistered = DateTime.Now,
            IsActive = true,
            DateAccessed = DateTime.Now
        })
        .ToList();

    // Act: insert using the native bulk insert path and commit.
    using (ProfilingLogger.TraceDuration<PetaPocoExtensionsTest>("starting insert", "finished insert"))
    using (var transaction = db.GetTransaction())
    {
        db.BulkInsertRecords(rows, transaction, SqlSyntax, useNativeSqlPlatformBulkInsert: true);
        transaction.Complete();
    }

    // Assert: all 1000 rows were persisted.
    Assert.That(db.ExecuteScalar<int>("SELECT COUNT(*) FROM umbracoServer"), Is.EqualTo(1000));
}
/// <summary>
/// Verifies that the native platform bulk insert path participates in the
/// ambient transaction: when the transaction is never completed, disposing it
/// rolls everything back and no rows are persisted.
/// </summary>
[Test]
public void Can_Bulk_Insert_Native_Sql_Bulk_Inserts_Transaction_Rollback()
{
    // Arrange: build 1000 server registration rows.
    var db = DatabaseContext.Database;
    var rows = Enumerable.Range(0, 1000)
        .Select(i => new ServerRegistrationDto
        {
            ServerAddress = "address" + i,
            ServerIdentity = "computer" + i,
            DateRegistered = DateTime.Now,
            IsActive = true,
            DateAccessed = DateTime.Now
        })
        .ToList();

    // Act: insert using the native bulk insert path inside a transaction that
    // is deliberately never completed.
    using (ProfilingLogger.TraceDuration<PetaPocoExtensionsTest>("starting insert", "finished insert"))
    using (var transaction = db.GetTransaction())
    {
        db.BulkInsertRecords(rows, transaction, SqlSyntax, useNativeSqlPlatformBulkInsert: true);
        // no transaction.Complete() here - disposing the transaction rolls it back
    }

    // Assert: nothing was persisted.
    Assert.That(db.ExecuteScalar<int>("SELECT COUNT(*) FROM umbracoServer"), Is.EqualTo(0));
}
[Test]
public void Generate_Bulk_Import_Sql()
{
@@ -263,7 +233,9 @@ namespace Umbraco.Tests.Persistence
// Act
string[] sql;
db.GenerateBulkInsertCommand(servers, db.Connection, out sql);
db.GenerateBulkInsertCommand(
Database.PocoData.ForType(typeof(ServerRegistrationDto)),
servers, out sql);
db.CloseSharedConnection();
// Assert
@@ -295,7 +267,7 @@ namespace Umbraco.Tests.Persistence
// Act
string[] sql;
db.GenerateBulkInsertCommand(servers, db.Connection, out sql);
db.GenerateBulkInsertCommand(Database.PocoData.ForType(typeof(ServerRegistrationDto)), servers, out sql);
db.CloseSharedConnection();
// Assert

View File

@@ -28,7 +28,7 @@ namespace Umbraco.Tests.Persistence.SyntaxProvider
public void Can_Generate_Create_Table_Statement()
{
var type = typeof(TagRelationshipDto);
var definition = DefinitionFactory.GetTableDefinition(type);
var definition = DefinitionFactory.GetTableDefinition(SqlSyntaxContext.SqlSyntaxProvider, type);
string create = SqlSyntaxContext.SqlSyntaxProvider.Format(definition);
string primaryKey = SqlSyntaxContext.SqlSyntaxProvider.FormatPrimaryKey(definition);

View File

@@ -51,7 +51,7 @@ WHERE (([umbracoNode].[nodeObjectType] = @0))) x)".Replace(Environment.NewLine,
var sqlSyntax = new SqlCeSyntaxProvider();
var type = typeof (NodeDto);
var definition = DefinitionFactory.GetTableDefinition(type);
var definition = DefinitionFactory.GetTableDefinition(sqlSyntax, type);
string create = sqlSyntax.Format(definition);
string primaryKey = sqlSyntax.FormatPrimaryKey(definition);

View File

@@ -176,7 +176,9 @@
</ItemGroup>
<ItemGroup>
<Compile Include="Migrations\MigrationIssuesTests.cs" />
<Compile Include="Persistence\BulkDataReaderTests.cs" />
<Compile Include="Persistence\Migrations\MigrationStartupHandlerTests.cs" />
<Compile Include="Persistence\PetaPocoCachesTest.cs" />
<Compile Include="Persistence\PetaPocoExpressionsTests.cs" />
<Compile Include="Persistence\Repositories\RedirectUrlRepositoryTests.cs" />
<Compile Include="TestHelpers\Entities\MockedPropertyTypes.cs" />