diff --git a/src/Umbraco.Core/DatabaseContext.cs b/src/Umbraco.Core/DatabaseContext.cs index 05aba6b97d..585f1d51cf 100644 --- a/src/Umbraco.Core/DatabaseContext.cs +++ b/src/Umbraco.Core/DatabaseContext.cs @@ -174,7 +174,7 @@ namespace Umbraco.Core /// public void ConfigureEmbeddedDatabaseConnection() { - const string providerName = "System.Data.SqlServerCe.4.0"; + const string providerName = Constants.DatabaseProviders.SqlCe; var connectionString = GetEmbeddedDatabaseConnectionString(); SaveConnectionString(connectionString, providerName); diff --git a/src/Umbraco.Core/Persistence/BulkDataReader.cs b/src/Umbraco.Core/Persistence/BulkDataReader.cs new file mode 100644 index 0000000000..8df4dd536e --- /dev/null +++ b/src/Umbraco.Core/Persistence/BulkDataReader.cs @@ -0,0 +1,1511 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Data; +using System.Data.Common; +using System.Data.SqlClient; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace Umbraco.Core.Persistence +{ + /// + /// A base implementation of that is suitable for . + /// + /// + /// + /// Borrowed from Microsoft: + /// See: https://blogs.msdn.microsoft.com/anthonybloesch/2013/01/23/bulk-loading-data-with-idatareader-and-sqlbulkcopy/ + /// + /// This implementation is designed to be very memory efficient requiring few memory resources and to support + /// rapid transfer of data to SQL Server. + /// + /// Subclasses should implement , , + /// , , . + /// If they contain disposable resources they should override . + /// + /// SD: Alternatively, we could have used a LinqEntityDataReader which is nicer to use but it uses quite a lot of reflection and + /// I thought this would just be quicker. 
+ /// Simple example of that: https://github.com/gridsum/DataflowEx/blob/master/Gridsum.DataflowEx/Databases/BulkDataReader.cs + /// Full example of that: https://github.com/matthewschrager/Repository/blob/master/Repository.EntityFramework/EntityDataReader.cs + /// So we know where to find that if we ever need it, these would convert any Linq data source to an IDataReader + /// + /// + internal abstract class BulkDataReader : IDataReader + { + + #region Fields + + /// + /// The containing the input row set's schema information + /// requires to function correctly. + /// + private DataTable _schemaTable = new DataTable(); + + /// + /// The mapping from the row set input to the target table's columns. + /// + private List _columnMappings = new List(); + + #endregion + + #region Subclass utility routines + + /// + /// The mapping from the row set input to the target table's columns. + /// + /// + /// If necessary, will be called to initialize the mapping. + /// + public ReadOnlyCollection ColumnMappings + { + get + { + if (this._columnMappings.Count == 0) + { + // Need to add the column definitions and mappings. + AddSchemaTableRows(); + + if (this._columnMappings.Count == 0) + { + throw new InvalidOperationException("AddSchemaTableRows did not add rows."); + } + + Debug.Assert(this._schemaTable.Rows.Count == FieldCount); + } + + return new ReadOnlyCollection(_columnMappings); + } + } + + /// + /// The name of the input row set's schema. + /// + /// + /// This may be different from the target schema but usually they are identical. + /// + protected abstract string SchemaName + { + get; + } + + /// + /// The name of the input row set's table. + /// + /// + /// This may be different from the target table but usually they are identical. + /// + protected abstract string TableName + { + get; + } + + /// + /// Adds the input row set's schema to the object. + /// + /// + /// Call + /// to do this for each row. 
+ /// + /// + protected abstract void AddSchemaTableRows(); + + /// + /// For each , the optional columns that may have values. + /// + /// + /// This is used for checking the parameters of . + /// + /// + private static readonly Dictionary> AllowedOptionalColumnCombinations = new Dictionary> + { + { SqlDbType.BigInt, new List { } }, + { SqlDbType.Binary, new List { SchemaTableColumn.ColumnSize } }, + { SqlDbType.Bit, new List { } }, + { SqlDbType.Char, new List { SchemaTableColumn.ColumnSize } }, + { SqlDbType.Date, new List { } }, + { SqlDbType.DateTime, new List { } }, + { SqlDbType.DateTime2, new List { SchemaTableColumn.NumericPrecision } }, + { SqlDbType.DateTimeOffset, new List { SchemaTableColumn.NumericPrecision } }, + { SqlDbType.Decimal, new List { SchemaTableColumn.NumericPrecision, SchemaTableColumn.NumericScale } }, + { SqlDbType.Float, new List { SchemaTableColumn.NumericPrecision, SchemaTableColumn.NumericScale } }, + { SqlDbType.Image, new List { } }, + { SqlDbType.Int, new List { } }, + { SqlDbType.Money, new List { } }, + { SqlDbType.NChar, new List { SchemaTableColumn.ColumnSize } }, + { SqlDbType.NText, new List { } }, + { SqlDbType.NVarChar, new List { SchemaTableColumn.ColumnSize } }, + { SqlDbType.Real, new List { } }, + { SqlDbType.SmallDateTime, new List { } }, + { SqlDbType.SmallInt, new List { } }, + { SqlDbType.SmallMoney, new List { } }, + { SqlDbType.Structured, new List { } }, + { SqlDbType.Text, new List { } }, + { SqlDbType.Time, new List { SchemaTableColumn.NumericPrecision } }, + { SqlDbType.Timestamp, new List { } }, + { SqlDbType.TinyInt, new List { } }, + { SqlDbType.Udt, new List { BulkDataReader.DataTypeNameSchemaColumn } }, + { SqlDbType.UniqueIdentifier, new List { } }, + { SqlDbType.VarBinary, new List { SchemaTableColumn.ColumnSize } }, + { SqlDbType.VarChar, new List { SchemaTableColumn.ColumnSize } }, + { SqlDbType.Variant, new List { } }, + { SqlDbType.Xml, new List { 
BulkDataReader.XmlSchemaCollectionDatabaseSchemaColumn, BulkDataReader.XmlSchemaCollectionOwningSchemaSchemaColumn, BulkDataReader.XmlSchemaCollectionNameSchemaColumn } } + }; + + /// + /// A helper method to support . + /// + /// + /// This methds does extensive argument checks. These errors will cause hard to diagnose exceptions in latter + /// processing so it is important to detect them when they can be easily associated with the code defect. + /// + /// + /// The combination of values for the parameters is not supported. + /// + /// + /// A null value for the parameter is not supported. + /// + /// + /// The name of the column. + /// + /// + /// The size of the column which may be null if not applicable. + /// + /// + /// The precision of the column which may be null if not applicable. + /// + /// + /// The scale of the column which may be null if not applicable. + /// + /// + /// Are the column values unique (i.e. never duplicated)? + /// + /// + /// Is the column part of the primary key? + /// + /// + /// Is the column nullable (i.e. optional)? + /// + /// + /// The corresponding . + /// + /// + /// The schema name of the UDT. + /// + /// + /// The type name of the UDT. + /// + /// + /// For XML columns the schema collection's database name. Otherwise, null. + /// + /// + /// For XML columns the schema collection's schema name. Otherwise, null. + /// + /// + /// For XML columns the schema collection's name. Otherwise, null. + /// + /// + protected void AddSchemaTableRow(string columnName, + int? columnSize, + short? numericPrecision, + short? 
numericScale, + bool isUnique, + bool isKey, + bool allowDbNull, + SqlDbType providerType, + string udtSchema, + string udtType, + string xmlSchemaCollectionDatabase, + string xmlSchemaCollectionOwningSchema, + string xmlSchemaCollectionName) + { + if (string.IsNullOrEmpty(columnName)) + { + throw new ArgumentException("columnName must be a nonempty string."); + } + else if (columnSize.HasValue && columnSize.Value <= 0) + { + throw new ArgumentOutOfRangeException("columnSize"); + } + else if (numericPrecision.HasValue && numericPrecision.Value <= 0) + { + throw new ArgumentOutOfRangeException("numericPrecision"); + } + else if (numericScale.HasValue && numericScale.Value < 0) + { + throw new ArgumentOutOfRangeException("columnSize"); + } + + List allowedOptionalColumnList; + + if (BulkDataReader.AllowedOptionalColumnCombinations.TryGetValue(providerType, out allowedOptionalColumnList)) + { + if ((columnSize.HasValue && !allowedOptionalColumnList.Contains(SchemaTableColumn.ColumnSize)) || + (numericPrecision.HasValue && !allowedOptionalColumnList.Contains(SchemaTableColumn.NumericPrecision)) || + (numericScale.HasValue && !allowedOptionalColumnList.Contains(SchemaTableColumn.NumericScale)) || + (udtSchema != null && !allowedOptionalColumnList.Contains(BulkDataReader.DataTypeNameSchemaColumn)) || + (udtType != null && !allowedOptionalColumnList.Contains(BulkDataReader.DataTypeNameSchemaColumn)) || + (xmlSchemaCollectionDatabase != null && !allowedOptionalColumnList.Contains(BulkDataReader.XmlSchemaCollectionDatabaseSchemaColumn)) || + (xmlSchemaCollectionOwningSchema != null && !allowedOptionalColumnList.Contains(BulkDataReader.XmlSchemaCollectionOwningSchemaSchemaColumn)) || + (xmlSchemaCollectionName != null && !allowedOptionalColumnList.Contains(BulkDataReader.XmlSchemaCollectionNameSchemaColumn))) + { + throw new ArgumentException("Columns are set that are incompatible with the value of providerType."); + } + } + else + { + throw new 
ArgumentException("providerType is unsupported."); + } + + Type dataType; // Corresponding CLR type. + string dataTypeName; // Corresponding SQL Server type. + bool isLong = false; // Is the column a large value column (e.g. nvarchar(max))? + + switch (providerType) + { + case SqlDbType.BigInt: + dataType = typeof(long); + dataTypeName = "bigint"; + break; + + case SqlDbType.Binary: + dataType = typeof(byte[]); + + if (!columnSize.HasValue) + { + throw new ArgumentException("columnSize must be specified for \"binary\" type columns."); + } + else if (columnSize > 8000) + { + throw new ArgumentOutOfRangeException("columnSize"); + } + + dataTypeName = string.Format(CultureInfo.InvariantCulture, + "binary({0})", + columnSize.Value); + break; + + case SqlDbType.Bit: + dataType = typeof(bool); + dataTypeName = "bit"; + break; + + case SqlDbType.Char: + dataType = typeof(string); + + if (!columnSize.HasValue) + { + throw new ArgumentException("columnSize must be specified for \"char\" type columns."); + } + else if (columnSize > 8000) + { + throw new ArgumentOutOfRangeException("columnSize"); + } + + dataTypeName = string.Format(CultureInfo.InvariantCulture, + "char({0})", + columnSize.Value); + break; + + case SqlDbType.Date: + dataType = typeof(DateTime); + dataTypeName = "date"; + break; + + case SqlDbType.DateTime: + dataType = typeof(DateTime); + dataTypeName = "datetime"; + break; + + case SqlDbType.DateTime2: + dataType = typeof(DateTime); + + if (numericPrecision.HasValue) + { + if (numericPrecision.Value > 7) + { + throw new ArgumentOutOfRangeException("numericPrecision"); + } + + dataTypeName = string.Format(CultureInfo.InvariantCulture, + "datetime2({0})", + numericPrecision.Value); + } + else + { + dataTypeName = "datetime2"; + } + break; + + case SqlDbType.DateTimeOffset: + dataType = typeof(DateTimeOffset); + + if (numericPrecision.HasValue) + { + if (numericPrecision.Value > 7) + { + throw new ArgumentOutOfRangeException("numericPrecision"); + } + + 
dataTypeName = string.Format(CultureInfo.InvariantCulture, + "datetimeoffset({0})", + numericPrecision.Value); + } + else + { + dataTypeName = "datetimeoffset"; + } + break; + + case SqlDbType.Decimal: + dataType = typeof(decimal); + + if (!numericPrecision.HasValue || !numericScale.HasValue) + { + throw new ArgumentException("numericPrecision and numericScale must be specified for \"decimal\" type columns."); + } + else if (numericPrecision > 38) + { + throw new ArgumentOutOfRangeException("numericPrecision"); + } + else if (numericScale.Value > numericPrecision.Value) + { + throw new ArgumentException("numericScale must not be larger than numericPrecision for \"decimal\" type columns."); + } + + dataTypeName = string.Format(CultureInfo.InvariantCulture, + "decimal({0}, {1})", + numericPrecision.Value, + numericScale.Value); + break; + + case SqlDbType.Float: + dataType = typeof(double); + + if (!numericPrecision.HasValue) + { + throw new ArgumentException("numericPrecision must be specified for \"float\" type columns"); + } + else if (numericPrecision > 53) + { + throw new ArgumentOutOfRangeException("numericPrecision"); + } + + dataTypeName = string.Format(CultureInfo.InvariantCulture, + "float({0})", + numericPrecision.Value); + break; + + case SqlDbType.Image: + dataType = typeof(byte[]); + dataTypeName = "image"; + break; + + case SqlDbType.Int: + dataType = typeof(int); + dataTypeName = "int"; + break; + + case SqlDbType.Money: + dataType = typeof(decimal); + dataTypeName = "money"; + break; + + case SqlDbType.NChar: + dataType = typeof(string); + + if (!columnSize.HasValue) + { + throw new ArgumentException("columnSize must be specified for \"nchar\" type columns"); + } + else if (columnSize > 4000) + { + throw new ArgumentOutOfRangeException("columnSize"); + } + + dataTypeName = string.Format(CultureInfo.InvariantCulture, + "nchar({0})", + columnSize.Value); + break; + + case SqlDbType.NText: + dataType = typeof(string); + dataTypeName = "ntext"; + break; 
+ + case SqlDbType.NVarChar: + dataType = typeof(string); + + if (columnSize.HasValue) + { + if (columnSize > 4000) + { + throw new ArgumentOutOfRangeException("columnSize"); + } + + dataTypeName = string.Format(CultureInfo.InvariantCulture, + "nvarchar({0})", + columnSize.Value); + } + else + { + isLong = true; + + dataTypeName = "nvarchar(max)"; + } + break; + + case SqlDbType.Real: + dataType = typeof(float); + dataTypeName = "real"; + break; + + case SqlDbType.SmallDateTime: + dataType = typeof(DateTime); + dataTypeName = "smalldatetime"; + break; + + case SqlDbType.SmallInt: + dataType = typeof(Int16); + dataTypeName = "smallint"; + break; + + case SqlDbType.SmallMoney: + dataType = typeof(decimal); + dataTypeName = "smallmoney"; + break; + + // SqlDbType.Structured not supported because it related to nested rowsets. + + case SqlDbType.Text: + dataType = typeof(string); + dataTypeName = "text"; + break; + + case SqlDbType.Time: + dataType = typeof(TimeSpan); + + if (numericPrecision.HasValue) + { + if (numericPrecision > 7) + { + throw new ArgumentOutOfRangeException("numericPrecision"); + } + + dataTypeName = string.Format(CultureInfo.InvariantCulture, + "time({0})", + numericPrecision.Value); + } + else + { + dataTypeName = "time"; + } + break; + + + // SqlDbType.Timestamp not supported because rowversions are not settable. + + case SqlDbType.TinyInt: + dataType = typeof(byte); + dataTypeName = "tinyint"; + break; + + case SqlDbType.Udt: + if (string.IsNullOrEmpty(udtSchema)) + { + throw new ArgumentException("udtSchema must be nonnull and nonempty for \"UDT\" columns."); + } + else if (string.IsNullOrEmpty(udtType)) + { + throw new ArgumentException("udtType must be nonnull and nonempty for \"UDT\" columns."); + } + + dataType = typeof(object); + using (SqlCommandBuilder commandBuilder = new SqlCommandBuilder()) + { + dataTypeName = commandBuilder.QuoteIdentifier(udtSchema) + "." 
+ commandBuilder.QuoteIdentifier(udtType); + } + break; + + case SqlDbType.UniqueIdentifier: + dataType = typeof(Guid); + dataTypeName = "uniqueidentifier"; + break; + + case SqlDbType.VarBinary: + dataType = typeof(byte[]); + + if (columnSize.HasValue) + { + if (columnSize > 8000) + { + throw new ArgumentOutOfRangeException("columnSize"); + } + + dataTypeName = string.Format(CultureInfo.InvariantCulture, + "varbinary({0})", + columnSize.Value); + } + else + { + isLong = true; + + dataTypeName = "varbinary(max)"; + } + break; + + case SqlDbType.VarChar: + dataType = typeof(string); + + if (columnSize.HasValue) + { + if (columnSize > 8000) + { + throw new ArgumentOutOfRangeException("columnSize"); + } + + dataTypeName = string.Format(CultureInfo.InvariantCulture, + "varchar({0})", + columnSize.Value); + } + else + { + isLong = true; + + dataTypeName = "varchar(max)"; + } + break; + + case SqlDbType.Variant: + dataType = typeof(object); + dataTypeName = "sql_variant"; + break; + + case SqlDbType.Xml: + dataType = typeof(string); + + if (xmlSchemaCollectionName == null) + { + if (xmlSchemaCollectionDatabase != null || xmlSchemaCollectionOwningSchema != null) + { + throw new ArgumentException("xmlSchemaCollectionDatabase and xmlSchemaCollectionOwningSchema must be null if xmlSchemaCollectionName is null for \"xml\" columns."); + } + + dataTypeName = "xml"; + } + else + { + if (xmlSchemaCollectionName.Length == 0) + { + throw new ArgumentException("xmlSchemaCollectionName must be nonempty or null for \"xml\" columns."); + } + else if (xmlSchemaCollectionDatabase != null && + xmlSchemaCollectionDatabase.Length == 0) + { + throw new ArgumentException("xmlSchemaCollectionDatabase must be null or nonempty for \"xml\" columns."); + } + else if (xmlSchemaCollectionOwningSchema != null && + xmlSchemaCollectionOwningSchema.Length == 0) + { + throw new ArgumentException("xmlSchemaCollectionOwningSchema must be null or nonempty for \"xml\" columns."); + } + + 
System.Text.StringBuilder schemaCollection = new System.Text.StringBuilder("xml("); + + if (xmlSchemaCollectionDatabase != null) + { + schemaCollection.Append("[" + xmlSchemaCollectionDatabase + "]"); + } + + schemaCollection.Append("[" + (xmlSchemaCollectionOwningSchema == null ? SchemaName : xmlSchemaCollectionOwningSchema) + "]"); + schemaCollection.Append("[" + xmlSchemaCollectionName + "]"); + + dataTypeName = schemaCollection.ToString(); + } + break; + + default: + throw new ArgumentOutOfRangeException("providerType"); + + } + + this._schemaTable.Rows.Add(columnName, + _schemaTable.Rows.Count, + columnSize, + numericPrecision, + numericScale, + isUnique, + isKey, + "TraceServer", + "TraceWarehouse", + columnName, + SchemaName, + TableName, + dataType, + allowDbNull, + providerType, + false, // isAliased + false, // isExpression + false, // isIdentity, + false, // isAutoIncrement, + false, // isRowVersion, + false, // isHidden, + isLong, + true, // isReadOnly, + dataType, + dataTypeName, + xmlSchemaCollectionDatabase, + xmlSchemaCollectionOwningSchema, + xmlSchemaCollectionName); + + this._columnMappings.Add(new SqlBulkCopyColumnMapping(columnName, columnName)); + } + + #endregion + + #region Constructors + + private const string IsIdentitySchemaColumn = "IsIdentity"; + + private const string DataTypeNameSchemaColumn = "DataTypeName"; + + private const string XmlSchemaCollectionDatabaseSchemaColumn = "XmlSchemaCollectionDatabase"; + + private const string XmlSchemaCollectionOwningSchemaSchemaColumn = "XmlSchemaCollectionOwningSchema"; + + private const string XmlSchemaCollectionNameSchemaColumn = "XmlSchemaCollectionName"; + + /// + /// Constructor. 
+ /// + protected BulkDataReader() + { + this._schemaTable.Locale = System.Globalization.CultureInfo.InvariantCulture; + + DataColumnCollection columns = _schemaTable.Columns; + + columns.Add(SchemaTableColumn.ColumnName, typeof(System.String)); + columns.Add(SchemaTableColumn.ColumnOrdinal, typeof(System.Int32)); + columns.Add(SchemaTableColumn.ColumnSize, typeof(System.Int32)); + columns.Add(SchemaTableColumn.NumericPrecision, typeof(System.Int16)); + columns.Add(SchemaTableColumn.NumericScale, typeof(System.Int16)); + columns.Add(SchemaTableColumn.IsUnique, typeof(System.Boolean)); + columns.Add(SchemaTableColumn.IsKey, typeof(System.Boolean)); + columns.Add(SchemaTableOptionalColumn.BaseServerName, typeof(System.String)); + columns.Add(SchemaTableOptionalColumn.BaseCatalogName, typeof(System.String)); + columns.Add(SchemaTableColumn.BaseColumnName, typeof(System.String)); + columns.Add(SchemaTableColumn.BaseSchemaName, typeof(System.String)); + columns.Add(SchemaTableColumn.BaseTableName, typeof(System.String)); + columns.Add(SchemaTableColumn.DataType, typeof(System.Type)); + columns.Add(SchemaTableColumn.AllowDBNull, typeof(System.Boolean)); + columns.Add(SchemaTableColumn.ProviderType, typeof(System.Int32)); + columns.Add(SchemaTableColumn.IsAliased, typeof(System.Boolean)); + columns.Add(SchemaTableColumn.IsExpression, typeof(System.Boolean)); + columns.Add(BulkDataReader.IsIdentitySchemaColumn, typeof(System.Boolean)); + columns.Add(SchemaTableOptionalColumn.IsAutoIncrement, typeof(System.Boolean)); + columns.Add(SchemaTableOptionalColumn.IsRowVersion, typeof(System.Boolean)); + columns.Add(SchemaTableOptionalColumn.IsHidden, typeof(System.Boolean)); + columns.Add(SchemaTableColumn.IsLong, typeof(System.Boolean)); + columns.Add(SchemaTableOptionalColumn.IsReadOnly, typeof(System.Boolean)); + columns.Add(SchemaTableOptionalColumn.ProviderSpecificDataType, typeof(System.Type)); + columns.Add(BulkDataReader.DataTypeNameSchemaColumn, typeof(System.String)); + 
columns.Add(BulkDataReader.XmlSchemaCollectionDatabaseSchemaColumn, typeof(System.String)); + columns.Add(BulkDataReader.XmlSchemaCollectionOwningSchemaSchemaColumn, typeof(System.String)); + columns.Add(BulkDataReader.XmlSchemaCollectionNameSchemaColumn, typeof(System.String)); + } + + #endregion + + #region IDataReader + + /// + /// Gets a value indicating the depth of nesting for the current row. (Inherited from .) + /// + /// + /// does not support nested result sets so this method always returns 0. + /// + /// + public int Depth + { + get { return 0; } + } + + /// + /// Gets the number of columns in the current row. (Inherited from .) + /// + /// + public int FieldCount + { + get { return GetSchemaTable().Rows.Count; } + } + + /// + /// Is the bulk copy process open? + /// + bool _isOpen = true; + + /// + /// Gets a value indicating whether the data reader is closed. (Inherited from .) + /// + /// + public bool IsClosed + { + get { return !_isOpen; } + } + + /// + /// Gets the column located at the specified index. (Inherited from .) + /// + /// + /// No column with the specified index was found. + /// + /// + /// The zero-based index of the column to get. + /// + /// + /// The column located at the specified index as an . + /// + /// + public object this[int i] + { + get { return GetValue(i); } + } + + /// + /// Gets the column with the specified name. (Inherited from .) + /// + /// + /// No column with the specified name was found. + /// + /// + /// The name of the column to find. + /// + /// + /// The column located at the specified name as an . + /// + /// + public object this[string name] + { + get { return GetValue(GetOrdinal(name)); } + } + + /// + /// Gets the number of rows changed, inserted, or deleted by execution of the SQL statement. (Inherited from .) + /// + /// + /// Always returns -1 which is the expected behaviour for statements. + /// + /// + public virtual int RecordsAffected + { + get { return -1; } + } + + /// + /// Closes the . 
(Inherited from .) + /// + /// + public void Close() + { + this._isOpen = false; + } + + /// + /// Gets the value of the specified column as a . (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column. + /// + /// + public bool GetBoolean(int i) + { + return (bool)GetValue(i); + } + + /// + /// Gets the value of the specified column as a . (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column. + /// + /// + public byte GetByte(int i) + { + return (byte)GetValue(i); + } + + /// + /// Reads a stream of bytes from the specified column offset into the buffer as an array, starting at the given buffer offset. + /// (Inherited from .) + /// + /// + /// If you pass a buffer that is null, returns the length of the row in bytes. + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The index within the field from which to start the read operation. + /// + /// + /// The buffer into which to read the stream of bytes. + /// + /// + /// The index for buffer to start the read operation. + /// + /// + /// The number of bytes to read. + /// + /// + /// The actual number of bytes read. + /// + /// + public long GetBytes(int i, + long fieldOffset, + byte[] buffer, + int bufferoffset, + int length) + { + byte[] data = (byte[])GetValue(i); + + if (buffer != null) + { + Array.Copy(data, fieldOffset, buffer, bufferoffset, length); + } + + return data.LongLength; + } + + /// + /// Gets the value of the specified column as a . (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column. 
+ /// + /// + public char GetChar(int i) + { + char result; + + object data = GetValue(i); + char? dataAsChar = data as char?; + char[] dataAsCharArray = data as char[]; + string dataAsString = data as string; + + if (dataAsChar.HasValue) + { + result = dataAsChar.Value; + } + else if (dataAsCharArray != null && + dataAsCharArray.Length == 1) + { + result = dataAsCharArray[0]; + } + else if (dataAsString != null && + dataAsString.Length == 1) + { + result = dataAsString[0]; + } + else + { + throw new InvalidOperationException("GetValue did not return a Char compatible type."); + } + + return result; + } + + /// + /// Reads a stream of characters from the specified column offset into the buffer as an array, starting at the given buffer offset. + /// (Inherited from .) + /// + /// + /// If you pass a buffer that is null, returns the length of the row in bytes. + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The index within the field from which to start the read operation. + /// + /// + /// The buffer into which to read the stream of characters. + /// + /// + /// The index for buffer to start the read operation. + /// + /// + /// The number of characters to read. + /// + /// + /// The actual number of characters read. 
+ /// + /// + public long GetChars(int i, + long fieldoffset, + char[] buffer, + int bufferoffset, + int length) + { + object data = GetValue(i); + + string dataAsString = data as string; + char[] dataAsCharArray = data as char[]; + + if (dataAsString != null) + { + dataAsCharArray = dataAsString.ToCharArray((int)fieldoffset, length); + } + else if (dataAsCharArray == null) + { + throw new InvalidOperationException("GetValue did not return either a Char array or a String."); + } + + if (buffer != null) + { + Array.Copy(dataAsCharArray, fieldoffset, buffer, bufferoffset, length); + } + + return dataAsCharArray.LongLength; + } + + /// + /// Returns an IDataReader for the specified column ordinal. (Inherited from .) + /// + /// + /// does not support nested result sets so this method always returns null. + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The for the specified column ordinal (null). + /// + /// + public IDataReader GetData(int i) + { + if (i < 0 || i >= this.FieldCount) + { + throw new ArgumentOutOfRangeException("i"); + } + + return null; + } + + /// + /// The data type information for the specified field. (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The data type information for the specified field. + /// + /// + public string GetDataTypeName(int i) + { + return GetFieldType(i).Name; + } + + /// + /// Gets the value of the specified column as a . (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column. + /// + /// + public DateTime GetDateTime(int i) + { + return (DateTime)GetValue(i); + } + + /// + /// Gets the value of the specified column as a . + /// + /// + /// The index passed was outside the range of 0 through . 
+ /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column. + /// + public DateTimeOffset GetDateTimeOffset(int i) + { + return (DateTimeOffset)GetValue(i); + } + + /// + /// Gets the value of the specified column as a . (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column. + /// + /// + public decimal GetDecimal(int i) + { + return (Decimal)GetValue(i); + } + + /// + /// Gets the value of the specified column as a . (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column. + /// + /// + public double GetDouble(int i) + { + return (double)GetValue(i); + } + + /// + /// Gets the information corresponding to the type of that would be returned from . + /// (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The information corresponding to the type of that would be returned from . + /// + /// + public Type GetFieldType(int i) + { + return (Type)GetSchemaTable().Rows[i][SchemaTableColumn.DataType]; + } + + /// + /// Gets the value of the specified column as a . (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column. + /// + /// + public float GetFloat(int i) + { + return (float)this[i]; + } + + /// + /// Gets the value of the specified column as a . (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column. 
+ /// + /// + public Guid GetGuid(int i) + { + return (Guid)GetValue(i); + } + + /// + /// Gets the value of the specified column as a . (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column. + /// + /// + public short GetInt16(int i) + { + return (short)GetValue(i); + } + + /// + /// Gets the value of the specified column as a . (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column. + /// + /// + public int GetInt32(int i) + { + return (int)GetValue(i); + } + + /// + /// Gets the value of the specified column as a . (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column. + /// + /// + public long GetInt64(int i) + { + return (long)GetValue(i); + } + + /// + /// Gets the name for the field to find. (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The name of the field or the empty string (""), if there is no value to return. + /// + /// + public string GetName(int i) + { + return (string)GetSchemaTable().Rows[i][SchemaTableColumn.ColumnName]; + } + + /// + /// Return the index of the named field. (Inherited from .) + /// + /// + /// The index of the named field was not found. + /// + /// + /// The name of the field to find. + /// + /// + /// The index of the named field. + /// + /// + public int GetOrdinal(string name) + { + if (name == null) // Empty strings are handled as a IndexOutOfRangeException. 
+ { + throw new ArgumentNullException("name"); + } + + int result = -1; + + int rowCount = FieldCount; + + DataRowCollection schemaRows = GetSchemaTable().Rows; + + // Case sensitive search + for (int ordinal = 0; ordinal < rowCount; ordinal++) + { + if (String.Equals((string)schemaRows[ordinal][SchemaTableColumn.ColumnName], name, StringComparison.Ordinal)) + { + result = ordinal; + } + } + + if (result == -1) + { + // Case insensitive search. + for (int ordinal = 0; ordinal < rowCount; ordinal++) + { + if (String.Equals((string)schemaRows[ordinal][SchemaTableColumn.ColumnName], name, StringComparison.OrdinalIgnoreCase)) + { + result = ordinal; + } + } + } + + if (result == -1) + { + throw new IndexOutOfRangeException(name); + } + + return result; + } + + /// + /// Returns a that describes the column metadata of the . (Inherited from .) + /// + /// + /// The is closed. + /// + /// + /// A that describes the column metadata. + /// + /// + public DataTable GetSchemaTable() + { + if (IsClosed) + { + throw new InvalidOperationException("The IDataReader is closed."); + } + + if (_schemaTable.Rows.Count == 0) + { + // Need to add the column definitions and mappings + _schemaTable.TableName = TableName; + + AddSchemaTableRows(); + + Debug.Assert(_schemaTable.Rows.Count == FieldCount); + } + + return _schemaTable; + } + + /// + /// Gets the value of the specified column as a . (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column. + /// + /// + public string GetString(int i) + { + return (string)GetValue(i); + } + + /// + /// Gets the value of the specified column as a . + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column. 
+ /// + public TimeSpan GetTimeSpan(int i) + { + return (TimeSpan)GetValue(i); + } + + /// + /// Gets the value of the specified column as a . (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column. + /// + /// + public abstract object GetValue(int i); + + /// + /// Populates an array of objects with the column values of the current record. (Inherited from .) + /// + /// + /// was null. + /// + /// + /// An array of to copy the attribute fields into. + /// + /// + /// The number of instances of in the array. + /// + /// + public int GetValues(object[] values) + { + if (values == null) + { + throw new ArgumentNullException("values"); + } + + int fieldCount = Math.Min(FieldCount, values.Length); + + for (int i = 0; i < fieldCount; i++) + { + values[i] = GetValue(i); + } + + return fieldCount; + } + + /// + /// Return whether the specified field is set to null. (Inherited from .) + /// + /// + /// The index passed was outside the range of 0 through . + /// + /// + /// The zero-based column ordinal. + /// + /// + /// True if the specified field is set to null; otherwise, false. + /// + /// + public bool IsDBNull(int i) + { + object data = GetValue(i); + + return data == null || Convert.IsDBNull(data); + } + + /// + /// Advances the data reader to the next result, when reading the results of batch SQL statements. (Inherited from .) + /// + /// + /// for returns a single result set so false is always returned. + /// + /// + /// True if there are more rows; otherwise, false. for returns a single result set so false is always returned. + /// + /// + public bool NextResult() + { + return false; + } + + /// + /// Advances the to the next record. (Inherited from .) + /// + /// + /// True if there are more rows; otherwise, false. 
+ /// + /// + public abstract bool Read(); + + #endregion + + #region IDisposable + + /// + /// Has the object been disposed? + /// + bool _disposed = false; + + /// + /// Dispose of any disposable and expensive resources. + /// + /// + /// Is this call the result of a call? + /// + protected virtual void Dispose(bool disposing) + { + if (!this._disposed) + { + this._disposed = true; + + if (disposing) + { + if (_schemaTable != null) + { + _schemaTable.Dispose(); + this._schemaTable = null; + } + + this._columnMappings = null; + + this._isOpen = false; + + GC.SuppressFinalize(this); + } + } + } + + /// + /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. (Inherited from .) + /// + /// + public void Dispose() + { + Dispose(true); + } + + /// + /// Finalizer + /// + /// + /// has no unmanaged resources but a subclass may thus a finalizer is required. + /// + ~BulkDataReader() + { + Dispose(false); + } + + #endregion + + } +} diff --git a/src/Umbraco.Core/Persistence/DatabaseModelDefinitions/DefinitionFactory.cs b/src/Umbraco.Core/Persistence/DatabaseModelDefinitions/DefinitionFactory.cs index 9b08279716..e3c35e01b4 100644 --- a/src/Umbraco.Core/Persistence/DatabaseModelDefinitions/DefinitionFactory.cs +++ b/src/Umbraco.Core/Persistence/DatabaseModelDefinitions/DefinitionFactory.cs @@ -9,7 +9,7 @@ namespace Umbraco.Core.Persistence.DatabaseModelDefinitions { internal static class DefinitionFactory { - public static TableDefinition GetTableDefinition(Type modelType) + public static TableDefinition GetTableDefinition(ISqlSyntaxProvider syntaxProvider, Type modelType) { //Looks for PetaPoco's TableNameAtribute for the name of the table //If no attribute is set we use the name of the Type as the default convention @@ -32,7 +32,7 @@ namespace Umbraco.Core.Persistence.DatabaseModelDefinitions //Otherwise use the name of the property itself as the default convention var columnAttribute = propertyInfo.FirstAttribute(); 
string columnName = columnAttribute != null ? columnAttribute.Name : propertyInfo.Name; - var columnDefinition = GetColumnDefinition(modelType, propertyInfo, columnName, tableName); + var columnDefinition = GetColumnDefinition(syntaxProvider, modelType, propertyInfo, columnName, tableName); tableDefinition.Columns.Add(columnDefinition); //Creates a foreignkey definition and adds it to the collection on the table definition @@ -58,7 +58,7 @@ namespace Umbraco.Core.Persistence.DatabaseModelDefinitions return tableDefinition; } - public static ColumnDefinition GetColumnDefinition(Type modelType, PropertyInfo propertyInfo, string columnName, string tableName) + public static ColumnDefinition GetColumnDefinition(ISqlSyntaxProvider syntaxProvider, Type modelType, PropertyInfo propertyInfo, string columnName, string tableName) { var definition = new ColumnDefinition{ Name = columnName, TableName = tableName, ModificationType = ModificationType.Create }; @@ -110,7 +110,7 @@ namespace Umbraco.Core.Persistence.DatabaseModelDefinitions { //Special case for MySQL as it can't have multiple default DateTime values, which //is what the umbracoServer table definition is trying to create - if (SqlSyntaxContext.SqlSyntaxProvider is MySqlSyntaxProvider && definition.TableName == "umbracoServer" && + if (syntaxProvider is MySqlSyntaxProvider && definition.TableName == "umbracoServer" && definition.TableName.ToLowerInvariant() == "lastNotifiedDate".ToLowerInvariant()) return definition; diff --git a/src/Umbraco.Core/Persistence/DatabaseSchemaHelper.cs b/src/Umbraco.Core/Persistence/DatabaseSchemaHelper.cs index bcc1528d3f..944d5a8a74 100644 --- a/src/Umbraco.Core/Persistence/DatabaseSchemaHelper.cs +++ b/src/Umbraco.Core/Persistence/DatabaseSchemaHelper.cs @@ -99,7 +99,7 @@ namespace Umbraco.Core.Persistence public void CreateTable(bool overwrite, Type modelType) { - var tableDefinition = DefinitionFactory.GetTableDefinition(modelType); + var tableDefinition = 
DefinitionFactory.GetTableDefinition(_syntaxProvider, modelType); var tableName = tableDefinition.Name; string createSql = _syntaxProvider.Format(tableDefinition); diff --git a/src/Umbraco.Core/Persistence/Migrations/Initial/DatabaseSchemaCreation.cs b/src/Umbraco.Core/Persistence/Migrations/Initial/DatabaseSchemaCreation.cs index 423c847c47..ab477953b4 100644 --- a/src/Umbraco.Core/Persistence/Migrations/Initial/DatabaseSchemaCreation.cs +++ b/src/Umbraco.Core/Persistence/Migrations/Initial/DatabaseSchemaCreation.cs @@ -159,7 +159,7 @@ namespace Umbraco.Core.Persistence.Migrations.Initial foreach (var item in OrderedTables.OrderBy(x => x.Key)) { - var tableDefinition = DefinitionFactory.GetTableDefinition(item.Value); + var tableDefinition = DefinitionFactory.GetTableDefinition(_sqlSyntaxProvider, item.Value); result.TableDefinitions.Add(tableDefinition); } diff --git a/src/Umbraco.Core/Persistence/PetaPocoExtensions.cs b/src/Umbraco.Core/Persistence/PetaPocoExtensions.cs index 610b3d9b02..d1667456e6 100644 --- a/src/Umbraco.Core/Persistence/PetaPocoExtensions.cs +++ b/src/Umbraco.Core/Persistence/PetaPocoExtensions.cs @@ -3,9 +3,13 @@ using System.Collections.Generic; using System.Data; using System.Data.Common; using System.Data.SqlClient; +using System.Data.SqlServerCe; using System.Linq; using System.Text.RegularExpressions; +using MySql.Data.MySqlClient; +using StackExchange.Profiling.Data; using Umbraco.Core.Logging; +using Umbraco.Core.Persistence.DatabaseModelDefinitions; using Umbraco.Core.Persistence.Querying; using Umbraco.Core.Persistence.SqlSyntax; @@ -150,7 +154,7 @@ namespace Umbraco.Core.Persistence { //this fancy regex will only match a single @ not a double, etc... 
var regex = new Regex("(?(overwrite); } + /// + /// Performs the bulk insertion in the context of a current transaction with an optional parameter to complete the transaction + /// when finished + /// + /// + /// + /// public static void BulkInsertRecords(this Database db, IEnumerable collection) { //don't do anything if there are no records. @@ -180,10 +191,97 @@ namespace Umbraco.Core.Persistence using (var tr = db.GetTransaction()) { - db.BulkInsertRecords(collection, tr, true); + db.BulkInsertRecords(collection, tr, SqlSyntaxContext.SqlSyntaxProvider, true); } } + /// + /// Performs the bulk insertion in the context of a current transaction with an optional parameter to complete the transaction + /// when finished + /// + /// + /// + /// + /// + /// + /// + /// If this is false this will try to just generate bulk insert statements instead of using the current SQL platform's bulk + /// insert logic. For SQLCE, bulk insert statements do not work so if this is false it will insert one at a time. + /// + /// + /// The number of items inserted + public static int BulkInsertRecords(this Database db, + IEnumerable collection, + Transaction tr, + ISqlSyntaxProvider syntaxProvider, + bool useNativeSqlPlatformBulkInsert = true, + bool commitTrans = false) + { + + //don't do anything if there are no records. + if (collection.Any() == false) + { + return 0; + } + + var pd = Database.PocoData.ForType(typeof(T)); + if (pd == null) throw new InvalidOperationException("Could not find PocoData for " + typeof(T)); + + try + { + int processed = 0; + + var usedNativeSqlPlatformInserts = useNativeSqlPlatformBulkInsert + && NativeSqlPlatformBulkInsertRecords(db, syntaxProvider, pd, collection, out processed); + + if (usedNativeSqlPlatformInserts == false) + { + //if it is sql ce or it is a sql server version less than 2008, we need to do individual inserts. 
+ var sqlServerSyntax = syntaxProvider as SqlServerSyntaxProvider; + if ((sqlServerSyntax != null && (int) sqlServerSyntax.GetVersionName(db) < (int) SqlServerVersionName.V2008) + || syntaxProvider is SqlCeSyntaxProvider) + { + //SqlCe doesn't support bulk insert statements! + foreach (var poco in collection) + { + db.Insert(poco); + } + } + else + { + //we'll need to generate insert statements instead + + string[] sqlStatements; + var cmds = db.GenerateBulkInsertCommand(pd, collection, out sqlStatements); + for (var i = 0; i < sqlStatements.Length; i++) + { + using (var cmd = cmds[i]) + { + cmd.CommandText = sqlStatements[i]; + cmd.ExecuteNonQuery(); + processed++; + } + } + } + } + + if (commitTrans) + { + tr.Complete(); + } + return processed; + } + catch + { + if (commitTrans) + { + tr.Dispose(); + } + throw; + } + + } + /// /// Performs the bulk insertion in the context of a current transaction with an optional parameter to complete the transaction /// when finished @@ -193,64 +291,10 @@ namespace Umbraco.Core.Persistence /// /// /// + [Obsolete("Use the method that specifies an SqlSyntaxContext instance instead")] public static void BulkInsertRecords(this Database db, IEnumerable collection, Transaction tr, bool commitTrans = false) { - //TODO: We should change this to use BulkCopy, as an example see: - // https://ayende.com/blog/4137/nhibernate-perf-tricks - // Even though this just generates lots of raw sql INSERT statements BulkCopy is the fastest it can possibly be - // and we should be able to do this using the current connection from the PetaPoco db instance (and would probably be much cleaner) - // - // BulkCopy is available for SQL Server and MySqlBulkLoader is available for MySql, pretty sure BulkCopy works for SQLCE so - // we should be covered and of course could fallback to this method if that is not our database. But we would get huge perf - // increases for this. - - - //don't do anything if there are no records. 
- if (collection.Any() == false) - return; - - try - { - //if it is sql ce or it is a sql server version less than 2008, we need to do individual inserts. - var sqlServerSyntax = SqlSyntaxContext.SqlSyntaxProvider as SqlServerSyntaxProvider; - - if ((sqlServerSyntax != null && (int)sqlServerSyntax.GetVersionName(db) < (int)SqlServerVersionName.V2008) - || SqlSyntaxContext.SqlSyntaxProvider is SqlCeSyntaxProvider) - { - //SqlCe doesn't support bulk insert statements! - - foreach (var poco in collection) - { - db.Insert(poco); - } - } - else - { - string[] sqlStatements; - var cmds = db.GenerateBulkInsertCommand(collection, db.Connection, out sqlStatements); - for (var i = 0; i < sqlStatements.Length; i++) - { - using (var cmd = cmds[i]) - { - cmd.CommandText = sqlStatements[i]; - cmd.ExecuteNonQuery(); - } - } - } - - if (commitTrans) - { - tr.Complete(); - } - } - catch - { - if (commitTrans) - { - tr.Dispose(); - } - throw; - } + db.BulkInsertRecords(collection, tr, SqlSyntaxContext.SqlSyntaxProvider, commitTrans); } /// @@ -259,8 +303,8 @@ namespace Umbraco.Core.Persistence /// /// /// - /// /// + /// /// Sql commands with populated command parameters required to execute the sql statement /// /// The limits for number of parameters are 2100 (in sql server, I think there's many more allowed in mysql). So @@ -269,33 +313,24 @@ namespace Umbraco.Core.Persistence /// that is max. I've reduced it to 2000 anyways. 
/// internal static IDbCommand[] GenerateBulkInsertCommand( - this Database db, - IEnumerable collection, - IDbConnection connection, + this Database db, + Database.PocoData pd, + IEnumerable collection, out string[] sql) { - //A filter used below a few times to get all columns except result cols and not the primary key if it is auto-incremental - Func, bool> includeColumn = (data, column) => - { - if (column.Value.ResultColumn) return false; - if (data.TableInfo.AutoIncrement && column.Key == data.TableInfo.PrimaryKey) return false; - return true; - }; - - var pd = Database.PocoData.ForType(typeof(T)); var tableName = db.EscapeTableName(pd.TableInfo.TableName); //get all columns to include and format for sql - var cols = string.Join(", ", + var cols = string.Join(", ", pd.Columns - .Where(c => includeColumn(pd, c)) + .Where(c => IncludeColumn(pd, c)) .Select(c => tableName + "." + db.EscapeSqlIdentifier(c.Key)).ToArray()); var itemArray = collection.ToArray(); //calculate number of parameters per item - var paramsPerItem = pd.Columns.Count(i => includeColumn(pd, i)); - + var paramsPerItem = pd.Columns.Count(i => IncludeColumn(pd, i)); + //Example calc: // Given: we have 4168 items in the itemArray, each item contains 8 command parameters (values to be inserterted) // 2100 / 8 = 262.5 @@ -316,14 +351,14 @@ namespace Umbraco.Core.Persistence .Skip(tIndex * (int)itemsPerTrans) .Take((int)itemsPerTrans); - var cmd = db.CreateCommand(connection, ""); + var cmd = db.CreateCommand(db.Connection, string.Empty); var pocoValues = new List(); var index = 0; foreach (var poco in itemsForTrans) { var values = new List(); //get all columns except result cols and not the primary key if it is auto-incremental - foreach (var i in pd.Columns.Where(x => includeColumn(pd, x))) + foreach (var i in pd.Columns.Where(x => IncludeColumn(pd, x))) { db.AddParam(cmd, i.Value.GetValue(poco), "@"); values.Add(string.Format("{0}{1}", "@", index++)); @@ -331,14 +366,211 @@ namespace 
Umbraco.Core.Persistence pocoValues.Add("(" + string.Join(",", values.ToArray()) + ")"); } - var sqlResult = string.Format("INSERT INTO {0} ({1}) VALUES {2}", tableName, cols, string.Join(", ", pocoValues)); + var sqlResult = string.Format("INSERT INTO {0} ({1}) VALUES {2}", tableName, cols, string.Join(", ", pocoValues)); sqlQueries.Add(sqlResult); commands.Add(cmd); } sql = sqlQueries.ToArray(); - return commands.ToArray(); + return commands.ToArray(); + } + + /// + /// A filter used below a few times to get all columns except result cols and not the primary key if it is auto-incremental + /// + /// + /// + /// + private static bool IncludeColumn(Database.PocoData data, KeyValuePair column) + { + if (column.Value.ResultColumn) return false; + if (data.TableInfo.AutoIncrement && column.Key == data.TableInfo.PrimaryKey) return false; + return true; + } + + /// + /// Bulk insert records with Sql BulkCopy or TableDirect or whatever sql platform specific bulk insert records should be used + /// + /// + /// + /// + /// + /// The number of records inserted + private static bool NativeSqlPlatformBulkInsertRecords(Database db, ISqlSyntaxProvider syntaxProvider, Database.PocoData pd, IEnumerable collection, out int processed) + { + + var dbConnection = db.Connection; + + //unwrap the profiled connection if there is one + var profiledConnection = dbConnection as ProfiledDbConnection; + if (profiledConnection != null) + { + dbConnection = profiledConnection.InnerConnection; + } + + //check if it's SQL or SqlCe + + var sqlConnection = dbConnection as SqlConnection; + if (sqlConnection != null) + { + processed = BulkInsertRecordsSqlServer(db, (SqlServerSyntaxProvider)syntaxProvider, pd, collection); + return true; + } + + var sqlCeConnection = dbConnection as SqlCeConnection; + if (sqlCeConnection != null) + { + processed = BulkInsertRecordsSqlCe(db, pd, collection); + return true; + } + + //could not use the SQL server's specific bulk insert operations + processed = 0; + 
return false; + + } + + /// + /// Logic used to perform bulk inserts with SqlCe's TableDirect + /// + /// + /// + /// + /// + /// + internal static int BulkInsertRecordsSqlCe(Database db, + Database.PocoData pd, + IEnumerable collection) + { + var cols = pd.Columns.ToArray(); + + using (var cmd = db.CreateCommand(db.Connection, string.Empty)) + { + cmd.CommandText = pd.TableInfo.TableName; + cmd.CommandType = CommandType.TableDirect; + //cmd.Transaction = GetTypedTransaction(db.Connection.); + + //get the real command + using (var sqlCeCommand = GetTypedCommand(cmd)) + { + // This seems to cause problems, I think this is primarily used for retrieval, not + // inserting. see: https://msdn.microsoft.com/en-us/library/system.data.sqlserverce.sqlcecommand.indexname%28v=vs.100%29.aspx?f=255&MSPPError=-2147217396 + //sqlCeCommand.IndexName = pd.TableInfo.PrimaryKey; + + var count = 0; + using (var rs = sqlCeCommand.ExecuteResultSet(ResultSetOptions.Updatable)) + { + var rec = rs.CreateRecord(); + + foreach (var item in collection) + { + for (var i = 0; i < cols.Length; i++) + { + //skip the index if this shouldn't be included (i.e. 
PK) + if (IncludeColumn(pd, cols[i])) + { + var val = cols[i].Value.GetValue(item); + rec.SetValue(i, val); + } + } + rs.Insert(rec); + count++; + } + } + return count; + } + + } + } + + /// + /// Logic used to perform bulk inserts with SqlServer's BulkCopy + /// + /// + /// + /// + /// + /// + /// + internal static int BulkInsertRecordsSqlServer(Database db, SqlServerSyntaxProvider sqlSyntaxProvider, + Database.PocoData pd, IEnumerable collection) + { + //NOTE: We need to use the original db.Connection here to create the command, but we need to pass in the typed + // connection below to the SqlBulkCopy + using (var cmd = db.CreateCommand(db.Connection, string.Empty)) + { + using (var copy = new SqlBulkCopy( + GetTypedConnection(db.Connection), + SqlBulkCopyOptions.Default, + GetTypedTransaction(cmd.Transaction)) + { + BulkCopyTimeout = 10000, + DestinationTableName = pd.TableInfo.TableName + }) + { + //var cols = pd.Columns.Where(x => IncludeColumn(pd, x)).Select(x => x.Value).ToArray(); + + using (var bulkReader = new PocoDataDataReader(collection, pd, sqlSyntaxProvider)) + { + copy.WriteToServer(bulkReader); + + return bulkReader.RecordsAffected; + } + } + } + } + + + /// + /// Returns the underlying connection as a typed connection - this is used to unwrap the profiled mini profiler stuff + /// + /// + /// + /// + private static TConnection GetTypedConnection(IDbConnection connection) + where TConnection : class, IDbConnection + { + var profiled = connection as ProfiledDbConnection; + if (profiled != null) + { + return profiled.InnerConnection as TConnection; + } + return connection as TConnection; + } + + /// + /// Returns the underlying connection as a typed connection - this is used to unwrap the profiled mini profiler stuff + /// + /// + /// + /// + private static TTransaction GetTypedTransaction(IDbTransaction connection) + where TTransaction : class, IDbTransaction + { + var profiled = connection as ProfiledDbTransaction; + if (profiled != null) + { + 
return profiled.WrappedTransaction as TTransaction; + } + return connection as TTransaction; + } + + /// + /// Returns the underlying connection as a typed connection - this is used to unwrap the profiled mini profiler stuff + /// + /// + /// + /// + private static TCommand GetTypedCommand(IDbCommand command) + where TCommand : class, IDbCommand + { + var profiled = command as ProfiledDbCommand; + if (profiled != null) + { + return profiled.InternalCommand as TCommand; + } + return command as TCommand; } [Obsolete("Use the DatabaseSchemaHelper instead")] @@ -415,8 +647,8 @@ namespace Umbraco.Core.Persistence return ApplicationContext.Current.DatabaseContext.DatabaseProvider; } - + } - + } \ No newline at end of file diff --git a/src/Umbraco.Core/Persistence/PocoDataDataReader.cs b/src/Umbraco.Core/Persistence/PocoDataDataReader.cs new file mode 100644 index 0000000000..b0479a311a --- /dev/null +++ b/src/Umbraco.Core/Persistence/PocoDataDataReader.cs @@ -0,0 +1,159 @@ +using System; +using System.Collections.Generic; +using System.Data; +using System.Linq; +using Umbraco.Core.Persistence.DatabaseAnnotations; +using Umbraco.Core.Persistence.DatabaseModelDefinitions; +using Umbraco.Core.Persistence.SqlSyntax; + +namespace Umbraco.Core.Persistence +{ + /// + /// A data reader used for reading collections of PocoData entity types + /// + /// + /// We are using a custom data reader so that tons of memory is not consumed when rebuilding this table, previously + /// we'd generate SQL insert statements, but we'd have to put all of the XML structures into memory first. Alternatively + /// we can use .net's DataTable, but this also requires putting everything into memory. By using a DataReader we don't have to + /// store every content item and it's XML structure in memory to get it into the DB, we can stream it into the db with this + /// reader. 
+ /// + internal class PocoDataDataReader : BulkDataReader + where TSyntax : ISqlSyntaxProvider + { + private readonly MicrosoftSqlSyntaxProviderBase _sqlSyntaxProvider; + private readonly TableDefinition _tableDefinition; + private readonly Database.PocoColumn[] _readerColumns; + private readonly IEnumerator _enumerator; + private readonly ColumnDefinition[] _columnDefinitions; + private int _recordsAffected = -1; + + public PocoDataDataReader( + IEnumerable dataSource, + Database.PocoData pd, + MicrosoftSqlSyntaxProviderBase sqlSyntaxProvider) + { + if (dataSource == null) throw new ArgumentNullException("dataSource"); + if (sqlSyntaxProvider == null) throw new ArgumentNullException("sqlSyntaxProvider"); + + _tableDefinition = DefinitionFactory.GetTableDefinition(sqlSyntaxProvider, pd.type); + if (_tableDefinition == null) throw new InvalidOperationException("No table definition found for type " + pd.type); + + _readerColumns = pd.Columns.Select(x => x.Value).ToArray(); + _sqlSyntaxProvider = sqlSyntaxProvider; + _enumerator = dataSource.GetEnumerator(); + _columnDefinitions = _tableDefinition.Columns.ToArray(); + + } + + protected override string SchemaName + { + get { return _tableDefinition.SchemaName; } + } + + protected override string TableName + { + get { return _tableDefinition.Name; } + } + + public override int RecordsAffected + { + get { return _recordsAffected <= 0 ? 
-1 : _recordsAffected; } + } + + /// + /// This will automatically add the schema rows based on the Poco table definition and the columns passed in + /// + protected override void AddSchemaTableRows() + { + //var colNames = _readerColumns.Select(x => x.ColumnName).ToArray(); + //foreach (var col in _columnDefinitions.Where(x => colNames.Contains(x.Name, StringComparer.OrdinalIgnoreCase))) + foreach (var col in _columnDefinitions) + { + var sqlDbType = SqlDbType.NVarChar; + if (col.HasSpecialDbType) + { + //get the SqlDbType from the 'special type' + switch (col.DbType) + { + case SpecialDbTypes.NTEXT: + sqlDbType = SqlDbType.NText; + break; + case SpecialDbTypes.NCHAR: + sqlDbType = SqlDbType.NChar; + break; + default: + throw new ArgumentOutOfRangeException(); + } + } + else if (col.Type.HasValue) + { + //get the SqlDbType from the DbType + sqlDbType = _sqlSyntaxProvider.GetSqlDbType(col.Type.Value); + } + else + { + //get the SqlDbType from the clr type + sqlDbType = _sqlSyntaxProvider.GetSqlDbType(col.PropertyType); + } + + AddSchemaTableRow( + col.Name, + col.Size > 0 ? (int?)col.Size : null, + col.Precision > 0 ? (short?)col.Precision : null, + null, col.IsUnique, col.IsIdentity, col.IsNullable, sqlDbType, + null, null, null, null, null); + } + + } + + /// + /// Get the value from the column index for the current object + /// + /// + /// + public override object GetValue(int i) + { + if (_enumerator.Current != null) + { + return _readerColumns[i].GetValue(_enumerator.Current); + //return _columnDefinitions[i]. .GetValue(_enumerator.Current); + } + + return null; + //TODO: Or throw ? 
+ } + + /// + /// Advance the cursor + /// + /// + public override bool Read() + { + var result = _enumerator.MoveNext(); + if (result) + { + if (_recordsAffected == -1) + { + _recordsAffected = 0; + } + _recordsAffected++; + } + return result; + } + + /// + /// Ensure the enumerator is disposed + /// + /// + protected override void Dispose(bool disposing) + { + base.Dispose(disposing); + + if (disposing) + { + _enumerator.Dispose(); + } + } + } +} \ No newline at end of file diff --git a/src/Umbraco.Core/Persistence/Repositories/ContentRepository.cs b/src/Umbraco.Core/Persistence/Repositories/ContentRepository.cs index 9222769247..9827382b00 100644 --- a/src/Umbraco.Core/Persistence/Repositories/ContentRepository.cs +++ b/src/Umbraco.Core/Persistence/Repositories/ContentRepository.cs @@ -248,12 +248,12 @@ namespace Umbraco.Core.Persistence.Repositories var xmlItems = (from descendant in descendants let xml = serializer(descendant) - select new ContentXmlDto { NodeId = descendant.Id, Xml = xml.ToDataString() }).ToArray(); + select new ContentXmlDto { NodeId = descendant.Id, Xml = xml.ToDataString() }); //bulk insert it into the database - Database.BulkInsertRecords(xmlItems, tr); + var count = Database.BulkInsertRecords(xmlItems, tr, SqlSyntax); - processed += xmlItems.Length; + processed += count; pageIndex++; } while (processed < total); diff --git a/src/Umbraco.Core/Persistence/SqlSyntax/DbTypes.cs b/src/Umbraco.Core/Persistence/SqlSyntax/DbTypes.cs index 507db230cc..c9ef4d1f35 100644 --- a/src/Umbraco.Core/Persistence/SqlSyntax/DbTypes.cs +++ b/src/Umbraco.Core/Persistence/SqlSyntax/DbTypes.cs @@ -4,6 +4,7 @@ using System.Data; namespace Umbraco.Core.Persistence.SqlSyntax { + //TODO: TSyntax should be removed, it's not used/needed here public class DbTypes where TSyntax : ISqlSyntaxProvider { diff --git a/src/Umbraco.Core/Persistence/SqlSyntax/MicrosoftSqlSyntaxProviderBase.cs b/src/Umbraco.Core/Persistence/SqlSyntax/MicrosoftSqlSyntaxProviderBase.cs index 
449f5fb3b1..1ea600b6e4 100644 --- a/src/Umbraco.Core/Persistence/SqlSyntax/MicrosoftSqlSyntaxProviderBase.cs +++ b/src/Umbraco.Core/Persistence/SqlSyntax/MicrosoftSqlSyntaxProviderBase.cs @@ -1,4 +1,6 @@ using System; +using System.Data; +using System.Linq; using Umbraco.Core.Persistence.Querying; namespace Umbraco.Core.Persistence.SqlSyntax @@ -133,5 +135,111 @@ namespace Umbraco.Core.Persistence.SqlSyntax throw new ArgumentOutOfRangeException("columnType"); } } + + /// + /// This uses a the DbTypeMap created and custom mapping to resolve the SqlDbType + /// + /// + /// + public virtual SqlDbType GetSqlDbType(Type clrType) + { + var dbType = DbTypeMap.ColumnDbTypeMap.First(x => x.Key == clrType).Value; + return GetSqlDbType(dbType); + } + + /// + /// Returns the mapped SqlDbType for the DbType specified + /// + /// + /// + public virtual SqlDbType GetSqlDbType(DbType dbType) + { + var sqlDbType = SqlDbType.NVarChar; + + //SEE: https://msdn.microsoft.com/en-us/library/cc716729(v=vs.110).aspx + // and https://msdn.microsoft.com/en-us/library/yy6y35y8%28v=vs.110%29.aspx?f=255&MSPPError=-2147217396 + switch (dbType) + { + case DbType.AnsiString: + sqlDbType = SqlDbType.VarChar; + break; + case DbType.Binary: + sqlDbType = SqlDbType.VarBinary; + break; + case DbType.Byte: + sqlDbType = SqlDbType.TinyInt; + break; + case DbType.Boolean: + sqlDbType = SqlDbType.Bit; + break; + case DbType.Currency: + sqlDbType = SqlDbType.Money; + break; + case DbType.Date: + sqlDbType = SqlDbType.Date; + break; + case DbType.DateTime: + sqlDbType = SqlDbType.DateTime; + break; + case DbType.Decimal: + sqlDbType = SqlDbType.Decimal; + break; + case DbType.Double: + sqlDbType = SqlDbType.Float; + break; + case DbType.Guid: + sqlDbType = SqlDbType.UniqueIdentifier; + break; + case DbType.Int16: + sqlDbType = SqlDbType.SmallInt; + break; + case DbType.Int32: + sqlDbType = SqlDbType.Int; + break; + case DbType.Int64: + sqlDbType = SqlDbType.BigInt; + break; + case DbType.Object: + sqlDbType 
= SqlDbType.Variant; + break; + case DbType.SByte: + throw new NotSupportedException("Inferring a SqlDbType from SByte is not supported."); + case DbType.Single: + sqlDbType = SqlDbType.Real; + break; + case DbType.String: + sqlDbType = SqlDbType.NVarChar; + break; + case DbType.Time: + sqlDbType = SqlDbType.Time; + break; + case DbType.UInt16: + throw new NotSupportedException("Inferring a SqlDbType from UInt16 is not supported."); + case DbType.UInt32: + throw new NotSupportedException("Inferring a SqlDbType from UInt32 is not supported."); + case DbType.UInt64: + throw new NotSupportedException("Inferring a SqlDbType from UInt64 is not supported."); + case DbType.VarNumeric: + throw new NotSupportedException("Inferring a VarNumeric from UInt64 is not supported."); + case DbType.AnsiStringFixedLength: + sqlDbType = SqlDbType.Char; + break; + case DbType.StringFixedLength: + sqlDbType = SqlDbType.NChar; + break; + case DbType.Xml: + sqlDbType = SqlDbType.Xml; + break; + case DbType.DateTime2: + sqlDbType = SqlDbType.DateTime2; + break; + case DbType.DateTimeOffset: + sqlDbType = SqlDbType.DateTimeOffset; + break; + default: + throw new ArgumentOutOfRangeException(); + } + return sqlDbType; + } } } \ No newline at end of file diff --git a/src/Umbraco.Core/Umbraco.Core.csproj b/src/Umbraco.Core/Umbraco.Core.csproj index bf70f6afd6..c28cea24cd 100644 --- a/src/Umbraco.Core/Umbraco.Core.csproj +++ b/src/Umbraco.Core/Umbraco.Core.csproj @@ -415,6 +415,7 @@ + @@ -472,6 +473,7 @@ + diff --git a/src/Umbraco.Tests/Persistence/BulkDataReaderTests.cs b/src/Umbraco.Tests/Persistence/BulkDataReaderTests.cs new file mode 100644 index 0000000000..b1e1a79ddb --- /dev/null +++ b/src/Umbraco.Tests/Persistence/BulkDataReaderTests.cs @@ -0,0 +1,2432 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Data; +using System.Data.Common; +using System.Data.SqlClient; +using System.Linq; +using System.Text; +using 
System.Threading.Tasks; +using NUnit.Framework; +using Umbraco.Core.Persistence; + +namespace Umbraco.Tests.Persistence +{ + /// + /// Unit tests for . + /// + /// + /// Borrowed from Microsoft: + /// See: https://blogs.msdn.microsoft.com/anthonybloesch/2013/01/23/bulk-loading-data-with-idatareader-and-sqlbulkcopy/ + /// + [TestFixture] + public class BulkDataReaderTest + { + + #region Test constants + + /// + /// The schema name. + /// + private const string testSchemaName = "TestSchema"; + + /// + /// The table name. + /// + private const string testTableName = "TestTable"; + + /// + /// The test UDT schema name. + /// + private const string testUdtSchemaName = "UdtSchema"; + + /// + /// The test UDT name. + /// + private const string testUdtName = "TestUdt"; + + /// + /// The test XML schema collection database name. + /// + private const string testXmlSchemaCollectionDatabaseName = "XmlDatabase"; + + /// + /// The test XML schema collection owning schema name. + /// + private const string testXMLSchemaCollectionSchemaName = "XmlSchema"; + + /// + /// The test XML schema collection name. + /// + private const string testXMLSchemaCollectionName = "Xml"; + + #endregion + + #region Schema tests + + /// + /// Test that is functioning correctly. + /// + /// + [Test] + public void ColumnMappingsTest() + { + using (BulkDataReaderSubclass testReader = new BulkDataReaderSubclass()) + { + ReadOnlyCollection columnMappings = testReader.ColumnMappings; + + Assert.IsTrue(columnMappings.Count > 0); + Assert.AreEqual(columnMappings.Count, testReader.FieldCount); + + foreach (SqlBulkCopyColumnMapping columnMapping in columnMappings) + { + Assert.AreEqual(columnMapping.SourceColumn, columnMapping.DestinationColumn); + } + } + } + + /// + /// Test that is functioning correctly. 
+ /// + /// + [Test] + public void GetDataTypeNameTest() + { + using (BulkDataReaderSubclass testReader = new BulkDataReaderSubclass()) + { + Assert.IsTrue(testReader.FieldCount > 0); + + for (int currentColumn = 0; currentColumn < testReader.FieldCount; currentColumn++) + { + Assert.AreEqual(testReader.GetDataTypeName(currentColumn), ((Type)testReader.GetSchemaTable().Rows[currentColumn][SchemaTableColumn.DataType]).Name); + } + } + } + + /// + /// Test that is functioning correctly. + /// + /// + [Test] + public void GetFieldTypeTest() + { + using (BulkDataReaderSubclass testReader = new BulkDataReaderSubclass()) + { + Assert.IsTrue(testReader.FieldCount > 0); + + for (int currentColumn = 0; currentColumn < testReader.FieldCount; currentColumn++) + { + Assert.AreEqual(testReader.GetFieldType(currentColumn), testReader.GetSchemaTable().Rows[currentColumn][SchemaTableColumn.DataType]); + } + } + } + + /// + /// Test that is functioning correctly. + /// + /// + [Test] + public void GetOrdinalTest() + { + using (BulkDataReaderSubclass testReader = new BulkDataReaderSubclass()) + { + Assert.IsTrue(testReader.FieldCount > 0); + + for (int currentColumn = 0; currentColumn < testReader.FieldCount; currentColumn++) + { + Assert.AreEqual(testReader.GetOrdinal(testReader.GetName(currentColumn)), currentColumn); + + Assert.AreEqual(testReader.GetOrdinal(testReader.GetName(currentColumn).ToUpperInvariant()), currentColumn); + } + } + } + + /// + /// Test that functions correctly. + /// + /// + /// uses to test legal schema combinations. + /// + /// + [Test] + public void GetSchemaTableTest() + { + using (BulkDataReaderSubclass testReader = new BulkDataReaderSubclass()) + { + DataTable schemaTable = testReader.GetSchemaTable(); + + Assert.IsNotNull(schemaTable); + Assert.IsTrue(schemaTable.Rows.Count > 0); + Assert.AreEqual(schemaTable.Rows.Count, BulkDataReaderSubclass.ExpectedResultSet.Count); + } + } + + /// + /// Test that + /// throws a for null column names. 
+ /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowNullColumnNameTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = null; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.BigInt; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for empty column names. + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowEmptyColumnNameTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = string.Empty; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.BigInt; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for nonpositive column sizes. + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowNonpositiveColumnSizeTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = 0; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.NVarChar; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for nonpositive numeric precision. + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowNonpositiveNumericPrecisionTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = 0; + testReader.NumericScale = 0; + testReader.ProviderType = SqlDbType.Decimal; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for negative numeric scale. + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowNegativeNumericScaleTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = 5; + testReader.NumericScale = -1; + testReader.ProviderType = SqlDbType.Decimal; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for binary column without a column size. + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowBinaryWithoutSizeTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Binary; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for binary column with a column size that is too large (>8000). + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowBinaryWithTooLargeSizeTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = 8001; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Binary; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for char column without a column size. + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowCharWithoutSizeTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Char; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for char column with a column size that is too large (>8000). + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowCharWithTooLargeSizeTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = 8001; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Char; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for decimal column without a column precision. + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowDecimalWithoutPrecisionTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = 5; + testReader.ProviderType = SqlDbType.Decimal; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for decimal column without a column scale. + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowDecimalWithoutScaleTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = 20; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Decimal; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for decimal column with a column precision that is too large (>38). + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowDecimalWithTooLargePrecisionTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = 39; + testReader.NumericScale = 5; + testReader.ProviderType = SqlDbType.Decimal; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for decimal column with a column scale that is larger than the column precision. + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowDecimalWithTooLargeScaleTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = 20; + testReader.NumericScale = 21; + testReader.ProviderType = SqlDbType.Decimal; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for datetime2 column with a column size that has a precision that is too large (>7). + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowDateTime2WithTooLargePrecisionTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = 8; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.DateTime2; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for datetimeoffset column with a column size that has a precision that is too large (>7). + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowDateTimeOffsetWithTooLargePrecisionTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = 8; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.DateTimeOffset; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for nchar column without a precision. + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowFloatWithoutPrecisionTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Float; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for float column with a column precision that is too large (>53). + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowFloatWithTooLargePrecisionTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = 54; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Float; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for nchar column without a column size. + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowNCharWithoutSizeTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.NChar; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for nchar column with a column size that is too large (>4000). + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowNCharWithTooLargeSizeTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = 4001; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.NChar; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for nvarchar column with a column size that is too large (>4000). + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowNVarCharWithTooLargeSizeTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = 4001; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.NVarChar; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for time column with a column precision that is too large (>7). + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowTimeWithTooLargePrecisionTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = 8; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Time; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for missing UDT schema name. + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowUdtMissingSchemaNameTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Udt; + testReader.UdtSchema = null; + testReader.UdtType = "Type"; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for empty UDT schema name. + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowUdtEmptySchemaNameTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Udt; + testReader.UdtSchema = string.Empty; + testReader.UdtType = "Type"; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for missing UDT name. + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowUdtMissingNameTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Udt; + testReader.UdtSchema = "Schema"; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for empty UDT name. + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowUdtEmptyNameTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Udt; + testReader.UdtSchema = "Schema"; + testReader.UdtType = string.Empty; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for varbinary column with a column size that is too large (>8000). + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowVarBinaryWithTooLargeSizeTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = 8001; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.VarBinary; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for varchar column with a column size that is too large (>8000). + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowVarCharWithTooLargeSizeTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = 8001; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.VarChar; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for null xml collection name but with a name for the database. + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowXmlNullNameWithDatabaseNameTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Xml; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = "Database"; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for null xml collection name. + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowXmlNullNameWithOwningSchemaNameTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Xml; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = "Schema"; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for empty xml collection database name. + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowXmlEmptyDatabaseNameTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Xml; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = string.Empty; + testReader.XmlSchemaCollectionOwningSchema = "Schema"; + testReader.XmlSchemaCollectionName = "Xml"; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for empty xml collection owning schema name. + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowXmlEmptyOwningSchemaNameTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Xml; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = "Database"; + testReader.XmlSchemaCollectionOwningSchema = string.Empty; + testReader.XmlSchemaCollectionName = "Xml"; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for empty xml collection name. + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentException))] + public void AddSchemaTableRowXmlEmptyNameTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Xml; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = "Database"; + testReader.XmlSchemaCollectionOwningSchema = "Schema"; + testReader.XmlSchemaCollectionName = string.Empty; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for a structured column (which is illegal). + /// + /// + /// Uses to test the illegal schema combination. 
+ /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowStructuredTypeTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Structured; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws a for a timestamp column (which is illegal). + /// + /// + /// Uses to test the illegal schema combination. + /// + /// + [Test] + [ExpectedException(typeof(ArgumentOutOfRangeException))] + public void AddSchemaTableRowTimestampTypeTest() + { + using (BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest()) + { + testReader.AllowDBNull = false; + testReader.ColumnName = "Name"; + testReader.ColumnSize = null; + testReader.IsKey = false; + testReader.IsUnique = false; + testReader.NumericPrecision = null; + testReader.NumericScale = null; + testReader.ProviderType = SqlDbType.Timestamp; + testReader.UdtSchema = null; + testReader.UdtType = null; + testReader.XmlSchemaCollectionDatabase = null; + testReader.XmlSchemaCollectionOwningSchema = null; + testReader.XmlSchemaCollectionName = null; + + DataTable schemaTable = testReader.GetSchemaTable(); ; + } + } + + /// + /// Test that + /// throws an for a column with an unallowed optional column set. + /// + /// + /// Uses to test the illegal schema combination. 
/// </remarks>
///
[Test]
public void AddSchemaTableRowUnallowedOptionalColumnTest()
{
    // Column size set
    using (BulkDataReaderSchemaTest testReader = CreateSchemaTestReader())
    {
        testReader.ColumnSize = 5;

        AssertAddSchemaTableRowRejects(testReader, new List<SqlDbType>
            { SqlDbType.BigInt, SqlDbType.Bit, SqlDbType.Date, SqlDbType.DateTime, SqlDbType.DateTime2,
              SqlDbType.DateTimeOffset, SqlDbType.Image, SqlDbType.Int, SqlDbType.Money, SqlDbType.Real,
              SqlDbType.SmallDateTime, SqlDbType.SmallInt, SqlDbType.SmallMoney, SqlDbType.Structured, SqlDbType.Text,
              SqlDbType.Time, SqlDbType.Timestamp, SqlDbType.TinyInt, SqlDbType.Udt, SqlDbType.UniqueIdentifier,
              SqlDbType.Variant, SqlDbType.Xml });
    }

    // Numeric precision set
    using (BulkDataReaderSchemaTest testReader = CreateSchemaTestReader())
    {
        testReader.NumericPrecision = 5;

        AssertAddSchemaTableRowRejects(testReader, new List<SqlDbType>
            { SqlDbType.BigInt, SqlDbType.Binary, SqlDbType.Bit, SqlDbType.Char, SqlDbType.Date,
              SqlDbType.DateTime, SqlDbType.Image, SqlDbType.Int, SqlDbType.Money, SqlDbType.NChar,
              SqlDbType.NText, SqlDbType.NVarChar, SqlDbType.Real, SqlDbType.SmallDateTime, SqlDbType.SmallInt,
              SqlDbType.SmallMoney, SqlDbType.Structured, SqlDbType.Text, SqlDbType.Timestamp, SqlDbType.TinyInt,
              SqlDbType.Udt, SqlDbType.UniqueIdentifier, SqlDbType.VarBinary, SqlDbType.VarChar, SqlDbType.Variant,
              SqlDbType.Xml });
    }

    // Numeric precision and scale set
    using (BulkDataReaderSchemaTest testReader = CreateSchemaTestReader())
    {
        testReader.NumericPrecision = 5;
        testReader.NumericScale = 3;

        AssertAddSchemaTableRowRejects(testReader, new List<SqlDbType>
            { SqlDbType.BigInt, SqlDbType.Binary, SqlDbType.Bit, SqlDbType.Char, SqlDbType.Date,
              SqlDbType.DateTime, SqlDbType.DateTime2, SqlDbType.DateTimeOffset, SqlDbType.Image, SqlDbType.Int,
              SqlDbType.Money, SqlDbType.NChar, SqlDbType.NText, SqlDbType.NVarChar, SqlDbType.Real,
              SqlDbType.SmallDateTime, SqlDbType.SmallInt, SqlDbType.SmallMoney, SqlDbType.Structured, SqlDbType.Text,
              SqlDbType.Time, SqlDbType.Timestamp, SqlDbType.TinyInt, SqlDbType.Udt, SqlDbType.UniqueIdentifier,
              SqlDbType.VarBinary, SqlDbType.VarChar, SqlDbType.Variant, SqlDbType.Xml });
    }

    // NOTE(review): the original contained this section twice, verbatim
    // ("Numeric scale set"). It is preserved for fidelity — confirm whether
    // the duplicate was meant to cover a different configuration.
    using (BulkDataReaderSchemaTest testReader = CreateSchemaTestReader())
    {
        testReader.NumericPrecision = 5;
        testReader.NumericScale = 3;

        AssertAddSchemaTableRowRejects(testReader, new List<SqlDbType>
            { SqlDbType.BigInt, SqlDbType.Binary, SqlDbType.Bit, SqlDbType.Char, SqlDbType.Date,
              SqlDbType.DateTime, SqlDbType.DateTime2, SqlDbType.DateTimeOffset, SqlDbType.Image, SqlDbType.Int,
              SqlDbType.Money, SqlDbType.NChar, SqlDbType.NText, SqlDbType.NVarChar, SqlDbType.Real,
              SqlDbType.SmallDateTime, SqlDbType.SmallInt, SqlDbType.SmallMoney, SqlDbType.Structured, SqlDbType.Text,
              SqlDbType.Time, SqlDbType.Timestamp, SqlDbType.TinyInt, SqlDbType.Udt, SqlDbType.UniqueIdentifier,
              SqlDbType.VarBinary, SqlDbType.VarChar, SqlDbType.Variant, SqlDbType.Xml });
    }

    // UDT type name set
    using (BulkDataReaderSchemaTest testReader = CreateSchemaTestReader())
    {
        testReader.UdtType = "Type";

        AssertAddSchemaTableRowRejects(testReader, new List<SqlDbType>
            { SqlDbType.BigInt, SqlDbType.Binary, SqlDbType.Bit, SqlDbType.Char, SqlDbType.Date,
              SqlDbType.DateTime, SqlDbType.DateTime2, SqlDbType.DateTimeOffset, SqlDbType.Decimal, SqlDbType.Float,
              SqlDbType.Image, SqlDbType.Int, SqlDbType.Money, SqlDbType.NChar, SqlDbType.NText,
              SqlDbType.NVarChar, SqlDbType.Real, SqlDbType.SmallDateTime, SqlDbType.SmallInt, SqlDbType.SmallMoney,
              SqlDbType.Structured, SqlDbType.Text, SqlDbType.Time, SqlDbType.Timestamp, SqlDbType.TinyInt,
              SqlDbType.UniqueIdentifier, SqlDbType.VarBinary, SqlDbType.VarChar, SqlDbType.Variant, SqlDbType.Xml });
    }

    // UDT schema and type name set
    using (BulkDataReaderSchemaTest testReader = CreateSchemaTestReader())
    {
        testReader.UdtSchema = "Schema";
        testReader.UdtType = "Type";

        AssertAddSchemaTableRowRejects(testReader, new List<SqlDbType>
            { SqlDbType.BigInt, SqlDbType.Binary, SqlDbType.Bit, SqlDbType.Char, SqlDbType.Date,
              SqlDbType.DateTime, SqlDbType.DateTime2, SqlDbType.DateTimeOffset, SqlDbType.Decimal, SqlDbType.Float,
              SqlDbType.Image, SqlDbType.Int, SqlDbType.Money, SqlDbType.NChar, SqlDbType.NText,
              SqlDbType.NVarChar, SqlDbType.Real, SqlDbType.SmallDateTime, SqlDbType.SmallInt, SqlDbType.SmallMoney,
              SqlDbType.Structured, SqlDbType.Text, SqlDbType.Time, SqlDbType.Timestamp, SqlDbType.TinyInt,
              SqlDbType.UniqueIdentifier, SqlDbType.VarBinary, SqlDbType.VarChar, SqlDbType.Variant, SqlDbType.Xml });
    }

    // XML type name set
    using (BulkDataReaderSchemaTest testReader = CreateSchemaTestReader())
    {
        testReader.XmlSchemaCollectionName = "Name";

        AssertAddSchemaTableRowRejects(testReader, new List<SqlDbType>
            { SqlDbType.BigInt, SqlDbType.Binary, SqlDbType.Bit, SqlDbType.Char, SqlDbType.Date,
              SqlDbType.DateTime, SqlDbType.DateTime2, SqlDbType.DateTimeOffset, SqlDbType.Decimal, SqlDbType.Float,
              SqlDbType.Image, SqlDbType.Int, SqlDbType.Money, SqlDbType.NChar, SqlDbType.NText,
              SqlDbType.NVarChar, SqlDbType.Real, SqlDbType.SmallDateTime, SqlDbType.SmallInt, SqlDbType.SmallMoney,
              SqlDbType.Structured, SqlDbType.Text, SqlDbType.Time, SqlDbType.Timestamp, SqlDbType.TinyInt,
              SqlDbType.UniqueIdentifier, SqlDbType.VarBinary, SqlDbType.VarChar, SqlDbType.Variant, SqlDbType.Udt });
    }

    // XML owning schema and type name set
    using (BulkDataReaderSchemaTest testReader = CreateSchemaTestReader())
    {
        testReader.XmlSchemaCollectionOwningSchema = "Schema";
        testReader.XmlSchemaCollectionName = "Name";

        AssertAddSchemaTableRowRejects(testReader, new List<SqlDbType>
            { SqlDbType.BigInt, SqlDbType.Binary, SqlDbType.Bit, SqlDbType.Char, SqlDbType.Date,
              SqlDbType.DateTime, SqlDbType.DateTime2, SqlDbType.DateTimeOffset, SqlDbType.Decimal, SqlDbType.Float,
              SqlDbType.Image, SqlDbType.Int, SqlDbType.Money, SqlDbType.NChar, SqlDbType.NText,
              SqlDbType.NVarChar, SqlDbType.Real, SqlDbType.SmallDateTime, SqlDbType.SmallInt, SqlDbType.SmallMoney,
              SqlDbType.Structured, SqlDbType.Text, SqlDbType.Time, SqlDbType.Timestamp, SqlDbType.TinyInt,
              SqlDbType.UniqueIdentifier, SqlDbType.VarBinary, SqlDbType.VarChar, SqlDbType.Variant, SqlDbType.Udt });
    }

    // XML database, owning schema and type name set
    using (BulkDataReaderSchemaTest testReader = CreateSchemaTestReader())
    {
        testReader.XmlSchemaCollectionDatabase = "Database";
        testReader.XmlSchemaCollectionOwningSchema = "Schema";
        testReader.XmlSchemaCollectionName = "Name";

        AssertAddSchemaTableRowRejects(testReader, new List<SqlDbType>
            { SqlDbType.BigInt, SqlDbType.Binary, SqlDbType.Bit, SqlDbType.Char, SqlDbType.Date,
              SqlDbType.DateTime, SqlDbType.DateTime2, SqlDbType.DateTimeOffset, SqlDbType.Decimal, SqlDbType.Float,
              SqlDbType.Image, SqlDbType.Int, SqlDbType.Money, SqlDbType.NChar, SqlDbType.NText,
              SqlDbType.NVarChar, SqlDbType.Real, SqlDbType.SmallDateTime, SqlDbType.SmallInt, SqlDbType.SmallMoney,
              SqlDbType.Structured, SqlDbType.Text, SqlDbType.Time, SqlDbType.Timestamp, SqlDbType.TinyInt,
              SqlDbType.UniqueIdentifier, SqlDbType.VarBinary, SqlDbType.VarChar, SqlDbType.Variant, SqlDbType.Udt });
    }
}

/// <summary>
/// Creates a <see cref="BulkDataReaderSchemaTest"/> with every optional schema value
/// cleared, ready for the caller to set the single property under test.
/// </summary>
private static BulkDataReaderSchemaTest CreateSchemaTestReader()
{
    BulkDataReaderSchemaTest testReader = new BulkDataReaderSchemaTest();

    testReader.AllowDBNull = false;
    testReader.ColumnName = "Name";
    testReader.ColumnSize = null;
    testReader.IsKey = false;
    testReader.IsUnique = false;
    testReader.NumericPrecision = null;
    testReader.NumericScale = null;
    testReader.UdtSchema = null;
    testReader.UdtType = null;
    testReader.XmlSchemaCollectionDatabase = null;
    testReader.XmlSchemaCollectionOwningSchema = null;
    testReader.XmlSchemaCollectionName = null;

    return testReader;
}

/// <summary>
/// Asserts that building the schema table throws an <see cref="ArgumentException"/>
/// for every provider type in <paramref name="unallowedTypes"/>, given the reader's
/// currently configured optional column values.
/// </summary>
/// <param name="testReader">
/// The pre-configured schema test reader.
/// </param>
/// <param name="unallowedTypes">
/// The provider types for which the current configuration is illegal.
/// </param>
private static void AssertAddSchemaTableRowRejects(BulkDataReaderSchemaTest testReader, IEnumerable<SqlDbType> unallowedTypes)
{
    foreach (SqlDbType dbtype in unallowedTypes)
    {
        testReader.ProviderType = dbtype;

        try
        {
            DataTable schemaTable = testReader.GetSchemaTable();

            Assert.Fail();
        }
        catch (ArgumentException)
        {
            // Expected: the optional value is illegal for this provider type.
        }
    }
}

#endregion

#region Rowset tests

/// <summary>
/// Test that Close is functioning correctly.
/// </summary>
[Test]
public void CloseTest()
{
    BulkDataReaderSubclass testReader = new BulkDataReaderSubclass();

    testReader.Close();

    Assert.IsTrue(testReader.IsClosed);
}

/// <summary>
/// Test that Depth is functioning correctly.
/// </summary>
/// <remarks>
/// Because nested row sets are not supported, this should always return 0.
/// </remarks>
[Test]
public void DepthTest()
{
    using (BulkDataReaderSubclass testReader = new BulkDataReaderSubclass())
    {
        Assert.IsTrue(testReader.Read());

        Assert.AreEqual(testReader.Depth, 0);
    }
}

/// <summary>
/// Test that GetData is functioning correctly.
/// </summary>
/// <remarks>
/// Because nested row sets are not supported, this should always return null.
/// </remarks>
[Test]
public void GetDataTest()
{
    using (BulkDataReaderSubclass testReader = new BulkDataReaderSubclass())
    {
        Assert.IsTrue(testReader.Read());

        Assert.IsTrue(testReader.FieldCount > 0);

        Assert.IsNull(testReader.GetData(0));
    }
}

/// <summary>
/// Test GetValue and related functions.
/// </summary>
/// <remarks>
/// Uses <see cref="BulkDataReaderSubclass"/> to test legal schema combinations.
/// </remarks>
[Test]
public void GetValueTest()
{
    using (BulkDataReaderSubclass testReader = new BulkDataReaderSubclass())
    {
        Assert.IsTrue(testReader.Read());

        // this[int] — integer indexer must match the expected row values.
        for (int column = 0; column < BulkDataReaderSubclass.ExpectedResultSet.Count; column++)
        {
            Assert.AreEqual(testReader[column], BulkDataReaderSubclass.ExpectedResultSet[column]);
        }

        // this[string] — name indexer; the upper-cased lookup checks that name
        // matching is case-insensitive.
        for (int column = 0; column < BulkDataReaderSubclass.ExpectedResultSet.Count; column++)
        {
            Assert.AreEqual(testReader[testReader.GetName(column)], BulkDataReaderSubclass.ExpectedResultSet[column]);

            Assert.AreEqual(testReader[testReader.GetName(column).ToUpperInvariant()], BulkDataReaderSubclass.ExpectedResultSet[column]);
        }

        // GetValues — bulk copy of the whole row into an array.
        {
            object[] values = new object[BulkDataReaderSubclass.ExpectedResultSet.Count];
            object[] expectedValues = new object[BulkDataReaderSubclass.ExpectedResultSet.Count];

            Assert.AreEqual(testReader.GetValues(values), values.Length);

            BulkDataReaderSubclass.ExpectedResultSet.CopyTo(expectedValues, 0);

            Assert.IsTrue(BulkDataReaderTest.ArraysMatch(values, expectedValues));
        }

        // Typed getters — walks the columns in the order AddSchemaTableRows
        // declared them; each comment below names the column being exercised.
        {
            int currentColumn = 0;

            // BigInt
            Assert.AreEqual(testReader.GetInt64(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Binary_20 — read the raw bytes via GetBytes.
            {
                byte[] expectedResult = (byte[])BulkDataReaderSubclass.ExpectedResultSet[currentColumn];
                int expectedLength = expectedResult.Length;
                byte[] buffer = new byte[expectedLength];

                Assert.AreEqual(testReader.GetBytes(currentColumn, 0, buffer, 0, expectedLength), expectedLength);

                Assert.IsTrue(BulkDataReaderTest.ArraysMatch(buffer, expectedResult));
            }
            currentColumn++;

            // Bit
            Assert.AreEqual(testReader.GetBoolean(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Bit_null — expected value is null, so IsDBNull must report true.
            Assert.AreEqual(testReader.IsDBNull(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn] == null);
            currentColumn++;

            // Char_Char
            Assert.AreEqual(testReader.GetChar(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Char_Char_Array — single char pulled from a char[] value.
            Assert.AreEqual(testReader.GetChar(currentColumn), ((char[])BulkDataReaderSubclass.ExpectedResultSet[currentColumn])[0]);
            currentColumn++;

            // Char_String — single char pulled from a string value.
            Assert.AreEqual(testReader.GetChar(currentColumn), ((string)BulkDataReaderSubclass.ExpectedResultSet[currentColumn])[0]);
            currentColumn++;

            // Char_20_String — checked via both GetString and GetChars.
            Assert.AreEqual(testReader.GetString(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);

            {
                char[] expectedResult = ((string)BulkDataReaderSubclass.ExpectedResultSet[currentColumn]).ToCharArray();
                int expectedLength = expectedResult.Length;
                char[] buffer = new char[expectedLength];

                Assert.AreEqual(testReader.GetChars(currentColumn, 0, buffer, 0, expectedLength), expectedLength);

                Assert.IsTrue(BulkDataReaderTest.ArraysMatch(buffer, expectedResult));
            }

            currentColumn++;

            // Date
            Assert.AreEqual(testReader.GetDateTime(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // DateTime
            Assert.AreEqual(testReader.GetDateTime(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // DateTime2
            Assert.AreEqual(testReader.GetDateTime(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // DateTime2_5
            Assert.AreEqual(testReader.GetDateTime(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // DateTimeOffset
            Assert.AreEqual(testReader.GetDateTimeOffset(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // DateTimeOffset_5
            Assert.AreEqual(testReader.GetDateTimeOffset(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Decimal_20_10
            Assert.AreEqual(testReader.GetDecimal(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Float_50
            Assert.AreEqual(testReader.GetDouble(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Image — read the raw bytes via GetBytes.
            {
                byte[] expectedResult = (byte[])BulkDataReaderSubclass.ExpectedResultSet[currentColumn];
                int expectedLength = expectedResult.Length;
                byte[] buffer = new byte[expectedLength];

                Assert.AreEqual(testReader.GetBytes(currentColumn, 0, buffer, 0, expectedLength), expectedLength);

                Assert.IsTrue(BulkDataReaderTest.ArraysMatch(buffer, expectedResult));
            }
            currentColumn++;

            // Int
            Assert.AreEqual(testReader.GetInt32(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Money
            Assert.AreEqual(testReader.GetDecimal(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // NChar_20
            Assert.AreEqual(testReader.GetString(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // NText
            Assert.AreEqual(testReader.GetString(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // NVarChar_20
            Assert.AreEqual(testReader.GetString(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // NVarChar_Max
            Assert.AreEqual(testReader.GetString(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Real
            Assert.AreEqual(testReader.GetFloat(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // SmallDateTime
            Assert.AreEqual(testReader.GetDateTime(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // SmallInt
            Assert.AreEqual(testReader.GetInt16(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // SmallMoney
            Assert.AreEqual(testReader.GetDecimal(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Text
            Assert.AreEqual(testReader.GetString(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Time
            Assert.AreEqual(testReader.GetTimeSpan(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Time_5
            Assert.AreEqual(testReader.GetTimeSpan(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // TinyInt
            Assert.AreEqual(testReader.GetByte(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Udt — only GetValue is available for UDT columns.
            Assert.AreEqual(testReader.GetValue(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // UniqueIdentifier
            Assert.AreEqual(testReader.GetGuid(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // VarBinary_20 — read the raw bytes via GetBytes.
            {
                byte[] expectedResult = (byte[])BulkDataReaderSubclass.ExpectedResultSet[currentColumn];
                int expectedLength = expectedResult.Length;
                byte[] buffer = new byte[expectedLength];

                Assert.AreEqual(testReader.GetBytes(currentColumn, 0, buffer, 0, expectedLength), expectedLength);

                Assert.IsTrue(BulkDataReaderTest.ArraysMatch(buffer, expectedResult));
            }
            currentColumn++;

            // VarBinary_Max — read the raw bytes via GetBytes.
            {
                byte[] expectedResult = (byte[])BulkDataReaderSubclass.ExpectedResultSet[currentColumn];
                int expectedLength = expectedResult.Length;
                byte[] buffer = new byte[expectedLength];

                Assert.AreEqual(testReader.GetBytes(currentColumn, 0, buffer, 0, expectedLength), expectedLength);

                Assert.IsTrue(BulkDataReaderTest.ArraysMatch(buffer, expectedResult));
            }
            currentColumn++;

            // VarChar_20
            Assert.AreEqual(testReader.GetString(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // VarChar_Max
            Assert.AreEqual(testReader.GetString(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Variant — only GetValue is available for variant columns.
            Assert.AreEqual(testReader.GetValue(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Xml_Database
            Assert.AreEqual(testReader.GetString(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Xml_Database_XML
            Assert.AreEqual(testReader.GetString(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Xml_Schema
            Assert.AreEqual(testReader.GetString(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Xml_Xml
            Assert.AreEqual(testReader.GetString(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;

            // Xml
            Assert.AreEqual(testReader.GetString(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn]);
            currentColumn++;
        }
    }
}

/// <summary>
/// Test that GetValue throws an ArgumentOutOfRangeException when
/// the index is too small.
/// </summary>
/// <remarks>
/// Uses BulkDataReaderSubclass to test the method.
/// </remarks>
[Test]
[ExpectedException(typeof(ArgumentOutOfRangeException))]
public void GetValueIndexTooSmallTest()
{
    using (BulkDataReaderSubclass testReader = new BulkDataReaderSubclass())
    {
        Assert.IsTrue(testReader.Read());

        // -1 is below the first valid ordinal.
        object result = testReader.GetValue(-1);
    }
}

/// <summary>
/// Test that GetValue throws an ArgumentOutOfRangeException when
/// the index is too large.
/// </summary>
/// <remarks>
/// Uses BulkDataReaderSubclass to test the method.
/// </remarks>
[Test]
[ExpectedException(typeof(ArgumentOutOfRangeException))]
public void GetValueIndexTooLargeTest()
{
    using (BulkDataReaderSubclass testReader = new BulkDataReaderSubclass())
    {
        Assert.IsTrue(testReader.Read());

        // FieldCount is one past the last valid ordinal.
        object result = testReader.GetValue(testReader.FieldCount);
    }
}

/// <summary>
/// Test that GetData throws an ArgumentOutOfRangeException when
/// the index is too small.
/// </summary>
/// <remarks>
/// Uses BulkDataReaderSubclass to test the method.
/// </remarks>
[Test]
[ExpectedException(typeof(ArgumentOutOfRangeException))]
public void GetDataIndexTooSmallTest()
{
    using (BulkDataReaderSubclass testReader = new BulkDataReaderSubclass())
    {
        Assert.IsTrue(testReader.Read());

        // -1 is below the first valid ordinal.
        object result = testReader.GetData(-1);
    }
}

/// <summary>
/// Test that GetData throws an ArgumentOutOfRangeException when
/// the index is too large.
/// </summary>
/// <remarks>
/// Uses BulkDataReaderSubclass to test the method.
/// </remarks>
[Test]
[ExpectedException(typeof(ArgumentOutOfRangeException))]
public void GetDataIndexTooLargeTest()
{
    using (BulkDataReaderSubclass testReader = new BulkDataReaderSubclass())
    {
        Assert.IsTrue(testReader.Read());

        // FieldCount is one past the last valid ordinal.
        object result = testReader.GetData(testReader.FieldCount);
    }
}

/// <summary>
/// Test that IsDBNull functions correctly.
/// </summary>
[Test]
public void IsDBNullTest()
{
    using (BulkDataReaderSubclass testReader = new BulkDataReaderSubclass())
    {
        // NOTE(review): unlike the other row set tests, Read() is never called
        // here before accessing the columns; the test stub's GetValue does not
        // depend on Read having been called — confirm this is intentional.
        for (int currentColumn = 0; currentColumn < testReader.FieldCount; currentColumn++)
        {
            // Only the "Bit_null" column has a null expected value.
            Assert.AreEqual(testReader.IsDBNull(currentColumn), BulkDataReaderSubclass.ExpectedResultSet[currentColumn] == null);
        }
    }
}

/// <summary>
/// Test that NextResult is functioning correctly.
/// </summary>
/// <remarks>
/// Because this is a single row set, this should always return false.
/// </remarks>
[Test]
public void NextResultTest()
{
    using (BulkDataReaderSubclass testReader = new BulkDataReaderSubclass())
    {
        Assert.IsFalse(testReader.NextResult());
    }
}

/// <summary>
/// Test that RecordsAffected is functioning correctly.
/// </summary>
/// <remarks>
/// Because this row set represents a data source, this should always return -1.
/// </remarks>
[Test]
public void RecordsAffectedTest()
{
    using (BulkDataReaderSubclass testReader = new BulkDataReaderSubclass())
    {
        Assert.IsTrue(testReader.Read());

        Assert.AreEqual(testReader.RecordsAffected, -1);
    }
}

#endregion

#region Test IDisposable

/// <summary>
/// Test that the IDisposable interface is functioning correctly.
/// </summary>
///
[Test]
public void IDisposableTest()
{
    // Test the Dispose method
    {
        BulkDataReaderSubclass testReader = new BulkDataReaderSubclass();

        testReader.Dispose();

        // Dispose must close the reader.
        Assert.IsTrue(testReader.IsClosed);
    }

    // Test the finalizer method
    {
        BulkDataReaderSubclass testReader = new BulkDataReaderSubclass();

        testReader = null;

        // Force the finalizer to run; the test passes if no exception
        // escapes finalization.
        GC.Collect();

        GC.WaitForPendingFinalizers();
    }
}

#endregion

#region Utility

/// <summary>
/// Do the two arrays match exactly?
/// </summary>
/// <typeparam name="ElementType">
/// The type of the array elements.
/// </typeparam>
/// <param name="left">
/// The first array.
/// </param>
/// <param name="right">
/// The second array.
/// </param>
/// <returns>
/// True if the arrays have the same length and contents.
/// </returns>
/// <exception cref="ArgumentNullException">
/// Thrown when either argument is null.
/// </exception>
private static bool ArraysMatch<ElementType>(ElementType[] left,
                                             ElementType[] right)
{
    if (left == null)
    {
        throw new ArgumentNullException("left");
    }
    else if (right == null)
    {
        // BUG FIX: the original reported "left" as the parameter name here.
        throw new ArgumentNullException("right");
    }

    bool result = true;

    if (left.Length != right.Length)
    {
        result = false;
    }
    else
    {
        // Element-wise comparison using object.Equals so nulls and value
        // types are both handled.
        for (int currentIndex = 0; currentIndex < left.Length; currentIndex++)
        {
            result &= object.Equals(left[currentIndex], right[currentIndex]);
        }
    }

    return result;
}

#endregion

#region Test stubs

/// <summary>
/// A subclass of BulkDataReader used for testing its utility functions.
/// </summary>
private class BulkDataReaderSubclass : BulkDataReader
{

    #region Constructors

    /// <summary>
    /// Constructor.
    /// </summary>
    public BulkDataReaderSubclass()
    {
    }

    #endregion

    #region BulkDataReader

    /// <summary>
    /// See BulkDataReader.SchemaName.
    /// </summary>
    /// <value>
    /// Returns the shared test schema name.
    /// </value>
    protected override string SchemaName
    {
        get { return BulkDataReaderTest.testSchemaName; }
    }

    /// <summary>
    /// See BulkDataReader.TableName.
    /// </summary>
    /// <value>
    /// Returns the shared test table name.
    /// </value>
    protected override string TableName
    {
        get { return BulkDataReaderTest.testTableName; }
    }

    /// <summary>
    /// See BulkDataReader.AddSchemaTableRows.
    /// </summary>
    /// <remarks>
    /// Creates a schema row for the various SqlDbType values.
    /// </remarks>
    protected override void AddSchemaTableRows()
    {
        // One column per supported SqlDbType variant. The order here must match
        // ExpectedResultSet element-for-element, since GetValue indexes into it.
        // Argument order: name, size, precision, scale, isUnique, isKey,
        // allowDBNull, providerType, udtSchema, udtType, xmlDatabase, xmlSchema, xmlName.
        AddSchemaTableRow("BigInt", null, null, null, true, false, false, SqlDbType.BigInt, null, null, null, null, null);
        AddSchemaTableRow("Binary_20", 20, null, null, false, true, false, SqlDbType.Binary, null, null, null, null, null);
        AddSchemaTableRow("Bit", null, null, null, false, false, true, SqlDbType.Bit, null, null, null, null, null);
        AddSchemaTableRow("Bit_null", null, null, null, false, false, true, SqlDbType.Bit, null, null, null, null, null);
        AddSchemaTableRow("Char_Char", 1, null, null, false, false, false, SqlDbType.Char, null, null, null, null, null);
        AddSchemaTableRow("Char_Char_Array", 1, null, null, false, false, false, SqlDbType.Char, null, null, null, null, null);
        AddSchemaTableRow("Char_String", 1, null, null, false, false, false, SqlDbType.Char, null, null, null, null, null);
        AddSchemaTableRow("Char_20_String", 20, null, null, false, false, false, SqlDbType.Char, null, null, null, null, null);
        AddSchemaTableRow("Date", null, null, null, false, false, false, SqlDbType.Date, null, null, null, null, null);
        AddSchemaTableRow("DateTime", null, null, null, false, false, false, SqlDbType.DateTime, null, null, null, null, null);
        AddSchemaTableRow("DateTime2", null, null, null, false, false, false, SqlDbType.DateTime2, null, null, null, null, null);
        AddSchemaTableRow("DateTime2_5", null, 5, null, false, false, false, SqlDbType.DateTime2, null, null, null, null, null);
        AddSchemaTableRow("DateTimeOffset", null, null, null, false, false, false, SqlDbType.DateTimeOffset, null, null, null, null, null);
        AddSchemaTableRow("DateTimeOffset_5", null, 5, null, false, false, false, SqlDbType.DateTimeOffset, null, null, null, null, null);
        AddSchemaTableRow("Decimal_20_10", null, 20, 10, false, false, false, SqlDbType.Decimal, null, null, null, null, null);
        AddSchemaTableRow("Float_50", null, 50, null, false, false, false, SqlDbType.Float, null, null, null, null, null);
        AddSchemaTableRow("Image", null, null, null, false, false, false, SqlDbType.Image, null, null, null, null, null);
        AddSchemaTableRow("Int", null, null, null, false, false, false, SqlDbType.Int, null, null, null, null, null);
        AddSchemaTableRow("Money", null, null, null, false, false, false, SqlDbType.Money, null, null, null, null, null);
        AddSchemaTableRow("NChar_20", 20, null, null, false, false, false, SqlDbType.NChar, null, null, null, null, null);
        AddSchemaTableRow("NText", null, null, null, false, false, false, SqlDbType.NText, null, null, null, null, null);
        AddSchemaTableRow("NVarChar_20", 20, null, null, false, false, false, SqlDbType.NVarChar, null, null, null, null, null);
        AddSchemaTableRow("NVarChar_Max", null, null, null, false, false, false, SqlDbType.NVarChar, null, null, null, null, null);
        AddSchemaTableRow("Real", null, null, null, false, false, false, SqlDbType.Real, null, null, null, null, null);
        AddSchemaTableRow("SmallDateTime", null, null, null, false, false, false, SqlDbType.SmallDateTime, null, null, null, null, null);
        AddSchemaTableRow("SmallInt", null, null, null, false, false, false, SqlDbType.SmallInt, null, null, null, null, null);
        AddSchemaTableRow("SmallMoney", null, null, null, false, false, false, SqlDbType.SmallMoney, null, null, null, null, null);
        AddSchemaTableRow("Text", null, null, null, false, false, false, SqlDbType.Text, null, null, null, null, null);
        AddSchemaTableRow("Time", null, null, null, false, false, false, SqlDbType.Time, null, null, null, null, null);
        AddSchemaTableRow("Time_5", null, 5, null, false, false, false, SqlDbType.Time, null, null, null, null, null);
        AddSchemaTableRow("TinyInt", null, null, null, false, false, false, SqlDbType.TinyInt, null, null, null, null, null);
        // UDT columns also carry the UDT's schema and type name.
        AddSchemaTableRow("Udt", null, null, null, false, false, false, SqlDbType.Udt, BulkDataReaderTest.testUdtSchemaName, BulkDataReaderTest.testUdtName, null, null, null);
        AddSchemaTableRow("UniqueIdentifier", null, null, null, false, false, false, SqlDbType.UniqueIdentifier, null, null, null, null, null);
        AddSchemaTableRow("VarBinary_20", 20, null, null, false, false, false, SqlDbType.VarBinary, null, null, null, null, null);
        AddSchemaTableRow("VarBinary_Max", null, null, null, false, false, false, SqlDbType.VarBinary, null, null, null, null, null);
        AddSchemaTableRow("VarChar_20", 20, null, null, false, false, false, SqlDbType.VarChar, null, null, null, null, null);
        AddSchemaTableRow("VarChar_Max", null, null, null, false, false, false, SqlDbType.VarChar, null, null, null, null, null);
        AddSchemaTableRow("Variant", null, null, null, false, false, false, SqlDbType.Variant, null, null, null, null, null);
        // XML columns with progressively less of the schema collection identity specified.
        AddSchemaTableRow("Xml_Database", null, null, null, false, false, false, SqlDbType.Xml, null, null, BulkDataReaderTest.testXmlSchemaCollectionDatabaseName, BulkDataReaderTest.testXMLSchemaCollectionSchemaName, BulkDataReaderTest.testXMLSchemaCollectionName);
        AddSchemaTableRow("Xml_Database_XML", null, null, null, false, false, false, SqlDbType.Xml, null, null, BulkDataReaderTest.testXmlSchemaCollectionDatabaseName, BulkDataReaderTest.testXMLSchemaCollectionSchemaName, BulkDataReaderTest.testXMLSchemaCollectionName);
        AddSchemaTableRow("Xml_Schema", null, null, null, false, false, false, SqlDbType.Xml, null, null, null, BulkDataReaderTest.testXMLSchemaCollectionSchemaName, BulkDataReaderTest.testXMLSchemaCollectionName);
        AddSchemaTableRow("Xml_Xml", null, null, null, false, false, false, SqlDbType.Xml, null, null, null, null, BulkDataReaderTest.testXMLSchemaCollectionName);
        AddSchemaTableRow("Xml", null, null, null, false, false, false, SqlDbType.Xml, null, null, null, null, null);
    }

    /// <summary>
    /// The result set returned by the test reader; one value per schema row above, in the same order.
+ /// + public static readonly ReadOnlyCollection ExpectedResultSet = new ReadOnlyCollection(new List + { + (long)10, + new byte[20], + true, + null, + 'c', + new char[] { 'c' }, + "c", + "char 20", + DateTime.UtcNow, + DateTime.UtcNow, + DateTime.UtcNow, + DateTime.UtcNow, + DateTimeOffset.UtcNow, + DateTimeOffset.UtcNow, + (decimal)10.5, + (double)10.5, + new byte[20], + (int)10, + (decimal)10.5, + "nchar 20", + "ntext", + "nvarchar 20", + "nvarchar max", + (float)10.5, + DateTime.UtcNow, + (short)10, + (decimal)10.5, + "text", + DateTime.UtcNow.TimeOfDay, + DateTime.UtcNow.TimeOfDay, + (byte)10, + new object(), + Guid.NewGuid(), + new byte[20], + new byte[20], + "varchar 20", + "varchar max", + (int)10, + @"", + @"", + @"", + @"", + @"" + }); + + /// + /// See + /// + /// + /// The zero-based column ordinal. + /// + /// + /// The value of the column in . + /// + /// + public override object GetValue(int i) + { + return BulkDataReaderSubclass.ExpectedResultSet[i]; + } + + /// + /// The number of rows read. + /// + private int readCount = 0; + + /// + /// See + /// + /// + /// True if there are more rows; otherwise, false. + /// + /// + public override bool Read() + { + return readCount++ < 1; + } + + #endregion + + } + + private class BulkDataReaderSchemaTest : BulkDataReader + { + + #region Properties + + /// + /// Is the column nullable (i.e. optional)? + /// + public bool AllowDBNull { get; set; } + + /// + /// The name of the column. + /// + public string ColumnName { get; set; } + + /// + /// The size of the column which may be null if not applicable. + /// + public int? ColumnSize { get; set; } + + /// + /// Is the column part of the primary key? + /// + public bool IsKey { get; set; } + + /// + /// Are the column values unique (i.e. never duplicated)? + /// + public bool IsUnique { get; set; } + + /// + /// The precision of the column which may be null if not applicable. + /// + public short? 
NumericPrecision { get; set; } + + /// + /// The scale of the column which may be null if not applicable. + /// + public short? NumericScale { get; set; } + + /// + /// The corresponding . + /// + public SqlDbType ProviderType { get; set; } + + /// + /// The schema name of the UDT. + /// + public string UdtSchema { get; set; } + + /// + /// The type name of the UDT. + /// + public string UdtType { get; set; } + + /// + /// For XML columns the schema collection's database name. Otherwise, null. + /// + public string XmlSchemaCollectionDatabase { get; set; } + + /// + /// For XML columns the schema collection's name. Otherwise, null. + /// + public string XmlSchemaCollectionName { get; set; } + + /// + /// For XML columns the schema collection's schema name. Otherwise, null. + /// + public string XmlSchemaCollectionOwningSchema { get; set; } + + #endregion + + #region Constructors + + /// + /// Constructor. + /// + public BulkDataReaderSchemaTest() + { + } + + #endregion + + #region BulkDataReader + + /// + /// See . + /// + /// + /// Returns . + /// + protected override string SchemaName + { + get { return BulkDataReaderTest.testSchemaName; } + } + + /// + /// See . + /// + /// + /// Returns . + /// + protected override string TableName + { + get { return BulkDataReaderTest.testTableName; } + } + + /// + /// See + /// + /// + /// Creates a schema row for the various values. + /// + protected override void AddSchemaTableRows() + { + AddSchemaTableRow(this.ColumnName, + this.ColumnSize, + this.NumericPrecision, + this.NumericScale, + this.IsUnique, + this.IsKey, + this.AllowDBNull, + this.ProviderType, + this.UdtSchema, + this.UdtType, + this.XmlSchemaCollectionDatabase, + this.XmlSchemaCollectionOwningSchema, + this.XmlSchemaCollectionName); + } + + /// + /// See + /// + /// + /// The test stub is only for testing schema functionality and behaves as if it has no rows. + /// + /// + /// The zero-based column ordinal. + /// + /// + /// Never returns. 
+ /// + /// + public override object GetValue(int i) + { + throw new InvalidOperationException("No data."); + } + + + /// + /// See + /// + /// + /// False. + /// + /// + public override bool Read() + { + return false; + } + + #endregion + + } + + #endregion + } +} diff --git a/src/Umbraco.Tests/Persistence/PetaPocoCachesTest.cs b/src/Umbraco.Tests/Persistence/PetaPocoCachesTest.cs new file mode 100644 index 0000000000..de32e0704b --- /dev/null +++ b/src/Umbraco.Tests/Persistence/PetaPocoCachesTest.cs @@ -0,0 +1,198 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Threading; +using NUnit.Framework; +using Umbraco.Core.Models; +using Umbraco.Core.Persistence; +using Umbraco.Core.Services; +using Umbraco.Tests.Services; +using Umbraco.Tests.TestHelpers; +using Umbraco.Tests.TestHelpers.Entities; + +namespace Umbraco.Tests.Persistence +{ + [DatabaseTestBehavior(DatabaseBehavior.NewDbFileAndSchemaPerTest)] + [TestFixture, NUnit.Framework.Ignore] + public class PetaPocoCachesTest : BaseServiceTest + { + +#if DEBUG + /// + /// This tests the peta poco caches + /// + /// + /// This test WILL fail. This is because we cannot stop PetaPoco from creating more cached items for queries such as + /// ContentTypeRepository.GetAll(1,2,3,4); + /// when combined with other GetAll queries that pass in an array of Ids, each query generated for different length + /// arrays will produce a unique query which then gets added to the cache. + /// + /// This test confirms this, if you analyze the DIFFERENCE output below you can see why the cached queries grow. 
+ /// + [Test] + public void Check_Peta_Poco_Caches() + { + var result = new List>>(); + + Database.PocoData.UseLongKeys = true; + + for (int i = 0; i < 2; i++) + { + int id1, id2, id3; + string alias; + CreateStuff(out id1, out id2, out id3, out alias); + QueryStuff(id1, id2, id3, alias); + + double totalBytes1; + IEnumerable keys; + Debug.Print(Database.PocoData.PrintDebugCacheReport(out totalBytes1, out keys)); + + result.Add(new Tuple>(totalBytes1, keys.Count(), keys)); + } + + for (int index = 0; index < result.Count; index++) + { + var tuple = result[index]; + Debug.Print("Bytes: {0}, Delegates: {1}", tuple.Item1, tuple.Item2); + if (index != 0) + { + Debug.Print("----------------DIFFERENCE---------------------"); + var diff = tuple.Item3.Except(result[index - 1].Item3); + foreach (var d in diff) + { + Debug.Print(d); + } + } + + } + + var allByteResults = result.Select(x => x.Item1).Distinct(); + var totalKeys = result.Select(x => x.Item2).Distinct(); + + Assert.AreEqual(1, allByteResults.Count()); + Assert.AreEqual(1, totalKeys.Count()); + } + + [Test] + public void Verify_Memory_Expires() + { + Database.PocoData.SlidingExpirationSeconds = 2; + + var managedCache = new Database.ManagedCache(); + + int id1, id2, id3; + string alias; + CreateStuff(out id1, out id2, out id3, out alias); + QueryStuff(id1, id2, id3, alias); + + var count1 = managedCache.GetCache().GetCount(); + Debug.Print("Keys = " + count1); + Assert.Greater(count1, 0); + + Thread.Sleep(10000); + + var count2 = managedCache.GetCache().GetCount(); + Debug.Print("Keys = " + count2); + Assert.Less(count2, count1); + } + + private void QueryStuff(int id1, int id2, int id3, string alias1) + { + var contentService = ServiceContext.ContentService; + + ServiceContext.TagService.GetTagsForEntity(id1); + + ServiceContext.TagService.GetAllContentTags(); + + ServiceContext.TagService.GetTagsForEntity(id2); + + ServiceContext.TagService.GetTagsForEntity(id3); + + contentService.CountDescendants(id3); + + 
contentService.CountChildren(id3); + + contentService.Count(contentTypeAlias: alias1); + + contentService.Count(); + + contentService.GetById(Guid.NewGuid()); + + contentService.GetByLevel(2); + + contentService.GetChildren(id1); + + contentService.GetDescendants(id2); + + contentService.GetVersions(id3); + + contentService.GetRootContent(); + + contentService.GetContentForExpiration(); + + contentService.GetContentForRelease(); + + contentService.GetContentInRecycleBin(); + + ((ContentService)contentService).GetPublishedDescendants(new Content("Test", -1, new ContentType(-1)) + { + Id = id1, + Path = "-1," + id1 + }); + + contentService.GetByVersion(Guid.NewGuid()); + } + + private void CreateStuff(out int id1, out int id2, out int id3, out string alias) + { + var contentService = ServiceContext.ContentService; + + var ctAlias = "umbTextpage" + Guid.NewGuid().ToString("N"); + alias = ctAlias; + + for (int i = 0; i < 20; i++) + { + contentService.CreateContentWithIdentity("Test", -1, "umbTextpage", 0); + } + var contentTypeService = ServiceContext.ContentTypeService; + var contentType = MockedContentTypes.CreateSimpleContentType(ctAlias, "test Doc Type"); + contentTypeService.Save(contentType); + for (int i = 0; i < 20; i++) + { + contentService.CreateContentWithIdentity("Test", -1, ctAlias, 0); + } + var parent = contentService.CreateContentWithIdentity("Test", -1, ctAlias, 0); + id1 = parent.Id; + + for (int i = 0; i < 20; i++) + { + contentService.CreateContentWithIdentity("Test", parent, ctAlias); + } + IContent current = parent; + for (int i = 0; i < 20; i++) + { + current = contentService.CreateContentWithIdentity("Test", current, ctAlias); + } + contentType = MockedContentTypes.CreateSimpleContentType("umbMandatory" + Guid.NewGuid().ToString("N"), "Mandatory Doc Type", true); + contentType.PropertyGroups.First().PropertyTypes.Add( + new PropertyType("test", DataTypeDatabaseType.Ntext, "tags") + { + DataTypeDefinitionId = 1041 + }); + 
contentTypeService.Save(contentType); + var content1 = MockedContent.CreateSimpleContent(contentType, "Tagged content 1", -1); + content1.SetTags("tags", new[] { "hello", "world", "some", "tags" }, true); + contentService.Publish(content1); + id2 = content1.Id; + + var content2 = MockedContent.CreateSimpleContent(contentType, "Tagged content 2", -1); + content2.SetTags("tags", new[] { "hello", "world", "some", "tags" }, true); + contentService.Publish(content2); + id3 = content2.Id; + + contentService.MoveToRecycleBin(content1); + } +#endif + } +} \ No newline at end of file diff --git a/src/Umbraco.Tests/Persistence/PetaPocoExtensionsTest.cs b/src/Umbraco.Tests/Persistence/PetaPocoExtensionsTest.cs index 0a15994785..d06d422d83 100644 --- a/src/Umbraco.Tests/Persistence/PetaPocoExtensionsTest.cs +++ b/src/Umbraco.Tests/Persistence/PetaPocoExtensionsTest.cs @@ -1,201 +1,16 @@ using System; using System.Collections.Generic; -using System.Diagnostics; -using System.Linq; using System.Text.RegularExpressions; -using System.Threading; using NUnit.Framework; using Umbraco.Core; -using Umbraco.Core.Models; +using Umbraco.Core.Logging; using Umbraco.Core.Models.Rdbms; using Umbraco.Core.Persistence; -using Umbraco.Core.Services; -using Umbraco.Tests.Services; +using Umbraco.Core.Persistence.SqlSyntax; using Umbraco.Tests.TestHelpers; -using Umbraco.Tests.TestHelpers.Entities; namespace Umbraco.Tests.Persistence { - [DatabaseTestBehavior(DatabaseBehavior.NewDbFileAndSchemaPerTest)] - [TestFixture, NUnit.Framework.Ignore] - public class PetaPocoCachesTest : BaseServiceTest - { - /// - /// This tests the peta poco caches - /// - /// - /// This test WILL fail. 
This is because we cannot stop PetaPoco from creating more cached items for queries such as - /// ContentTypeRepository.GetAll(1,2,3,4); - /// when combined with other GetAll queries that pass in an array of Ids, each query generated for different length - /// arrays will produce a unique query which then gets added to the cache. - /// - /// This test confirms this, if you analyze the DIFFERENCE output below you can see why the cached queries grow. - /// - [Test] - public void Check_Peta_Poco_Caches() - { - var result = new List>>(); - - Database.PocoData.UseLongKeys = true; - - for (int i = 0; i < 2; i++) - { - int id1, id2, id3; - string alias; - CreateStuff(out id1, out id2, out id3, out alias); - QueryStuff(id1, id2, id3, alias); - - double totalBytes1; - IEnumerable keys; - Debug.Print(Database.PocoData.PrintDebugCacheReport(out totalBytes1, out keys)); - - result.Add(new Tuple>(totalBytes1, keys.Count(), keys)); - } - - for (int index = 0; index < result.Count; index++) - { - var tuple = result[index]; - Debug.Print("Bytes: {0}, Delegates: {1}", tuple.Item1, tuple.Item2); - if (index != 0) - { - Debug.Print("----------------DIFFERENCE---------------------"); - var diff = tuple.Item3.Except(result[index - 1].Item3); - foreach (var d in diff) - { - Debug.Print(d); - } - } - - } - - var allByteResults = result.Select(x => x.Item1).Distinct(); - var totalKeys = result.Select(x => x.Item2).Distinct(); - - Assert.AreEqual(1, allByteResults.Count()); - Assert.AreEqual(1, totalKeys.Count()); - } - - [Test] - public void Verify_Memory_Expires() - { - Database.PocoData.SlidingExpirationSeconds = 2; - - var managedCache = new Database.ManagedCache(); - - int id1, id2, id3; - string alias; - CreateStuff(out id1, out id2, out id3, out alias); - QueryStuff(id1, id2, id3, alias); - - var count1 = managedCache.GetCache().GetCount(); - Debug.Print("Keys = " + count1); - Assert.Greater(count1, 0); - - Thread.Sleep(10000); - - var count2 = managedCache.GetCache().GetCount(); - 
Debug.Print("Keys = " + count2); - Assert.Less(count2, count1); - } - - private void QueryStuff(int id1, int id2, int id3, string alias1) - { - var contentService = ServiceContext.ContentService; - - ServiceContext.TagService.GetTagsForEntity(id1); - - ServiceContext.TagService.GetAllContentTags(); - - ServiceContext.TagService.GetTagsForEntity(id2); - - ServiceContext.TagService.GetTagsForEntity(id3); - - contentService.CountDescendants(id3); - - contentService.CountChildren(id3); - - contentService.Count(contentTypeAlias: alias1); - - contentService.Count(); - - contentService.GetById(Guid.NewGuid()); - - contentService.GetByLevel(2); - - contentService.GetChildren(id1); - - contentService.GetDescendants(id2); - - contentService.GetVersions(id3); - - contentService.GetRootContent(); - - contentService.GetContentForExpiration(); - - contentService.GetContentForRelease(); - - contentService.GetContentInRecycleBin(); - - ((ContentService)contentService).GetPublishedDescendants(new Content("Test", -1, new ContentType(-1)) - { - Id = id1, - Path = "-1," + id1 - }); - - contentService.GetByVersion(Guid.NewGuid()); - } - - private void CreateStuff(out int id1, out int id2, out int id3, out string alias) - { - var contentService = ServiceContext.ContentService; - - var ctAlias = "umbTextpage" + Guid.NewGuid().ToString("N"); - alias = ctAlias; - - for (int i = 0; i < 20; i++) - { - contentService.CreateContentWithIdentity("Test", -1, "umbTextpage", 0); - } - var contentTypeService = ServiceContext.ContentTypeService; - var contentType = MockedContentTypes.CreateSimpleContentType(ctAlias, "test Doc Type"); - contentTypeService.Save(contentType); - for (int i = 0; i < 20; i++) - { - contentService.CreateContentWithIdentity("Test", -1, ctAlias, 0); - } - var parent = contentService.CreateContentWithIdentity("Test", -1, ctAlias, 0); - id1 = parent.Id; - - for (int i = 0; i < 20; i++) - { - contentService.CreateContentWithIdentity("Test", parent, ctAlias); - } - IContent 
current = parent; - for (int i = 0; i < 20; i++) - { - current = contentService.CreateContentWithIdentity("Test", current, ctAlias); - } - contentType = MockedContentTypes.CreateSimpleContentType("umbMandatory" + Guid.NewGuid().ToString("N"), "Mandatory Doc Type", true); - contentType.PropertyGroups.First().PropertyTypes.Add( - new PropertyType("test", DataTypeDatabaseType.Ntext, "tags") - { - DataTypeDefinitionId = 1041 - }); - contentTypeService.Save(contentType); - var content1 = MockedContent.CreateSimpleContent(contentType, "Tagged content 1", -1); - content1.SetTags("tags", new[] { "hello", "world", "some", "tags" }, true); - contentService.Publish(content1); - id2 = content1.Id; - - var content2 = MockedContent.CreateSimpleContent(contentType, "Tagged content 2", -1); - content2.SetTags("tags", new[] { "hello", "world", "some", "tags" }, true); - contentService.Publish(content2); - id3 = content2.Id; - - contentService.MoveToRecycleBin(content1); - } - } - [DatabaseTestBehavior(DatabaseBehavior.NewDbFileAndSchemaPerTest)] [TestFixture] public class PetaPocoExtensionsTest : BaseDatabaseFactoryTest @@ -213,7 +28,7 @@ namespace Umbraco.Tests.Persistence } [Test] - public void Can_Bulk_Insert() + public void Can_Bulk_Insert_One_By_One() { // Arrange var db = DatabaseContext.Database; @@ -234,13 +49,168 @@ namespace Umbraco.Tests.Persistence // Act using (ProfilingLogger.TraceDuration("starting insert", "finished insert")) { - db.BulkInsertRecords(servers); + using (var tr = db.GetTransaction()) + { + db.BulkInsertRecords(servers, tr, SqlSyntax, useNativeSqlPlatformBulkInsert:false); + tr.Complete(); + } } // Assert Assert.That(db.ExecuteScalar("SELECT COUNT(*) FROM umbracoServer"), Is.EqualTo(1000)); } + [Test] + public void Can_Bulk_Insert_One_By_One_Transaction_Rollback() + { + // Arrange + var db = DatabaseContext.Database; + + var servers = new List(); + for (var i = 0; i < 1000; i++) + { + servers.Add(new ServerRegistrationDto + { + ServerAddress = 
"address" + i, + ServerIdentity = "computer" + i, + DateRegistered = DateTime.Now, + IsActive = true, + DateAccessed = DateTime.Now + }); + } + + // Act + using (ProfilingLogger.TraceDuration("starting insert", "finished insert")) + { + using (var tr = db.GetTransaction()) + { + db.BulkInsertRecords(servers, tr, SqlSyntax, useNativeSqlPlatformBulkInsert: false); + //don't call complete here - the trans will be rolled back + } + } + + // Assert + Assert.That(db.ExecuteScalar("SELECT COUNT(*) FROM umbracoServer"), Is.EqualTo(0)); + } + + + [NUnit.Framework.Ignore("Ignored because you need to configure your own SQL Server to test thsi with")] + [Test] + public void Can_Bulk_Insert_Native_Sql_Server_Bulk_Inserts() + { + //create the db + var dbSqlServer = new UmbracoDatabase( + "server=.\\SQLExpress;database=YOURDB;user id=YOURUSER;password=YOURPASSWORD", + Constants.DatabaseProviders.SqlServer, + new DebugDiagnosticsLogger()); + + //drop the table + dbSqlServer.Execute("DROP TABLE [umbracoServer]"); + + //re-create it + dbSqlServer.Execute(@"CREATE TABLE [umbracoServer]( + [id] [int] IDENTITY(1,1) NOT NULL, + [address] [nvarchar](500) NOT NULL, + [computerName] [nvarchar](255) NOT NULL, + [registeredDate] [datetime] NOT NULL CONSTRAINT [DF_umbracoServer_registeredDate] DEFAULT (getdate()), + [lastNotifiedDate] [datetime] NOT NULL, + [isActive] [bit] NOT NULL, + [isMaster] [bit] NOT NULL, + CONSTRAINT [PK_umbracoServer] PRIMARY KEY CLUSTERED +( + [id] ASC +)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY] +)"); + var data = new List(); + for (var i = 0; i < 1000; i++) + { + data.Add(new ServerRegistrationDto + { + ServerAddress = "address" + i, + ServerIdentity = "computer" + i, + DateRegistered = DateTime.Now, + IsActive = true, + DateAccessed = DateTime.Now + }); + } + + var sqlServerSyntax = new SqlServerSyntaxProvider(); + using (var tr = dbSqlServer.GetTransaction()) + { + 
dbSqlServer.BulkInsertRecords(data, tr, sqlServerSyntax, useNativeSqlPlatformBulkInsert: true); + tr.Complete(); + } + + // Assert + Assert.That(dbSqlServer.ExecuteScalar("SELECT COUNT(*) FROM umbracoServer"), Is.EqualTo(1000)); + } + + [Test] + public void Can_Bulk_Insert_Native_Sql_Bulk_Inserts() + { + // Arrange + var db = DatabaseContext.Database; + + var servers = new List(); + for (var i = 0; i < 1000; i++) + { + servers.Add(new ServerRegistrationDto + { + ServerAddress = "address" + i, + ServerIdentity = "computer" + i, + DateRegistered = DateTime.Now, + IsActive = true, + DateAccessed = DateTime.Now + }); + } + + // Act + using (ProfilingLogger.TraceDuration("starting insert", "finished insert")) + { + using (var tr = db.GetTransaction()) + { + db.BulkInsertRecords(servers, tr, SqlSyntax, useNativeSqlPlatformBulkInsert: true); + tr.Complete(); + } + } + + // Assert + Assert.That(db.ExecuteScalar("SELECT COUNT(*) FROM umbracoServer"), Is.EqualTo(1000)); + } + + [Test] + public void Can_Bulk_Insert_Native_Sql_Bulk_Inserts_Transaction_Rollback() + { + // Arrange + var db = DatabaseContext.Database; + + var servers = new List(); + for (var i = 0; i < 1000; i++) + { + servers.Add(new ServerRegistrationDto + { + ServerAddress = "address" + i, + ServerIdentity = "computer" + i, + DateRegistered = DateTime.Now, + IsActive = true, + DateAccessed = DateTime.Now + }); + } + + // Act + using (ProfilingLogger.TraceDuration("starting insert", "finished insert")) + { + using (var tr = db.GetTransaction()) + { + db.BulkInsertRecords(servers, tr, SqlSyntax, useNativeSqlPlatformBulkInsert: true); + //don't call complete here - the trans will be rolled back + } + } + + // Assert + Assert.That(db.ExecuteScalar("SELECT COUNT(*) FROM umbracoServer"), Is.EqualTo(0)); + } + [Test] public void Generate_Bulk_Import_Sql() { @@ -263,7 +233,9 @@ namespace Umbraco.Tests.Persistence // Act string[] sql; - db.GenerateBulkInsertCommand(servers, db.Connection, out sql); + 
db.GenerateBulkInsertCommand( + Database.PocoData.ForType(typeof(ServerRegistrationDto)), + servers, out sql); db.CloseSharedConnection(); // Assert @@ -295,7 +267,7 @@ namespace Umbraco.Tests.Persistence // Act string[] sql; - db.GenerateBulkInsertCommand(servers, db.Connection, out sql); + db.GenerateBulkInsertCommand(Database.PocoData.ForType(typeof(ServerRegistrationDto)), servers, out sql); db.CloseSharedConnection(); // Assert diff --git a/src/Umbraco.Tests/Persistence/SyntaxProvider/MySqlSyntaxProviderTests.cs b/src/Umbraco.Tests/Persistence/SyntaxProvider/MySqlSyntaxProviderTests.cs index 8126aa5e36..e58506aa03 100644 --- a/src/Umbraco.Tests/Persistence/SyntaxProvider/MySqlSyntaxProviderTests.cs +++ b/src/Umbraco.Tests/Persistence/SyntaxProvider/MySqlSyntaxProviderTests.cs @@ -28,7 +28,7 @@ namespace Umbraco.Tests.Persistence.SyntaxProvider public void Can_Generate_Create_Table_Statement() { var type = typeof(TagRelationshipDto); - var definition = DefinitionFactory.GetTableDefinition(type); + var definition = DefinitionFactory.GetTableDefinition(SqlSyntaxContext.SqlSyntaxProvider, type); string create = SqlSyntaxContext.SqlSyntaxProvider.Format(definition); string primaryKey = SqlSyntaxContext.SqlSyntaxProvider.FormatPrimaryKey(definition); diff --git a/src/Umbraco.Tests/Persistence/SyntaxProvider/SqlCeSyntaxProviderTests.cs b/src/Umbraco.Tests/Persistence/SyntaxProvider/SqlCeSyntaxProviderTests.cs index e960f50799..fafddb8dfd 100644 --- a/src/Umbraco.Tests/Persistence/SyntaxProvider/SqlCeSyntaxProviderTests.cs +++ b/src/Umbraco.Tests/Persistence/SyntaxProvider/SqlCeSyntaxProviderTests.cs @@ -51,7 +51,7 @@ WHERE (([umbracoNode].[nodeObjectType] = @0))) x)".Replace(Environment.NewLine, var sqlSyntax = new SqlCeSyntaxProvider(); var type = typeof (NodeDto); - var definition = DefinitionFactory.GetTableDefinition(type); + var definition = DefinitionFactory.GetTableDefinition(sqlSyntax, type); string create = sqlSyntax.Format(definition); string primaryKey = 
sqlSyntax.FormatPrimaryKey(definition); diff --git a/src/Umbraco.Tests/Umbraco.Tests.csproj b/src/Umbraco.Tests/Umbraco.Tests.csproj index 8659a73cd1..a606d401a0 100644 --- a/src/Umbraco.Tests/Umbraco.Tests.csproj +++ b/src/Umbraco.Tests/Umbraco.Tests.csproj @@ -176,7 +176,9 @@ + +