Merge branch 'v8/feature/nucache-perf' into v8/feature/nucache-perf-sql-fixes

This commit is contained in:
Chad
2021-02-12 18:08:53 +13:00
committed by GitHub
1848 changed files with 60624 additions and 14076 deletions

View File

@@ -7,6 +7,16 @@ using Umbraco.Core.Xml;
namespace Umbraco.Web.PublishedCache
{
public interface IPublishedCache2 : IPublishedCache
{
/// <summary>
/// Gets a content type identified by its key.
/// </summary>
/// <param name="key">The content type key.</param>
/// <returns>The content type, or null.</returns>
IPublishedContentType GetContentType(Guid key);
}
/// <summary>
/// Provides access to cached contents.
/// </summary>
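A minimal usage sketch for the new key-based overload (not part of this commit; umbracoContext and contentTypeKey are placeholder names, and the cast assumes the runtime cache implements the extended interface, as the NuCache ContentCache does later in this diff):

    // Look up a content type by its GUID key instead of its integer id or alias.
    if (umbracoContext.Content is IPublishedCache2 cacheWithKeyLookup)
    {
        IPublishedContentType contentType = cacheWithKeyLookup.GetContentType(contentTypeKey);
    }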

View File

@@ -4,6 +4,11 @@ using Umbraco.Core.Models.PublishedContent;
namespace Umbraco.Web.PublishedCache
{
public interface IPublishedContentCache2 : IPublishedContentCache, IPublishedCache2
{
// NOTE: this is here purely to avoid API breaking changes
}
public interface IPublishedContentCache : IPublishedCache
{
/// <summary>
@@ -38,16 +43,26 @@ namespace Umbraco.Web.PublishedCache
/// </summary>
/// <param name="preview">A value indicating whether to consider unpublished content.</param>
/// <param name="contentId">The content unique identifier.</param>
/// <returns>The route.</returns>
/// <remarks>The value of <paramref name="preview"/> overrides defaults.</remarks>
/// <returns>A specially formatted route string.</returns>
/// <remarks>
/// <para>
/// The resulting string is a specially encoded route that may contain the domain ID
/// for the current route. If a domain is present, the string will be prefixed with the domain ID integer, for example: {domainId}/route-path-of-item
/// </para>
/// <para>The value of <paramref name="preview"/> overrides defaults.</para>
/// </remarks>
string GetRouteById(bool preview, int contentId, string culture = null);
/// <summary>
/// Gets the route for a content identified by its unique identifier.
/// </summary>
/// <param name="contentId">The content unique identifier.</param>
/// <returns>The route.</returns>
/// <returns>A specially formatted route string.</returns>
/// <remarks>
/// <para>
/// The resulting string is a specially encoded route that may contain the domain ID
/// for the current route. If a domain is present, the string will be prefixed with the domain ID integer, for example: {domainId}/route-path-of-item
/// </para>
/// <para>Considers published or unpublished content depending on defaults.</para>
/// </remarks>
string GetRouteById(int contentId, string culture = null);
}
}
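To make the encoded route format concrete, here is a small illustrative helper (hypothetical, not part of the diff) that unpacks the "{domainId}/route-path" string, mirroring the parsing that ContentCache.GetByRoute performs later in this commit:

    // "1063/products/shoes" -> (1063, "/products/shoes"); "/products/shoes" -> (0, "/products/shoes")
    private static (int startNodeId, string path) SplitRoute(string route)
    {
        var pos = route.IndexOf('/');
        var startNodeId = pos == 0 ? 0 : int.Parse(route.Substring(0, pos));
        var path = pos == 0 ? route : route.Substring(pos);
        return (startNodeId, path);
    }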

View File

@@ -1,5 +1,10 @@
namespace Umbraco.Web.PublishedCache
{
public interface IPublishedMediaCache2 : IPublishedMediaCache, IPublishedCache2
{
// NOTE: this is here purely to avoid API breaking changes
}
public interface IPublishedMediaCache : IPublishedCache
{ }
}

View File

@@ -13,7 +13,7 @@ using Umbraco.Web.PublishedCache.NuCache.Navigable;
namespace Umbraco.Web.PublishedCache.NuCache
{
internal class ContentCache : PublishedCacheBase, IPublishedContentCache, INavigableData, IDisposable
internal class ContentCache : PublishedCacheBase, IPublishedContentCache2, INavigableData, IDisposable
{
private readonly ContentStore.Snapshot _snapshot;
private readonly IAppCache _snapshotCache;
@@ -79,7 +79,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
var pos = route.IndexOf('/');
var path = pos == 0 ? route : route.Substring(pos);
var startNodeId = pos == 0 ? 0 : int.Parse(route.Substring(0, pos));
var parts = path.Split(new[] { '/' }, StringSplitOptions.RemoveEmptyEntries);
var parts = path.Split(Constants.CharArrays.ForwardSlash, StringSplitOptions.RemoveEmptyEntries);
IPublishedContent content;
@@ -109,8 +109,8 @@ namespace Umbraco.Web.PublishedCache.NuCache
content = FollowRoute(content, parts, 1, culture);
}
// if hideTopLevelNodePath is true then for url /foo we looked for /*/foo
// but maybe that was the url of a non-default top-level node, so we also
// if hideTopLevelNodePath is true then for URL /foo we looked for /*/foo
// but maybe that was the URL of a non-default top-level node, so we also
// have to look for /foo (see note in ApplyHideTopLevelNodeFromPath).
if (content == null && hideTopLevelNode.Value && parts.Length == 1)
{
@@ -141,7 +141,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
hideTopLevelNode = hideTopLevelNode ?? HideTopLevelNodeFromPath; // default = settings
// walk up from that node until we hit a node with a domain,
// or we reach the content root, collecting urls in the way
// or we reach the content root, collecting URLs on the way
var pathParts = new List<string>();
var n = node;
var urlSegment = n.UrlSegment(culture);
@@ -384,15 +384,11 @@ namespace Umbraco.Web.PublishedCache.NuCache
#region Content types
public override IPublishedContentType GetContentType(int id)
{
return _snapshot.GetContentType(id);
}
public override IPublishedContentType GetContentType(int id) => _snapshot.GetContentType(id);
public override IPublishedContentType GetContentType(string alias)
{
return _snapshot.GetContentType(alias);
}
public override IPublishedContentType GetContentType(string alias) => _snapshot.GetContentType(alias);
public override IPublishedContentType GetContentType(Guid key) => _snapshot.GetContentType(key);
#endregion

View File

@@ -15,6 +15,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
public bool IsNull => ContentTypeId < 0;
public static ContentNodeKit Empty { get; } = new ContentNodeKit();
public static ContentNodeKit Null { get; } = new ContentNodeKit { ContentTypeId = -1 };
public void Build(

View File

@@ -37,9 +37,14 @@ namespace Umbraco.Web.PublishedCache.NuCache
private readonly IVariationContextAccessor _variationContextAccessor;
private readonly ConcurrentDictionary<int, LinkedNode<ContentNode>> _contentNodes;
private LinkedNode<ContentNode> _root;
private readonly ConcurrentDictionary<int, LinkedNode<IPublishedContentType>> _contentTypesById;
// We must keep separate dictionaries by id and by alias because these are tracked per snapshot/layer,
// and the alias of a content type can be different for the same id in another layer,
// whereas the GUID -> INT cross reference can never differ
private readonly ConcurrentDictionary<int, LinkedNode<IPublishedContentType>> _contentTypesById;
private readonly ConcurrentDictionary<string, LinkedNode<IPublishedContentType>> _contentTypesByAlias;
private readonly ConcurrentDictionary<Guid, int> _xmap;
private readonly ConcurrentDictionary<Guid, int> _contentTypeKeyToIdMap;
private readonly ConcurrentDictionary<Guid, int> _contentKeyToIdMap;
private readonly ILogger _logger;
private BPlusTree<int, ContentNodeKit> _localDb;
@@ -73,7 +78,8 @@ namespace Umbraco.Web.PublishedCache.NuCache
_root = new LinkedNode<ContentNode>(new ContentNode(), 0);
_contentTypesById = new ConcurrentDictionary<int, LinkedNode<IPublishedContentType>>();
_contentTypesByAlias = new ConcurrentDictionary<string, LinkedNode<IPublishedContentType>>(StringComparer.InvariantCultureIgnoreCase);
_xmap = new ConcurrentDictionary<Guid, int>();
_contentTypeKeyToIdMap = new ConcurrentDictionary<Guid, int>();
_contentKeyToIdMap = new ConcurrentDictionary<Guid, int>();
_genObjs = new ConcurrentQueue<GenObj>();
_genObj = null; // no initial gen exists
@@ -136,7 +142,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
Monitor.Enter(_wlocko, ref lockInfo.Taken);
lock(_rlocko)
lock (_rlocko)
{
// see SnapDictionary
try { }
@@ -152,7 +158,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
_nextGen = true;
}
}
}
}
}
private void Release(WriteLockInfo lockInfo, bool commit = true)
@@ -291,8 +297,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
foreach (var type in types)
{
SetValueLocked(_contentTypesById, type.Id, type);
SetValueLocked(_contentTypesByAlias, type.Alias, type);
SetContentTypeLocked(type);
}
}
@@ -318,8 +323,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
foreach (var type in index.Values)
{
SetValueLocked(_contentTypesById, type.Id, type);
SetValueLocked(_contentTypesByAlias, type.Alias, type);
SetContentTypeLocked(type);
}
foreach (var link in _contentNodes.Values)
@@ -354,8 +358,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
// set all new content types
foreach (var type in types)
{
SetValueLocked(_contentTypesById, type.Id, type);
SetValueLocked(_contentTypesByAlias, type.Alias, type);
SetContentTypeLocked(type);
}
// beware! at that point the cache is inconsistent,
@@ -419,8 +422,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
// perform update of refreshed content types
foreach (var type in refreshedTypesA)
{
SetValueLocked(_contentTypesById, type.Id, type);
SetValueLocked(_contentTypesByAlias, type.Alias, type);
SetContentTypeLocked(type);
}
// perform update of content with refreshed content type - from the kits
@@ -638,7 +640,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
kit.Node.PreviousSiblingContentId = existing.PreviousSiblingContentId;
}
_xmap[kit.Node.Uid] = kit.Node.Id;
_contentKeyToIdMap[kit.Node.Uid] = kit.Node.Id;
return true;
}
@@ -734,7 +736,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
// this node becomes the previous node
previousNode = thisNode;
_xmap[kit.Node.Uid] = kit.Node.Id;
_contentKeyToIdMap[kit.Node.Uid] = kit.Node.Id;
}
return ok;
@@ -757,7 +759,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
EnsureLocked();
var ok = true;
ClearLocked(_contentNodes);
ClearRootLocked();
@@ -778,7 +780,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
if (_localDb != null) RegisterChange(kit.Node.Id, kit);
AddTreeNodeLocked(kit.Node, parent);
_xmap[kit.Node.Uid] = kit.Node.Id;
_contentKeyToIdMap[kit.Node.Uid] = kit.Node.Id;
}
return ok;
@@ -807,7 +809,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
EnsureLocked();
var ok = true;
// get existing
_contentNodes.TryGetValue(rootContentId, out var link);
var existing = link?.Value;
@@ -833,7 +835,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
if (_localDb != null) RegisterChange(kit.Node.Id, kit);
AddTreeNodeLocked(kit.Node, parent);
_xmap[kit.Node.Uid] = kit.Node.Id;
_contentKeyToIdMap[kit.Node.Uid] = kit.Node.Id;
}
return ok;
@@ -885,11 +887,11 @@ namespace Umbraco.Web.PublishedCache.NuCache
// This should never be null, all code that calls this method is null checking but we've seen
// issues of null ref exceptions in issue reports so we'll double check here
if (content == null) throw new ArgumentNullException(nameof(content));
SetValueLocked(_contentNodes, content.Id, null);
if (_localDb != null) RegisterChange(content.Id, ContentNodeKit.Null);
_xmap.TryRemove(content.Uid, out _);
_contentKeyToIdMap.TryRemove(content.Uid, out _);
var id = content.FirstChildContentId;
while (id > 0)
@@ -913,10 +915,10 @@ namespace Umbraco.Web.PublishedCache.NuCache
{
if (_contentNodes.TryGetValue(id, out var link))
{
link = GetLinkedNodeGen(link, gen);
link = GetLinkedNodeGen(link, gen);
if (link != null && link.Value != null)
return link;
}
}
throw new PanicException($"failed to get {description} with id={id}");
}
@@ -929,13 +931,13 @@ namespace Umbraco.Web.PublishedCache.NuCache
{
if (content.ParentContentId < 0)
{
var root = GetLinkedNodeGen(_root, gen);
var root = GetLinkedNodeGen(_root, gen);
return root;
}
if (_contentNodes.TryGetValue(content.ParentContentId, out var link))
link = GetLinkedNodeGen(link, gen);
return link;
}
@@ -1154,6 +1156,15 @@ namespace Umbraco.Web.PublishedCache.NuCache
}
}
private void SetContentTypeLocked(IPublishedContentType type)
{
SetValueLocked(_contentTypesById, type.Id, type);
SetValueLocked(_contentTypesByAlias, type.Alias, type);
// ensure the key/id map is accurate
if (type.TryGetKey(out var key))
_contentTypeKeyToIdMap[key] = type.Id;
}
// set a node (just the node, not the tree)
private void SetValueLocked<TKey, TValue>(ConcurrentDictionary<TKey, LinkedNode<TValue>> dict, TKey key, TValue value)
where TValue : class
@@ -1211,14 +1222,14 @@ namespace Umbraco.Web.PublishedCache.NuCache
public ContentNode Get(Guid uid, long gen)
{
return _xmap.TryGetValue(uid, out var id)
return _contentKeyToIdMap.TryGetValue(uid, out var id)
? GetValue(_contentNodes, id, gen)
: null;
}
public IEnumerable<ContentNode> GetAtRoot(long gen)
{
var root = GetLinkedNodeGen(_root, gen);
var root = GetLinkedNodeGen(_root, gen);
if (root == null)
yield break;
@@ -1274,13 +1285,20 @@ namespace Umbraco.Web.PublishedCache.NuCache
return GetValue(_contentTypesByAlias, alias, gen);
}
public IPublishedContentType GetContentType(Guid key, long gen)
{
if (!_contentTypeKeyToIdMap.TryGetValue(key, out var id))
return null;
return GetContentType(id, gen);
}
#endregion
#region Snapshots
public Snapshot CreateSnapshot()
{
lock(_rlocko)
lock (_rlocko)
{
// if no next generation is required, and we already have one,
// use it and create a new snapshot
@@ -1362,7 +1380,11 @@ namespace Umbraco.Web.PublishedCache.NuCache
{
_collectTask = null;
}
}, TaskContinuationOptions.ExecuteSynchronously);
},
CancellationToken.None,
TaskContinuationOptions.ExecuteSynchronously,
// Must explicitly specify this, see https://blog.stephencleary.com/2013/10/continuewith-is-dangerous-too.html
TaskScheduler.Default);
// ReSharper restore InconsistentlySynchronizedField
return task;
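For context on the continuation change above: without an explicit scheduler, ContinueWith runs on TaskScheduler.Current, which can be an unexpected ambient scheduler. A minimal sketch of the four-argument overload used here (the continuation body is illustrative only):

    // Explicitly pass CancellationToken.None and TaskScheduler.Default so the
    // continuation never inherits an ambient TaskScheduler.Current.
    Task ContinueCollect(Task previous) =>
        previous.ContinueWith(
            _ => { /* clear _collectTask, release snapshots, etc. */ },
            CancellationToken.None,
            TaskContinuationOptions.ExecuteSynchronously,
            TaskScheduler.Default);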
@@ -1606,6 +1628,13 @@ namespace Umbraco.Web.PublishedCache.NuCache
return _store.GetContentType(alias, _gen);
}
public IPublishedContentType GetContentType(Guid key)
{
if (_gen < 0)
throw new ObjectDisposedException("snapshot" /*+ " (" + _thisCount + ")"*/);
return _store.GetContentType(key, _gen);
}
// this code is here just so you don't try to implement it
// the only way we can iterate over "all" without locking the entire cache forever
// is by shallow cloning the cache, which is quite expensive, so we should probably not do it,

View File

@@ -3,10 +3,22 @@ using CSharpTest.Net.Serialization;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
class ContentDataSerializer : ISerializer<ContentData>
/// <summary>
/// Serializes/Deserializes data to BTree data source for <see cref="ContentData"/>
/// </summary>
internal class ContentDataSerializer : ISerializer<ContentData>
{
private static readonly DictionaryOfPropertyDataSerializer PropertiesSerializer = new DictionaryOfPropertyDataSerializer();
private static readonly DictionaryOfCultureVariationSerializer CultureVariationsSerializer = new DictionaryOfCultureVariationSerializer();
public ContentDataSerializer(IDictionaryOfPropertyDataSerializer dictionaryOfPropertyDataSerializer = null)
{
_dictionaryOfPropertyDataSerializer = dictionaryOfPropertyDataSerializer;
if(_dictionaryOfPropertyDataSerializer == null)
{
_dictionaryOfPropertyDataSerializer = DefaultPropertiesSerializer;
}
}
private static readonly DictionaryOfPropertyDataSerializer DefaultPropertiesSerializer = new DictionaryOfPropertyDataSerializer();
private static readonly DictionaryOfCultureVariationSerializer DefaultCultureVariationsSerializer = new DictionaryOfCultureVariationSerializer();
private readonly IDictionaryOfPropertyDataSerializer _dictionaryOfPropertyDataSerializer;
public ContentData ReadFrom(Stream stream)
{
@@ -19,8 +31,8 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
VersionDate = PrimitiveSerializer.DateTime.ReadFrom(stream),
WriterId = PrimitiveSerializer.Int32.ReadFrom(stream),
TemplateId = PrimitiveSerializer.Int32.ReadFrom(stream),
Properties = PropertiesSerializer.ReadFrom(stream), // TODO: We don't want to allocate empty arrays
CultureInfos = CultureVariationsSerializer.ReadFrom(stream) // TODO: We don't want to allocate empty arrays
Properties = _dictionaryOfPropertyDataSerializer.ReadFrom(stream), // TODO: We don't want to allocate empty arrays
CultureInfos = DefaultCultureVariationsSerializer.ReadFrom(stream) // TODO: We don't want to allocate empty arrays
};
}
@@ -36,8 +48,8 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
PrimitiveSerializer.Int32.WriteTo(value.TemplateId.Value, stream);
}
PropertiesSerializer.WriteTo(value.Properties, stream);
CultureVariationsSerializer.WriteTo(value.CultureInfos, stream);
_dictionaryOfPropertyDataSerializer.WriteTo(value.Properties, stream);
DefaultCultureVariationsSerializer.WriteTo(value.CultureInfos, stream);
}
}
}

View File

@@ -5,7 +5,17 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
internal class ContentNodeKitSerializer : ISerializer<ContentNodeKit>
{
static readonly ContentDataSerializer DataSerializer = new ContentDataSerializer();
public ContentNodeKitSerializer(ContentDataSerializer contentDataSerializer = null)
{
_contentDataSerializer = contentDataSerializer;
if(_contentDataSerializer == null)
{
_contentDataSerializer = DefaultDataSerializer;
}
}
static readonly ContentDataSerializer DefaultDataSerializer = new ContentDataSerializer();
private readonly ContentDataSerializer _contentDataSerializer;
//static readonly ListOfIntSerializer ChildContentIdsSerializer = new ListOfIntSerializer();
public ContentNodeKit ReadFrom(Stream stream)
@@ -26,10 +36,10 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
};
var hasDraft = PrimitiveSerializer.Boolean.ReadFrom(stream);
if (hasDraft)
kit.DraftData = DataSerializer.ReadFrom(stream);
kit.DraftData = _contentDataSerializer.ReadFrom(stream);
var hasPublished = PrimitiveSerializer.Boolean.ReadFrom(stream);
if (hasPublished)
kit.PublishedData = DataSerializer.ReadFrom(stream);
kit.PublishedData = _contentDataSerializer.ReadFrom(stream);
return kit;
}
@@ -47,11 +57,11 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
PrimitiveSerializer.Boolean.WriteTo(value.DraftData != null, stream);
if (value.DraftData != null)
DataSerializer.WriteTo(value.DraftData, stream);
_contentDataSerializer.WriteTo(value.DraftData, stream);
PrimitiveSerializer.Boolean.WriteTo(value.PublishedData != null, stream);
if (value.PublishedData != null)
DataSerializer.WriteTo(value.PublishedData, stream);
_contentDataSerializer.WriteTo(value.PublishedData, stream);
}
}
}

View File

@@ -6,6 +6,9 @@ using Umbraco.Core;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
/// <summary>
/// Serializes/Deserializes culture variant data as a dictionary for BTree
/// </summary>
internal class DictionaryOfCultureVariationSerializer : SerializerBase, ISerializer<IReadOnlyDictionary<string, CultureVariation>>
{
public IReadOnlyDictionary<string, CultureVariation> ReadFrom(Stream stream)

View File

@@ -6,7 +6,10 @@ using Umbraco.Core;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
internal class DictionaryOfPropertyDataSerializer : SerializerBase, ISerializer<IDictionary<string, PropertyData[]>>
/// <summary>
/// Serializes/Deserializes property data as a dictionary for BTree
/// </summary>
internal class DictionaryOfPropertyDataSerializer : SerializerBase, ISerializer<IDictionary<string, PropertyData[]>>, IDictionaryOfPropertyDataSerializer
{
public IDictionary<string, PropertyData[]> ReadFrom(Stream stream)
{

View File

@@ -6,10 +6,10 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
internal class BTree
{
public static BPlusTree<int, ContentNodeKit> GetTree(string filepath, bool exists)
public static BPlusTree<int, ContentNodeKit> GetTree(string filepath, bool exists, ContentDataSerializer contentDataSerializer = null)
{
var keySerializer = new PrimitiveSerializer();
var valueSerializer = new ContentNodeKitSerializer();
var valueSerializer = new ContentNodeKitSerializer(contentDataSerializer);
var options = new BPlusTree<int, ContentNodeKit>.OptionsV2(keySerializer, valueSerializer)
{
CreateFile = exists ? CreatePolicy.IfNeeded : CreatePolicy.Always,
@@ -30,6 +30,7 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
//btree.
return tree;
}
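To show how the new optional serializer parameters compose, a hedged sketch of opening the local BTree with an explicitly supplied property-data serializer (localDbPath is an assumed variable; in practice both optional parameters can simply be omitted to use the defaults):

    // DictionaryOfPropertyDataSerializer is the default IDictionaryOfPropertyDataSerializer;
    // a custom implementation could be passed instead, e.g. one that compresses property data.
    var contentDataSerializer = new ContentDataSerializer(new DictionaryOfPropertyDataSerializer());
    var tree = BTree.GetTree(localDbPath, exists: File.Exists(localDbPath), contentDataSerializer);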
private static int GetBlockSize()

View File

@@ -7,10 +7,10 @@ using Umbraco.Core.Serialization;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
/// <summary>
/// The content item 1:M data that is serialized to JSON
/// The content model stored in the content cache database table serialized as JSON
/// </summary>
[DataContract] // NOTE: Use DataContract annotations here to control how MessagePack serializes/deserializes the data to use INT keys
public class ContentNestedData
public class ContentCacheDataModel
{
// TODO: We don't want to allocate empty arrays
//dont serialize empty properties
@@ -29,17 +29,17 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
public string UrlSegment { get; set; }
//Legacy properties used to deserialize existing nucache db entries
[DataMember(Order = 3)]
[IgnoreDataMember]
[JsonProperty("properties")]
[JsonConverter(typeof(CaseInsensitiveDictionaryConverter<PropertyData[]>))]
private Dictionary<string, PropertyData[]> LegacyPropertyData { set { PropertyData = value; } }
[DataMember(Order = 4)]
[IgnoreDataMember]
[JsonProperty("cultureData")]
[JsonConverter(typeof(CaseInsensitiveDictionaryConverter<CultureVariation>))]
private Dictionary<string, CultureVariation> LegacyCultureData { set { CultureData = value; } }
[DataMember(Order = 5)]
[IgnoreDataMember]
[JsonProperty("urlSegment")]
private string LegacyUrlSegment { set { UrlSegment = value; } }
}

View File

@@ -0,0 +1,51 @@
using System;
using System.Collections.Generic;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
/// <summary>
/// The serialization result from <see cref="IContentCacheDataSerializer"/> for which the serialized value
/// will be either a string or a byte[]
/// </summary>
public struct ContentCacheDataSerializationResult : IEquatable<ContentCacheDataSerializationResult>
{
public ContentCacheDataSerializationResult(string stringData, byte[] byteData)
{
StringData = stringData;
ByteData = byteData;
}
public string StringData { get; }
public byte[] ByteData { get; }
public override bool Equals(object obj)
{
return obj is ContentCacheDataSerializationResult result && Equals(result);
}
public bool Equals(ContentCacheDataSerializationResult other)
{
return StringData == other.StringData &&
EqualityComparer<byte[]>.Default.Equals(ByteData, other.ByteData);
}
public override int GetHashCode()
{
var hashCode = 1910544615;
hashCode = hashCode * -1521134295 + EqualityComparer<string>.Default.GetHashCode(StringData);
hashCode = hashCode * -1521134295 + EqualityComparer<byte[]>.Default.GetHashCode(ByteData);
return hashCode;
}
public static bool operator ==(ContentCacheDataSerializationResult left, ContentCacheDataSerializationResult right)
{
return left.Equals(right);
}
public static bool operator !=(ContentCacheDataSerializationResult left, ContentCacheDataSerializationResult right)
{
return !(left == right);
}
}
}
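As a short illustration of the two-sided result, the JSON serializer later in this diff fills the string side, while the MessagePack serializer is expected to fill the byte side (json and bytes are placeholder variables):

    // JSON-based serializer: string payload, no raw bytes
    var jsonResult = new ContentCacheDataSerializationResult(json, null);
    // MessagePack-based serializer: raw byte payload, no string
    var msgPackResult = new ContentCacheDataSerializationResult(null, bytes);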

View File

@@ -0,0 +1,13 @@
using System;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
[Flags]
public enum ContentCacheDataSerializerEntityType
{
Document = 1,
Media = 2,
Member = 4
}
}

View File

@@ -3,7 +3,9 @@ using System.Collections.Generic;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
// represents everything that is specific to edited or published version
/// <summary>
/// Represents everything that is specific to an edited or published content version
/// </summary>
internal class ContentData
{
public string Name { get; set; }

View File

@@ -1,12 +1,13 @@
using System;
using Umbraco.Core.Models;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
// read-only dto
internal class ContentSourceDto
internal class ContentSourceDto : IReadOnlyContentBase
{
public int Id { get; set; }
public Guid Uid { get; set; }
public Guid Key { get; set; }
public int ContentTypeId { get; set; }
public int Level { get; set; }
@@ -27,6 +28,7 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
public int EditWriterId { get; set; }
public int EditTemplateId { get; set; }
public string EditData { get; set; }
public byte[] EditDataRaw { get; set; }
// published data
public int PublishedVersionId { get; set; }
@@ -35,5 +37,11 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
public int PubWriterId { get; set; }
public int PubTemplateId { get; set; }
public string PubData { get; set; }
public byte[] PubDataRaw { get; set; }
// Explicit implementation
DateTime IReadOnlyContentBase.UpdateDate => EditVersionDate;
string IReadOnlyContentBase.Name => EditName;
int IReadOnlyContentBase.WriterId => EditWriterId;
}
}

View File

@@ -27,19 +27,19 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
public bool IsDraft { get; set; }
//Legacy properties used to deserialize existing nucache db entries
[DataMember(Order = 4)]
[IgnoreDataMember]
[JsonProperty("name")]
private string LegacyName { set { Name = value; } }
[DataMember(Order = 5)]
[IgnoreDataMember]
[JsonProperty("urlSegment")]
private string LegacyUrlSegment { set { UrlSegment = value; } }
[DataMember(Order = 6)]
[IgnoreDataMember]
[JsonProperty("date")]
private DateTime LegacyDate { set { Date = value; } }
[DataMember(Order = 7)]
[IgnoreDataMember]
[JsonProperty("isDraft")]
private bool LegacyIsDraft { set { IsDraft = value; } }
}

View File

@@ -2,14 +2,12 @@
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using Newtonsoft.Json;
using NPoco;
using Umbraco.Core;
using Umbraco.Core.Logging;
using Umbraco.Core.Persistence;
using Umbraco.Core.Persistence.Dtos;
using Umbraco.Core.Scoping;
using Umbraco.Core.Serialization;
using Umbraco.Web.Composing;
using static Umbraco.Core.Persistence.NPocoSqlExtensions.Statics;
@@ -21,20 +19,19 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
internal class DatabaseDataSource : IDataSource
{
private const int PageSize = 500;
private readonly IContentNestedDataSerializer _contentNestedDataSerializer;
private readonly IContentCacheDataSerializerFactory _contentCacheDataSerializerFactory;
public DatabaseDataSource(IContentNestedDataSerializer contentNestedDataSerializer)
public DatabaseDataSource(IContentCacheDataSerializerFactory contentCacheDataSerializerFactory)
{
_contentNestedDataSerializer = contentNestedDataSerializer;
_contentCacheDataSerializerFactory = contentCacheDataSerializerFactory;
}
// we want arrays, we want them all loaded, not an enumerable
private Sql<ISqlContext> ContentSourcesSelect(IScope scope, Func<Sql<ISqlContext>, Sql<ISqlContext>> joins = null)
private Sql<ISqlContext> SqlContentSourcesSelect(IScope scope, Func<ISqlContext, Sql<ISqlContext>> joins = null)
{
var sql = scope.SqlContext.Sql()
.Select<NodeDto>(x => Alias(x.NodeId, "Id"), x => Alias(x.UniqueId, "Key"),
var sqlTemplate = scope.SqlContext.Templates.Get(Constants.SqlTemplates.NuCacheDatabaseDataSource.ContentSourcesSelect, tsql =>
tsql.Select<NodeDto>(x => Alias(x.NodeId, "Id"), x => Alias(x.UniqueId, "Key"),
x => Alias(x.Level, "Level"), x => Alias(x.Path, "Path"), x => Alias(x.SortOrder, "SortOrder"), x => Alias(x.ParentId, "ParentId"),
x => Alias(x.CreateDate, "CreateDate"), x => Alias(x.UserId, "CreatorId"))
.AndSelect<ContentDto>(x => Alias(x.ContentTypeId, "ContentTypeId"))
@@ -49,10 +46,17 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
.AndSelect<ContentNuDto>("nuEdit", x => Alias(x.Data, "EditData"))
.AndSelect<ContentNuDto>("nuPub", x => Alias(x.Data, "PubData"))
.From<NodeDto>();
.AndSelect<ContentNuDto>("nuEdit", x => Alias(x.RawData, "EditDataRaw"))
.AndSelect<ContentNuDto>("nuPub", x => Alias(x.RawData, "PubDataRaw"))
.From<NodeDto>());
var sql = sqlTemplate.Sql();
// TODO: I'm unsure how we can format the below into SQL templates also because right.Current and right.Published end up being parameters
if (joins != null)
sql = joins(sql);
sql = sql.Append(joins(sql.SqlContext));
sql = sql
.InnerJoin<ContentDto>().On<NodeDto, ContentDto>((left, right) => left.NodeId == right.NodeId)
@@ -62,85 +66,118 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
.InnerJoin<DocumentVersionDto>().On<ContentVersionDto, DocumentVersionDto>((left, right) => left.Id == right.Id)
.LeftJoin<ContentVersionDto>(j =>
j.InnerJoin<DocumentVersionDto>("pdver").On<ContentVersionDto, DocumentVersionDto>((left, right) => left.Id == right.Id && right.Published, "pcver", "pdver"), "pcver")
j.InnerJoin<DocumentVersionDto>("pdver").On<ContentVersionDto, DocumentVersionDto>((left, right) => left.Id == right.Id && right.Published == true, "pcver", "pdver"), "pcver")
.On<NodeDto, ContentVersionDto>((left, right) => left.NodeId == right.NodeId, aliasRight: "pcver")
.LeftJoin<ContentNuDto>("nuEdit").On<NodeDto, ContentNuDto>((left, right) => left.NodeId == right.NodeId && !right.Published, aliasRight: "nuEdit")
.LeftJoin<ContentNuDto>("nuPub").On<NodeDto, ContentNuDto>((left, right) => left.NodeId == right.NodeId && right.Published, aliasRight: "nuPub");
.LeftJoin<ContentNuDto>("nuEdit").On<NodeDto, ContentNuDto>((left, right) => left.NodeId == right.NodeId && right.Published == false, aliasRight: "nuEdit")
.LeftJoin<ContentNuDto>("nuPub").On<NodeDto, ContentNuDto>((left, right) => left.NodeId == right.NodeId && right.Published == true, aliasRight: "nuPub");
return sql;
}
public ContentNodeKit GetContentSource(IScope scope, int id)
private Sql<ISqlContext> SqlContentSourcesSelectUmbracoNodeJoin(ISqlContext sqlContext)
{
var sql = ContentSourcesSelect(scope)
.Where<NodeDto>(x => x.NodeObjectType == Constants.ObjectTypes.Document && x.NodeId == id && !x.Trashed)
.OrderBy<NodeDto>(x => x.Level, x => x.ParentId, x => x.SortOrder);
var syntax = sqlContext.SqlSyntax;
var dto = scope.Database.Fetch<ContentSourceDto>(sql).FirstOrDefault();
return dto == null ? new ContentNodeKit() : CreateContentNodeKit(dto);
var sqlTemplate = sqlContext.Templates.Get(Constants.SqlTemplates.NuCacheDatabaseDataSource.SourcesSelectUmbracoNodeJoin, builder =>
builder.InnerJoin<NodeDto>("x")
.On<NodeDto, NodeDto>((left, right) => left.NodeId == right.NodeId || SqlText<bool>(left.Path, right.Path, (lp, rp) => $"({lp} LIKE {syntax.GetConcat(rp, "',%'")})"), aliasRight: "x"));
var sql = sqlTemplate.Sql();
return sql;
}
public IEnumerable<ContentNodeKit> GetAllContentSources(IScope scope)
private Sql<ISqlContext> SqlWhereNodeId(ISqlContext sqlContext, int id)
{
var sql = ContentSourcesSelect(scope)
.Where<NodeDto>(x => x.NodeObjectType == Constants.ObjectTypes.Document && !x.Trashed)
.OrderBy<NodeDto>(x => x.Level, x => x.ParentId, x => x.SortOrder);
var syntax = sqlContext.SqlSyntax;
// We need to page here. We don't want to iterate over every single row in one connection cuz this can cause an SQL Timeout.
// We also want to read with a db reader and not load everything into memory, QueryPaged lets us do that.
var sqlTemplate = sqlContext.Templates.Get(Constants.SqlTemplates.NuCacheDatabaseDataSource.WhereNodeId, builder =>
builder.Where<NodeDto>(x => x.NodeId == SqlTemplate.Arg<int>("id")));
foreach (var row in scope.Database.QueryPaged<ContentSourceDto>(PageSize, sql))
yield return CreateContentNodeKit(row);
var sql = sqlTemplate.Sql(id);
return sql;
}
public IEnumerable<ContentNodeKit> GetBranchContentSources(IScope scope, int id)
private Sql<ISqlContext> SqlWhereNodeIdX(ISqlContext sqlContext, int id)
{
var syntax = scope.SqlContext.SqlSyntax;
var sql = ContentSourcesSelect(scope,
s => s.InnerJoin<NodeDto>("x").On<NodeDto, NodeDto>((left, right) => left.NodeId == right.NodeId || SqlText<bool>(left.Path, right.Path, (lp, rp) => $"({lp} LIKE {syntax.GetConcat(rp, "',%'")})"), aliasRight: "x"))
.Where<NodeDto>(x => x.NodeObjectType == Constants.ObjectTypes.Document && !x.Trashed)
.Where<NodeDto>(x => x.NodeId == id, "x")
.OrderBy<NodeDto>(x => x.Level, x => x.ParentId, x => x.SortOrder);
var syntax = sqlContext.SqlSyntax;
// We need to page here. We don't want to iterate over every single row in one connection cuz this can cause an SQL Timeout.
// We also want to read with a db reader and not load everything into memory, QueryPaged lets us do that.
var sqlTemplate = sqlContext.Templates.Get(Constants.SqlTemplates.NuCacheDatabaseDataSource.WhereNodeIdX, s =>
s.Where<NodeDto>(x => x.NodeId == SqlTemplate.Arg<int>("id"), "x"));
foreach (var row in scope.Database.QueryPaged<ContentSourceDto>(PageSize, sql))
yield return CreateContentNodeKit(row);
var sql = sqlTemplate.Sql(id);
return sql;
}
public IEnumerable<ContentNodeKit> GetTypeContentSources(IScope scope, IEnumerable<int> ids)
private Sql<ISqlContext> SqlOrderByLevelIdSortOrder(ISqlContext sqlContext)
{
if (!ids.Any()) yield break;
var syntax = sqlContext.SqlSyntax;
var sql = ContentSourcesSelect(scope)
.Where<NodeDto>(x => x.NodeObjectType == Constants.ObjectTypes.Document && !x.Trashed)
.WhereIn<ContentDto>(x => x.ContentTypeId, ids)
.OrderBy<NodeDto>(x => x.Level, x => x.ParentId, x => x.SortOrder);
var sqlTemplate = sqlContext.Templates.Get(Constants.SqlTemplates.NuCacheDatabaseDataSource.OrderByLevelIdSortOrder, s =>
s.OrderBy<NodeDto>(x => x.Level, x => x.ParentId, x => x.SortOrder));
// We need to page here. We don't want to iterate over every single row in one connection cuz this can cause an SQL Timeout.
// We also want to read with a db reader and not load everything into memory, QueryPaged lets us do that.
foreach (var row in scope.Database.QueryPaged<ContentSourceDto>(PageSize, sql))
yield return CreateContentNodeKit(row);
var sql = sqlTemplate.Sql();
return sql;
}
private Sql<ISqlContext> MediaSourcesSelect(IScope scope, Func<Sql<ISqlContext>, Sql<ISqlContext>> joins = null)
private Sql<ISqlContext> SqlObjectTypeNotTrashed(ISqlContext sqlContext, Guid nodeObjectType)
{
var sql = scope.SqlContext.Sql()
var syntax = sqlContext.SqlSyntax;
.Select<NodeDto>(x => Alias(x.NodeId, "Id"), x => Alias(x.UniqueId, "Key"),
var sqlTemplate = sqlContext.Templates.Get(Constants.SqlTemplates.NuCacheDatabaseDataSource.ObjectTypeNotTrashedFilter, s =>
s.Where<NodeDto>(x => x.NodeObjectType == SqlTemplate.Arg<Guid?>("nodeObjectType") && x.Trashed == SqlTemplate.Arg<bool>("trashed")));
var sql = sqlTemplate.Sql(nodeObjectType, false);
return sql;
}
/// <summary>
/// Returns a slightly more optimized query to use for the document counting when paging over the content sources
/// </summary>
/// <param name="scope"></param>
/// <returns></returns>
private Sql<ISqlContext> SqlContentSourcesCount(IScope scope, Func<ISqlContext, Sql<ISqlContext>> joins = null)
{
var sqlTemplate = scope.SqlContext.Templates.Get(Constants.SqlTemplates.NuCacheDatabaseDataSource.ContentSourcesCount, tsql =>
tsql.Select<NodeDto>(x => Alias(x.NodeId, "Id"))
.From<NodeDto>()
.InnerJoin<ContentDto>().On<NodeDto, ContentDto>((left, right) => left.NodeId == right.NodeId)
.InnerJoin<DocumentDto>().On<NodeDto, DocumentDto>((left, right) => left.NodeId == right.NodeId));
var sql = sqlTemplate.Sql();
if (joins != null)
sql = sql.Append(joins(sql.SqlContext));
// TODO: We can't use a template with this one because of the 'right.Current' and 'right.Published' ends up being a parameter so not sure how we can do that
sql = sql
.InnerJoin<ContentVersionDto>().On<NodeDto, ContentVersionDto>((left, right) => left.NodeId == right.NodeId && right.Current)
.InnerJoin<DocumentVersionDto>().On<ContentVersionDto, DocumentVersionDto>((left, right) => left.Id == right.Id)
.LeftJoin<ContentVersionDto>(j =>
j.InnerJoin<DocumentVersionDto>("pdver").On<ContentVersionDto, DocumentVersionDto>((left, right) => left.Id == right.Id && right.Published, "pcver", "pdver"), "pcver")
.On<NodeDto, ContentVersionDto>((left, right) => left.NodeId == right.NodeId, aliasRight: "pcver");
return sql;
}
private Sql<ISqlContext> SqlMediaSourcesSelect(IScope scope, Func<ISqlContext, Sql<ISqlContext>> joins = null)
{
var sqlTemplate = scope.SqlContext.Templates.Get(Constants.SqlTemplates.NuCacheDatabaseDataSource.MediaSourcesSelect, tsql =>
tsql.Select<NodeDto>(x => Alias(x.NodeId, "Id"), x => Alias(x.UniqueId, "Key"),
x => Alias(x.Level, "Level"), x => Alias(x.Path, "Path"), x => Alias(x.SortOrder, "SortOrder"), x => Alias(x.ParentId, "ParentId"),
x => Alias(x.CreateDate, "CreateDate"), x => Alias(x.UserId, "CreatorId"))
.AndSelect<ContentDto>(x => Alias(x.ContentTypeId, "ContentTypeId"))
.AndSelect<ContentVersionDto>(x => Alias(x.Id, "VersionId"), x => Alias(x.Text, "EditName"), x => Alias(x.VersionDate, "EditVersionDate"), x => Alias(x.UserId, "EditWriterId"))
.AndSelect<ContentNuDto>("nuEdit", x => Alias(x.Data, "EditData"))
.From<NodeDto>();
.AndSelect<ContentNuDto>("nuEdit", x => Alias(x.RawData, "EditDataRaw"))
.From<NodeDto>());
var sql = sqlTemplate.Sql();
if (joins != null)
sql = joins(sql);
sql = sql.Append(joins(sql.SqlContext));
// TODO: We can't use a template with this one because of the 'right.Published' ends up being a parameter so not sure how we can do that
sql = sql
.InnerJoin<ContentDto>().On<NodeDto, ContentDto>((left, right) => left.NodeId == right.NodeId)
.InnerJoin<ContentVersionDto>().On<NodeDto, ContentVersionDto>((left, right) => left.NodeId == right.NodeId && right.Current)
@@ -148,78 +185,215 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
return sql;
}
private Sql<ISqlContext> SqlMediaSourcesCount(IScope scope, Func<ISqlContext, Sql<ISqlContext>> joins = null)
{
var sqlTemplate = scope.SqlContext.Templates.Get(Constants.SqlTemplates.NuCacheDatabaseDataSource.MediaSourcesCount, tsql =>
tsql.Select<NodeDto>(x => Alias(x.NodeId, "Id")).From<NodeDto>());
var sql = sqlTemplate.Sql();
if (joins != null)
sql = sql.Append(joins(sql.SqlContext));
// TODO: We can't use a template with this one because of the 'right.Current' ends up being a parameter so not sure how we can do that
sql = sql
.InnerJoin<ContentDto>().On<NodeDto, ContentDto>((left, right) => left.NodeId == right.NodeId)
.InnerJoin<ContentVersionDto>().On<NodeDto, ContentVersionDto>((left, right) => left.NodeId == right.NodeId && right.Current);
return sql;
}
public ContentNodeKit GetContentSource(IScope scope, int id)
{
var sql = SqlContentSourcesSelect(scope)
.Append(SqlObjectTypeNotTrashed(scope.SqlContext, Constants.ObjectTypes.Document))
.Append(SqlWhereNodeId(scope.SqlContext, id))
.Append(SqlOrderByLevelIdSortOrder(scope.SqlContext));
var dto = scope.Database.Fetch<ContentSourceDto>(sql).FirstOrDefault();
if (dto == null) return ContentNodeKit.Empty;
var serializer = _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Document);
return CreateContentNodeKit(dto, serializer);
}
public IEnumerable<ContentNodeKit> GetAllContentSources(IScope scope)
{
var sql = SqlContentSourcesSelect(scope)
.Append(SqlObjectTypeNotTrashed(scope.SqlContext, Constants.ObjectTypes.Document))
.Append(SqlOrderByLevelIdSortOrder(scope.SqlContext));
// Use a more efficient COUNT query
var sqlCountQuery = SqlContentSourcesCount(scope)
.Append(SqlObjectTypeNotTrashed(scope.SqlContext, Constants.ObjectTypes.Document));
var sqlCount = scope.SqlContext.Sql("SELECT COUNT(*) FROM (").Append(sqlCountQuery).Append(") npoco_tbl");
var serializer = _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Document);
// We need to page here. We don't want to iterate over every single row in one connection cuz this can cause an SQL Timeout.
// We also want to read with a db reader and not load everything into memory, QueryPaged lets us do that.
foreach (var row in scope.Database.QueryPaged<ContentSourceDto>(PageSize, sql, sqlCount))
{
yield return CreateContentNodeKit(row, serializer);
}
}
public IEnumerable<ContentNodeKit> GetBranchContentSources(IScope scope, int id)
{
var sql = SqlContentSourcesSelect(scope, SqlContentSourcesSelectUmbracoNodeJoin)
.Append(SqlObjectTypeNotTrashed(scope.SqlContext, Constants.ObjectTypes.Document))
.Append(SqlWhereNodeIdX(scope.SqlContext, id))
.Append(SqlOrderByLevelIdSortOrder(scope.SqlContext));
// Use a more efficient COUNT query
var sqlCountQuery = SqlContentSourcesCount(scope, SqlContentSourcesSelectUmbracoNodeJoin)
.Append(SqlObjectTypeNotTrashed(scope.SqlContext, Constants.ObjectTypes.Document))
.Append(SqlWhereNodeIdX(scope.SqlContext, id));
var sqlCount = scope.SqlContext.Sql("SELECT COUNT(*) FROM (").Append(sqlCountQuery).Append(") npoco_tbl");
var serializer = _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Document);
// We need to page here. We don't want to iterate over every single row in one connection cuz this can cause an SQL Timeout.
// We also want to read with a db reader and not load everything into memory, QueryPaged lets us do that.
foreach (var row in scope.Database.QueryPaged<ContentSourceDto>(PageSize, sql, sqlCount))
{
yield return CreateContentNodeKit(row, serializer);
}
}
public IEnumerable<ContentNodeKit> GetTypeContentSources(IScope scope, IEnumerable<int> ids)
{
if (!ids.Any()) yield break;
var sql = SqlContentSourcesSelect(scope)
.Append(SqlObjectTypeNotTrashed(scope.SqlContext, Constants.ObjectTypes.Document))
.WhereIn<ContentDto>(x => x.ContentTypeId, ids)
.Append(SqlOrderByLevelIdSortOrder(scope.SqlContext));
// Use a more efficient COUNT query
var sqlCountQuery = SqlContentSourcesCount(scope)
.Append(SqlObjectTypeNotTrashed(scope.SqlContext, Constants.ObjectTypes.Document))
.WhereIn<ContentDto>(x => x.ContentTypeId, ids);
var sqlCount = scope.SqlContext.Sql("SELECT COUNT(*) FROM (").Append(sqlCountQuery).Append(") npoco_tbl");
var serializer = _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Document);
// We need to page here. We don't want to iterate over every single row in one connection cuz this can cause an SQL Timeout.
// We also want to read with a db reader and not load everything into memory, QueryPaged lets us do that.
foreach (var row in scope.Database.QueryPaged<ContentSourceDto>(PageSize, sql, sqlCount))
{
yield return CreateContentNodeKit(row, serializer);
}
}
public ContentNodeKit GetMediaSource(IScope scope, int id)
{
var sql = MediaSourcesSelect(scope)
.Where<NodeDto>(x => x.NodeObjectType == Constants.ObjectTypes.Media && x.NodeId == id && !x.Trashed)
.OrderBy<NodeDto>(x => x.Level, x => x.ParentId, x => x.SortOrder);
var sql = SqlMediaSourcesSelect(scope)
.Append(SqlObjectTypeNotTrashed(scope.SqlContext, Constants.ObjectTypes.Media))
.Append(SqlWhereNodeId(scope.SqlContext, id))
.Append(SqlOrderByLevelIdSortOrder(scope.SqlContext));
var dto = scope.Database.Fetch<ContentSourceDto>(sql).FirstOrDefault();
return dto == null ? new ContentNodeKit() : CreateMediaNodeKit(dto);
if (dto == null) return ContentNodeKit.Empty;
var serializer = _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Media);
return CreateMediaNodeKit(dto, serializer);
}
public IEnumerable<ContentNodeKit> GetAllMediaSources(IScope scope)
{
var sql = MediaSourcesSelect(scope)
.Where<NodeDto>(x => x.NodeObjectType == Constants.ObjectTypes.Media && !x.Trashed)
.OrderBy<NodeDto>(x => x.Level, x => x.ParentId, x => x.SortOrder);
var sql = SqlMediaSourcesSelect(scope)
.Append(SqlObjectTypeNotTrashed(scope.SqlContext, Constants.ObjectTypes.Media))
.Append(SqlOrderByLevelIdSortOrder(scope.SqlContext));
// Use a more efficient COUNT query
var sqlCountQuery = SqlMediaSourcesCount(scope)
.Append(SqlObjectTypeNotTrashed(scope.SqlContext, Constants.ObjectTypes.Media));
var sqlCount = scope.SqlContext.Sql("SELECT COUNT(*) FROM (").Append(sqlCountQuery).Append(") npoco_tbl");
var serializer = _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Media);
// We need to page here. We don't want to iterate over every single row in one connection cuz this can cause an SQL Timeout.
// We also want to read with a db reader and not load everything into memory, QueryPaged lets us do that.
foreach (var row in scope.Database.QueryPaged<ContentSourceDto>(PageSize, sql))
yield return CreateMediaNodeKit(row);
foreach (var row in scope.Database.QueryPaged<ContentSourceDto>(PageSize, sql, sqlCount))
{
yield return CreateMediaNodeKit(row, serializer);
}
}
public IEnumerable<ContentNodeKit> GetBranchMediaSources(IScope scope, int id)
{
var syntax = scope.SqlContext.SqlSyntax;
var sql = MediaSourcesSelect(scope,
s => s.InnerJoin<NodeDto>("x").On<NodeDto, NodeDto>((left, right) => left.NodeId == right.NodeId || SqlText<bool>(left.Path, right.Path, (lp, rp) => $"({lp} LIKE {syntax.GetConcat(rp, "',%'")})"), aliasRight: "x"))
.Where<NodeDto>(x => x.NodeObjectType == Constants.ObjectTypes.Media && !x.Trashed)
.Where<NodeDto>(x => x.NodeId == id, "x")
.OrderBy<NodeDto>(x => x.Level, x => x.ParentId, x => x.SortOrder);
var sql = SqlMediaSourcesSelect(scope, SqlContentSourcesSelectUmbracoNodeJoin)
.Append(SqlObjectTypeNotTrashed(scope.SqlContext, Constants.ObjectTypes.Media))
.Append(SqlWhereNodeIdX(scope.SqlContext, id))
.Append(SqlOrderByLevelIdSortOrder(scope.SqlContext));
// Use a more efficient COUNT query
var sqlCountQuery = SqlMediaSourcesCount(scope, SqlContentSourcesSelectUmbracoNodeJoin)
.Append(SqlObjectTypeNotTrashed(scope.SqlContext, Constants.ObjectTypes.Media))
.Append(SqlWhereNodeIdX(scope.SqlContext, id));
var sqlCount = scope.SqlContext.Sql("SELECT COUNT(*) FROM (").Append(sqlCountQuery).Append(") npoco_tbl");
var serializer = _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Media);
// We need to page here. We don't want to iterate over every single row in one connection cuz this can cause an SQL Timeout.
// We also want to read with a db reader and not load everything into memory, QueryPaged lets us do that.
foreach (var row in scope.Database.QueryPaged<ContentSourceDto>(PageSize, sql))
yield return CreateMediaNodeKit(row);
foreach (var row in scope.Database.QueryPaged<ContentSourceDto>(PageSize, sql, sqlCount))
{
yield return CreateMediaNodeKit(row, serializer);
}
}
public IEnumerable<ContentNodeKit> GetTypeMediaSources(IScope scope, IEnumerable<int> ids)
{
if (!ids.Any()) yield break;
var sql = MediaSourcesSelect(scope)
.Where<NodeDto>(x => x.NodeObjectType == Constants.ObjectTypes.Media && !x.Trashed)
var sql = SqlMediaSourcesSelect(scope)
.Append(SqlObjectTypeNotTrashed(scope.SqlContext, Constants.ObjectTypes.Media))
.WhereIn<ContentDto>(x => x.ContentTypeId, ids)
.OrderBy<NodeDto>(x => x.Level, x => x.ParentId, x => x.SortOrder);
.Append(SqlOrderByLevelIdSortOrder(scope.SqlContext));
// Use a more efficient COUNT query
var sqlCountQuery = SqlMediaSourcesCount(scope)
.Append(SqlObjectTypeNotTrashed(scope.SqlContext, Constants.ObjectTypes.Media))
.WhereIn<ContentDto>(x => x.ContentTypeId, ids);
var sqlCount = scope.SqlContext.Sql("SELECT COUNT(*) FROM (").Append(sqlCountQuery).Append(") npoco_tbl");
var serializer = _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Media);
// We need to page here. We don't want to iterate over every single row in one connection cuz this can cause an SQL Timeout.
// We also want to read with a db reader and not load everything into memory, QueryPaged lets us do that.
foreach (var row in scope.Database.QueryPaged<ContentSourceDto>(PageSize, sql))
yield return CreateMediaNodeKit(row);
foreach (var row in scope.Database.QueryPaged<ContentSourceDto>(PageSize, sql, sqlCount))
{
yield return CreateMediaNodeKit(row, serializer);
}
}
private ContentNodeKit CreateContentNodeKit(ContentSourceDto dto)
private ContentNodeKit CreateContentNodeKit(ContentSourceDto dto, IContentCacheDataSerializer serializer)
{
ContentData d = null;
ContentData p = null;
if (dto.Edited)
{
if (dto.EditData == null)
if (dto.EditData == null && dto.EditDataRaw == null)
{
if (Debugger.IsAttached)
throw new Exception("Missing cmsContentNu edited content for node " + dto.Id + ", consider rebuilding.");
throw new InvalidOperationException("Missing cmsContentNu edited content for node " + dto.Id + ", consider rebuilding.");
Current.Logger.Warn<DatabaseDataSource>("Missing cmsContentNu edited content for node {NodeId}, consider rebuilding.", dto.Id);
}
else
{
var nested = _contentNestedDataSerializer.Deserialize(dto.EditData);
var deserializedContent = serializer.Deserialize(dto, dto.EditData, dto.EditDataRaw);
d = new ContentData
{
@@ -229,41 +403,41 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
VersionId = dto.VersionId,
VersionDate = dto.EditVersionDate,
WriterId = dto.EditWriterId,
Properties = nested.PropertyData, // TODO: We don't want to allocate empty arrays
CultureInfos = nested.CultureData,
UrlSegment = nested.UrlSegment
Properties = deserializedContent.PropertyData, // TODO: We don't want to allocate empty arrays
CultureInfos = deserializedContent.CultureData,
UrlSegment = deserializedContent.UrlSegment
};
}
}
if (dto.Published)
{
if (dto.PubData == null)
if (dto.PubData == null && dto.PubDataRaw == null)
{
if (Debugger.IsAttached)
throw new Exception("Missing cmsContentNu published content for node " + dto.Id + ", consider rebuilding.");
throw new InvalidOperationException("Missing cmsContentNu published content for node " + dto.Id + ", consider rebuilding.");
Current.Logger.Warn<DatabaseDataSource>("Missing cmsContentNu published content for node {NodeId}, consider rebuilding.", dto.Id);
}
else
{
var nested = _contentNestedDataSerializer.Deserialize(dto.PubData);
var deserializedContent = serializer.Deserialize(dto, dto.PubData, dto.PubDataRaw);
p = new ContentData
{
Name = dto.PubName,
UrlSegment = nested.UrlSegment,
UrlSegment = deserializedContent.UrlSegment,
Published = true,
TemplateId = dto.PubTemplateId,
VersionId = dto.VersionId,
VersionDate = dto.PubVersionDate,
WriterId = dto.PubWriterId,
Properties = nested.PropertyData, // TODO: We don't want to allocate empty arrays
CultureInfos = nested.CultureData
Properties = deserializedContent.PropertyData, // TODO: We don't want to allocate empty arrays
CultureInfos = deserializedContent.CultureData
};
}
}
var n = new ContentNode(dto.Id, dto.Uid,
var n = new ContentNode(dto.Id, dto.Key,
dto.Level, dto.Path, dto.SortOrder, dto.ParentId, dto.CreateDate, dto.CreatorId);
var s = new ContentNodeKit
@@ -277,12 +451,12 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
return s;
}
private ContentNodeKit CreateMediaNodeKit(ContentSourceDto dto)
private ContentNodeKit CreateMediaNodeKit(ContentSourceDto dto, IContentCacheDataSerializer serializer)
{
if (dto.EditData == null)
throw new Exception("No data for media " + dto.Id);
if (dto.EditData == null && dto.EditDataRaw == null)
throw new InvalidOperationException("No data for media " + dto.Id);
var nested = _contentNestedDataSerializer.Deserialize(dto.EditData);
var deserializedMedia = serializer.Deserialize(dto, dto.EditData, dto.EditDataRaw);
var p = new ContentData
{
@@ -292,11 +466,11 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
VersionId = dto.VersionId,
VersionDate = dto.EditVersionDate,
WriterId = dto.CreatorId, // what-else?
Properties = nested.PropertyData, // TODO: We don't want to allocate empty arrays
CultureInfos = nested.CultureData
Properties = deserializedMedia.PropertyData, // TODO: We don't want to allocate empty arrays
CultureInfos = deserializedMedia.CultureData
};
var n = new ContentNode(dto.Id, dto.Uid,
var n = new ContentNode(dto.Id, dto.Key,
dto.Level, dto.Path, dto.SortOrder, dto.ParentId, dto.CreateDate, dto.CreatorId);
var s = new ContentNodeKit
@@ -309,6 +483,6 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
return s;
}
}
}

View File

@@ -0,0 +1,25 @@
using Umbraco.Core.Models;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
/// <summary>
/// Serializes/Deserializes <see cref="ContentCacheDataModel"/> document to the SQL Database as a string
/// </summary>
/// <remarks>
/// Resolved from the <see cref="IContentCacheDataSerializerFactory"/>. This cannot be resolved from DI.
/// </remarks>
public interface IContentCacheDataSerializer
{
/// <summary>
/// Deserialize the data into a <see cref="ContentCacheDataModel"/>
/// </summary>
ContentCacheDataModel Deserialize(IReadOnlyContentBase content, string stringData, byte[] byteData);
/// <summary>
/// Serializes the <see cref="ContentCacheDataModel"/>
/// </summary>
ContentCacheDataSerializationResult Serialize(IReadOnlyContentBase content, ContentCacheDataModel model);
}
}

View File

@@ -0,0 +1,16 @@
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
public interface IContentCacheDataSerializerFactory
{
/// <summary>
/// Gets or creates a new instance of <see cref="IContentCacheDataSerializer"/>
/// </summary>
/// <returns></returns>
/// <remarks>
/// This method may return the same instance; whether it does depends on the state of the application and whether any underlying data has changed.
/// This method may also be used to initialize anything before a serialization/deserialization session occurs.
/// </remarks>
IContentCacheDataSerializer Create(ContentCacheDataSerializerEntityType types);
}
}
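A hedged sketch of the intended call pattern, mirroring how DatabaseDataSource uses the factory elsewhere in this diff (the dto variable stands for a ContentSourceDto, which implements IReadOnlyContentBase):

    // Create (or reuse) a serializer for the entity type being processed; the enum is
    // [Flags], so types can in principle be combined, e.g. Document | Media.
    IContentCacheDataSerializer serializer =
        _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Document);

    // Deserialize whichever column is populated: the string column or the raw byte column.
    ContentCacheDataModel model = serializer.Deserialize(dto, dto.EditData, dto.EditDataRaw);

    // Serialize back; the result carries StringData or ByteData depending on the serializer.
    ContentCacheDataSerializationResult result = serializer.Serialize(dto, model);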

View File

@@ -1,14 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
public interface IContentNestedDataSerializer
{
ContentNestedData Deserialize(string data);
string Serialize(ContentNestedData nestedData);
}
}

View File

@@ -0,0 +1,11 @@
using System.Collections.Generic;
using System.IO;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
internal interface IDictionaryOfPropertyDataSerializer
{
IDictionary<string, PropertyData[]> ReadFrom(Stream stream);
void WriteTo(IDictionary<string, PropertyData[]> value, Stream stream);
}
}

View File

@@ -1,41 +1,42 @@
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection.Emit;
using System.Text;
using System.Threading.Tasks;
using Umbraco.Core.Models;
using Umbraco.Core.Serialization;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
internal class JsonContentNestedDataSerializer : IContentNestedDataSerializer
public class JsonContentNestedDataSerializer : IContentCacheDataSerializer
{
public ContentNestedData Deserialize(string data)
// by default JsonConvert will deserialize our numeric values as Int64
// which is bad, because they were Int32 in the database - take care
private readonly JsonSerializerSettings _jsonSerializerSettings = new JsonSerializerSettings
{
// by default JsonConvert will deserialize our numeric values as Int64
// which is bad, because they were Int32 in the database - take care
Converters = new List<JsonConverter> { new ForceInt32Converter() },
var settings = new JsonSerializerSettings
{
Converters = new List<JsonConverter> { new ForceInt32Converter() },
// Explicitly specify date handling so that it's consistent and follows the same date handling as MessagePack
DateParseHandling = DateParseHandling.DateTime,
DateFormatHandling = DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = DateTimeZoneHandling.Utc,
DateFormatString = "o"
};
// Explicitly specify date handling so that it's consistent and follows the same date handling as MessagePack
DateParseHandling = DateParseHandling.DateTime,
DateFormatHandling = DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = DateTimeZoneHandling.Utc,
DateFormatString = "o"
};
public ContentCacheDataModel Deserialize(IReadOnlyContentBase content, string stringData, byte[] byteData)
{
if (stringData == null && byteData != null)
throw new NotSupportedException($"{typeof(JsonContentNestedDataSerializer)} does not support byte[] serialization");
return JsonConvert.DeserializeObject<ContentNestedData>(data, settings);
return JsonConvert.DeserializeObject<ContentCacheDataModel>(stringData, _jsonSerializerSettings);
}
public string Serialize(ContentNestedData nestedData)
public ContentCacheDataSerializationResult Serialize(IReadOnlyContentBase content, ContentCacheDataModel model)
{
// note that numeric values (which are Int32) are serialized without their
// type (eg "value":1234) and JsonConvert by default deserializes them as Int64
return JsonConvert.SerializeObject(nestedData);
var json = JsonConvert.SerializeObject(model);
return new ContentCacheDataSerializationResult(json, null);
}
}
}

View File

@@ -0,0 +1,10 @@
using System;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
internal class JsonContentNestedDataSerializerFactory : IContentCacheDataSerializerFactory
{
private Lazy<JsonContentNestedDataSerializer> _serializer = new Lazy<JsonContentNestedDataSerializer>();
public IContentCacheDataSerializer Create(ContentCacheDataSerializerEntityType types) => _serializer.Value;
}
}

View File

@@ -0,0 +1,109 @@
using K4os.Compression.LZ4;
using System;
using System.Diagnostics;
using System.Text;
using Umbraco.Core.Exceptions;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
/// <summary>
/// Lazily decompresses an LZ4 Pickler compressed UTF8 string
/// </summary>
[DebuggerDisplay("{Display}")]
internal struct LazyCompressedString
{
private byte[] _bytes;
private string _str;
private readonly object _locker;
/// <summary>
/// Constructor
/// </summary>
/// <param name="bytes">LZ4 Pickle compressed UTF8 String</param>
public LazyCompressedString(byte[] bytes)
{
_locker = new object();
_bytes = bytes;
_str = null;
}
public byte[] GetBytes()
{
if (_bytes == null)
{
throw new InvalidOperationException("The bytes have already been expanded");
}
return _bytes;
}
/// <summary>
/// Returns the decompressed string from the bytes. Decompression only occurs once; the result is cached and the compressed bytes are released.
/// </summary>
/// <returns></returns>
/// <exception cref="InvalidOperationException">Throws if this is called more than once</exception>
public string DecompressString()
{
if (_str != null)
{
return _str;
}
lock (_locker)
{
if (_str != null)
{
// double check
return _str;
}
if (_bytes == null)
{
throw new InvalidOperationException("Bytes have already been cleared");
}
_str = Encoding.UTF8.GetString(LZ4Pickler.Unpickle(_bytes));
_bytes = null;
}
return _str;
}
/// <summary>
/// Used to display debugging output without forcing decompression (ToString() expands and clears the compressed bytes)
/// </summary>
private string Display
{
get
{
if (_str != null)
{
return $"Decompressed: {_str}";
}
lock (_locker)
{
if (_str != null)
{
// double check
return $"Decompressed: {_str}";
}
if (_bytes == null)
{
// This shouldn't happen
throw new PanicException("Bytes have already been cleared");
}
else
{
return $"Compressed Bytes: {_bytes.Length}";
}
}
}
}
public override string ToString() => DecompressString();
public static implicit operator string(LazyCompressedString l) => l.ToString();
}
}
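
A minimal usage sketch of the struct above (illustrative only; the compressed payload is produced here with the same LZ4Pickler API the serializer uses):

using System;
using System.Text;
using K4os.Compression.LZ4;

class LazyCompressedStringDemo
{
    static void Main()
    {
        // simulate what the MessagePack serializer stores for a compressed property value
        var compressed = LZ4Pickler.Pickle(Encoding.UTF8.GetBytes("<p>some rich text</p>"), LZ4Level.L00_FAST);

        var lazy = new LazyCompressedString(compressed);

        // the value stays compressed in memory until something reads it as a string
        Console.WriteLine(lazy.DecompressString()); // decompresses on first access
        Console.WriteLine(lazy.ToString());         // returns the cached string afterwards
    }
}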

View File

@@ -1,19 +1,28 @@
using MessagePack;
using MessagePack.Formatters;
using K4os.Compression.LZ4;
using MessagePack;
using MessagePack.Resolvers;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Umbraco.Core.Models;
using Umbraco.Core.PropertyEditors;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
internal class MsgPackContentNestedDataSerializer : IContentNestedDataSerializer
/// <summary>
/// Serializes/Deserializes <see cref="ContentCacheDataModel"/> document to the SQL Database as bytes using MessagePack
/// </summary>
public class MsgPackContentNestedDataSerializer : IContentCacheDataSerializer
{
private MessagePackSerializerOptions _options;
private readonly MessagePackSerializerOptions _options;
private readonly IPropertyCacheCompression _propertyOptions;
public MsgPackContentNestedDataSerializer()
public MsgPackContentNestedDataSerializer(IPropertyCacheCompression propertyOptions)
{
var defaultOptions = ContractlessStandardResolver.Options;
_propertyOptions = propertyOptions ?? throw new ArgumentNullException(nameof(propertyOptions));
var defaultOptions = ContractlessStandardResolver.Options;
var resolver = CompositeResolver.Create(
// TODO: We want to be able to intern the strings for aliases when deserializing like we do for Newtonsoft but I'm unsure exactly how
@@ -29,31 +38,92 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
_options = defaultOptions
.WithResolver(resolver)
.WithCompression(MessagePackCompression.Lz4BlockArray);
.WithCompression(MessagePackCompression.Lz4BlockArray);
}
public string ToJson(string serialized)
public string ToJson(byte[] bin)
{
var bin = Convert.FromBase64String(serialized);
var json = MessagePackSerializer.ConvertToJson(bin, _options);
return json;
}
// TODO: Instead of returning base64 it would be more ideal to avoid that translation entirely and just store/retrieve raw bytes
public ContentNestedData Deserialize(string data)
public ContentCacheDataModel Deserialize(IReadOnlyContentBase content, string stringData, byte[] byteData)
{
var bin = Convert.FromBase64String(data);
var obj = MessagePackSerializer.Deserialize<ContentNestedData>(bin, _options);
return obj;
if (byteData != null)
{
var cacheModel = MessagePackSerializer.Deserialize<ContentCacheDataModel>(byteData, _options);
Expand(content, cacheModel);
return cacheModel;
}
else if (stringData != null)
{
// NOTE: String (base64) data isn't really supported, but it's possible if used manually (e.g. in tests)
var bin = Convert.FromBase64String(stringData);
var cacheModel = MessagePackSerializer.Deserialize<ContentCacheDataModel>(bin, _options);
Expand(content, cacheModel);
return cacheModel;
}
else
{
return null;
}
}
public string Serialize(ContentNestedData nestedData)
public ContentCacheDataSerializationResult Serialize(IReadOnlyContentBase content, ContentCacheDataModel model)
{
var bin = MessagePackSerializer.Serialize(nestedData, _options);
return Convert.ToBase64String(bin);
Compress(content, model);
var bytes = MessagePackSerializer.Serialize(model, _options);
return new ContentCacheDataSerializationResult(null, bytes);
}
/// <summary>
/// Used during serialization to compress properties
/// </summary>
/// <param name="model"></param>
/// <remarks>
/// This effectively 'double compresses' property data. The MessagePack payload as a whole is already compressed,
/// but this goes a step further and compresses individual property values so that they are stored in the nucache file,
/// and held in memory, as compressed bytes until they are read and decompressed as a string for the front-end.
/// This can yield significant memory savings but may also slow down the first render of a page while decompression occurs.
/// </remarks>
private void Compress(IReadOnlyContentBase content, ContentCacheDataModel model)
{
foreach (var propertyAliasToData in model.PropertyData)
{
if (_propertyOptions.IsCompressed(content, propertyAliasToData.Key))
{
foreach (var property in propertyAliasToData.Value.Where(x => x.Value != null && x.Value is string))
{
property.Value = LZ4Pickler.Pickle(Encoding.UTF8.GetBytes((string)property.Value), LZ4Level.L00_FAST);
}
}
}
}
/// <summary>
/// Used during deserialization to map compressed property data to lazily expanded values
/// </summary>
/// <param name="nestedData"></param>
private void Expand(IReadOnlyContentBase content, ContentCacheDataModel nestedData)
{
foreach (var propertyAliasToData in nestedData.PropertyData)
{
if (_propertyOptions.IsCompressed(content, propertyAliasToData.Key))
{
foreach (var property in propertyAliasToData.Value.Where(x => x.Value != null))
{
if (property.Value is byte[] byteArrayValue)
{
property.Value = new LazyCompressedString(byteArrayValue);
}
}
}
}
}
//private class ContentNestedDataResolver : IFormatterResolver
//{
// // GetFormatter<T>'s get cost should be minimized so use type cache.

View File

@@ -0,0 +1,69 @@
using System.Collections.Concurrent;
using System.Collections.Generic;
using Umbraco.Core.Models;
using Umbraco.Core.PropertyEditors;
using Umbraco.Core.Services;
namespace Umbraco.Web.PublishedCache.NuCache.DataSource
{
internal class MsgPackContentNestedDataSerializerFactory : IContentCacheDataSerializerFactory
{
private readonly IContentTypeService _contentTypeService;
private readonly IMediaTypeService _mediaTypeService;
private readonly IMemberTypeService _memberTypeService;
private readonly PropertyEditorCollection _propertyEditors;
private readonly IPropertyCacheCompressionOptions _compressionOptions;
private readonly ConcurrentDictionary<(int, string), bool> _isCompressedCache = new ConcurrentDictionary<(int, string), bool>();
public MsgPackContentNestedDataSerializerFactory(
IContentTypeService contentTypeService,
IMediaTypeService mediaTypeService,
IMemberTypeService memberTypeService,
PropertyEditorCollection propertyEditors,
IPropertyCacheCompressionOptions compressionOptions)
{
_contentTypeService = contentTypeService;
_mediaTypeService = mediaTypeService;
_memberTypeService = memberTypeService;
_propertyEditors = propertyEditors;
_compressionOptions = compressionOptions;
}
public IContentCacheDataSerializer Create(ContentCacheDataSerializerEntityType types)
{
// Depending on which entity types are being requested, we need to look up those content types
// to initialize the compression options.
// We need to initialize these options now so that any required data lookups are completed up front and are not performed while the content cache
// is running DB queries, which would otherwise fail because we'd be querying with open readers.
// NOTE: The calls to GetAll() below should be cached if the data has not been changed.
var contentTypes = new Dictionary<int, IContentTypeComposition>();
if ((types & ContentCacheDataSerializerEntityType.Document) == ContentCacheDataSerializerEntityType.Document)
{
foreach (var ct in _contentTypeService.GetAll())
{
contentTypes[ct.Id] = ct;
}
}
if ((types & ContentCacheDataSerializerEntityType.Media) == ContentCacheDataSerializerEntityType.Media)
{
foreach (var ct in _mediaTypeService.GetAll())
{
contentTypes[ct.Id] = ct;
}
}
if ((types & ContentCacheDataSerializerEntityType.Member) == ContentCacheDataSerializerEntityType.Member)
{
foreach (var ct in _memberTypeService.GetAll())
{
contentTypes[ct.Id] = ct;
}
}
var compression = new PropertyCacheCompression(_compressionOptions, contentTypes, _propertyEditors, _isCompressedCache);
var serializer = new MsgPackContentNestedDataSerializer(compression);
return serializer;
}
}
}
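
A hedged sketch of how a caller is expected to use this factory (it mirrors the GetDto usage in PublishedSnapshotService shown later in this diff; usings/namespaces are omitted and the helper name is made up): one serializer is created per operation so the content type lookups happen up front, and the resulting string or byte payload is written to the ContentNuDto.

internal static class SerializerFactoryUsageSketch
{
    public static ContentNuDto BuildDto(
        IContentCacheDataSerializerFactory factory,
        IContentBase content,
        ContentCacheDataModel cacheModel,
        bool published)
    {
        // create one serializer per operation so content type lookups are done before any DB readers open
        var serializer = factory.Create(ContentCacheDataSerializerEntityType.Document);

        var result = serializer.Serialize(ReadOnlyContentBaseAdapter.Create(content), cacheModel);

        return new ContentNuDto
        {
            NodeId = content.Id,
            Published = published,
            Data = result.StringData,  // populated by the JSON serializer
            RawData = result.ByteData  // populated by the MessagePack serializer
        };
    }
}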

View File

@@ -38,21 +38,21 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
public object Value { get; set; }
// Legacy properties used to deserialize existing nucache db entries
[DataMember(Order = 3)]
[IgnoreDataMember]
[JsonProperty("culture")]
private string LegacyCulture
{
set => Culture = value;
}
[DataMember(Order = 4)]
[IgnoreDataMember]
[JsonProperty("seg")]
private string LegacySegment
{
set => Segment = value;
}
[DataMember(Order = 5)]
[IgnoreDataMember]
[JsonProperty("val")]
private object LegacyValue
{

View File

@@ -16,6 +16,14 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
private const char PrefixDouble = 'B';
private const char PrefixDateTime = 'D';
private const char PrefixByte = 'O';
private const char PrefixByteArray = 'A';
private const char PrefixCompressedStringByteArray = 'C';
private const char PrefixSignedByte = 'E';
private const char PrefixBool = 'M';
private const char PrefixGuid = 'G';
private const char PrefixTimeSpan = 'T';
private const char PrefixInt16 = 'Q';
private const char PrefixChar = 'R';
protected string ReadString(Stream stream) => PrimitiveSerializer.String.ReadFrom(stream);
protected int ReadInt(Stream stream) => PrimitiveSerializer.Int32.ReadFrom(stream);
@@ -23,8 +31,9 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
protected float ReadFloat(Stream stream) => PrimitiveSerializer.Float.ReadFrom(stream);
protected double ReadDouble(Stream stream) => PrimitiveSerializer.Double.ReadFrom(stream);
protected DateTime ReadDateTime(Stream stream) => PrimitiveSerializer.DateTime.ReadFrom(stream);
protected byte[] ReadByteArray(Stream stream) => PrimitiveSerializer.Bytes.ReadFrom(stream);
private T? ReadObject<T>(Stream stream, char t, Func<Stream, T> read)
private T? ReadStruct<T>(Stream stream, char t, Func<Stream, T> read)
where T : struct
{
var type = PrimitiveSerializer.Char.ReadFrom(stream);
@@ -39,26 +48,33 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
var type = PrimitiveSerializer.Char.ReadFrom(stream);
if (type == PrefixNull) return null;
if (type != PrefixString)
throw new NotSupportedException($"Cannot deserialize type '{type}', expected 'S'.");
throw new NotSupportedException($"Cannot deserialize type '{type}', expected '{PrefixString}'.");
return intern
? string.Intern(PrimitiveSerializer.String.ReadFrom(stream))
: PrimitiveSerializer.String.ReadFrom(stream);
}
protected int? ReadIntObject(Stream stream) => ReadObject(stream, PrefixInt32, ReadInt);
protected long? ReadLongObject(Stream stream) => ReadObject(stream, PrefixLong, ReadLong);
protected float? ReadFloatObject(Stream stream) => ReadObject(stream, PrefixFloat, ReadFloat);
protected double? ReadDoubleObject(Stream stream) => ReadObject(stream, PrefixDouble, ReadDouble);
protected DateTime? ReadDateTimeObject(Stream stream) => ReadObject(stream, PrefixDateTime, ReadDateTime);
protected int? ReadIntObject(Stream stream) => ReadStruct(stream, PrefixInt32, ReadInt);
protected long? ReadLongObject(Stream stream) => ReadStruct(stream, PrefixLong, ReadLong);
protected float? ReadFloatObject(Stream stream) => ReadStruct(stream, PrefixFloat, ReadFloat);
protected double? ReadDoubleObject(Stream stream) => ReadStruct(stream, PrefixDouble, ReadDouble);
protected DateTime? ReadDateTimeObject(Stream stream) => ReadStruct(stream, PrefixDateTime, ReadDateTime);
protected object ReadObject(Stream stream)
=> ReadObject(PrimitiveSerializer.Char.ReadFrom(stream), stream);
/// <summary>
/// Reads in a value based on its char type
/// </summary>
/// <param name="type"></param>
/// <param name="stream"></param>
/// <returns></returns>
/// <remarks>
/// This will incur boxing because the result is an object, even though in most cases the value will be a struct.
/// When the type is known, use the specific methods like <see cref="ReadInt(Stream)"/> instead.
/// </remarks>
protected object ReadObject(char type, Stream stream)
{
// NOTE: There is going to be a ton of boxing going on here, but I'm not sure we can avoid that because, inevitably, with our
// current model structure the value will need to end up being 'object' at some point anyway.
switch (type)
{
case PrefixNull:
@@ -81,11 +97,36 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
return PrimitiveSerializer.Double.ReadFrom(stream);
case PrefixDateTime:
return PrimitiveSerializer.DateTime.ReadFrom(stream);
case PrefixByteArray:
return PrimitiveSerializer.Bytes.ReadFrom(stream);
case PrefixSignedByte:
return PrimitiveSerializer.SByte.ReadFrom(stream);
case PrefixBool:
return PrimitiveSerializer.Boolean.ReadFrom(stream);
case PrefixGuid:
return PrimitiveSerializer.Guid.ReadFrom(stream);
case PrefixTimeSpan:
return PrimitiveSerializer.TimeSpan.ReadFrom(stream);
case PrefixInt16:
return PrimitiveSerializer.Int16.ReadFrom(stream);
case PrefixChar:
return PrimitiveSerializer.Char.ReadFrom(stream);
case PrefixCompressedStringByteArray:
return new LazyCompressedString(PrimitiveSerializer.Bytes.ReadFrom(stream));
default:
throw new NotSupportedException($"Cannot deserialize unknown type '{type}'.");
}
}
/// <summary>
/// Writes a value to the stream, ensuring its char type prefix is written before the value so it can be read back later
/// </summary>
/// <param name="value"></param>
/// <param name="stream"></param>
/// <remarks>
/// This method will incur boxing if the value is a struct. When the type is known, use the <see cref="PrimitiveSerializer"/>
/// to write the value directly.
/// </remarks>
protected void WriteObject(object value, Stream stream)
{
if (value == null)
@@ -137,6 +178,46 @@ namespace Umbraco.Web.PublishedCache.NuCache.DataSource
PrimitiveSerializer.Char.WriteTo(PrefixUInt32, stream);
PrimitiveSerializer.UInt32.WriteTo(uInt32Value, stream);
}
else if (value is byte[] byteArrayValue)
{
PrimitiveSerializer.Char.WriteTo(PrefixByteArray, stream);
PrimitiveSerializer.Bytes.WriteTo(byteArrayValue, stream);
}
else if (value is LazyCompressedString lazyCompressedString)
{
PrimitiveSerializer.Char.WriteTo(PrefixCompressedStringByteArray, stream);
PrimitiveSerializer.Bytes.WriteTo(lazyCompressedString.GetBytes(), stream);
}
else if (value is sbyte signedByteValue)
{
PrimitiveSerializer.Char.WriteTo(PrefixSignedByte, stream);
PrimitiveSerializer.SByte.WriteTo(signedByteValue, stream);
}
else if (value is bool boolValue)
{
PrimitiveSerializer.Char.WriteTo(PrefixBool, stream);
PrimitiveSerializer.Boolean.WriteTo(boolValue, stream);
}
else if (value is Guid guidValue)
{
PrimitiveSerializer.Char.WriteTo(PrefixGuid, stream);
PrimitiveSerializer.Guid.WriteTo(guidValue, stream);
}
else if (value is TimeSpan timespanValue)
{
PrimitiveSerializer.Char.WriteTo(PrefixTimeSpan, stream);
PrimitiveSerializer.TimeSpan.WriteTo(timespanValue, stream);
}
else if (value is short int16Value)
{
PrimitiveSerializer.Char.WriteTo(PrefixInt16, stream);
PrimitiveSerializer.Int16.WriteTo(int16Value, stream);
}
else if (value is char charValue)
{
PrimitiveSerializer.Char.WriteTo(PrefixChar, stream);
PrimitiveSerializer.Char.WriteTo(charValue, stream);
}
else
throw new NotSupportedException("Value type " + value.GetType().FullName + " cannot be serialized.");
}
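
For orientation, the tagged layout written above can be illustrated with the same PrimitiveSerializer helpers (a sketch only, assuming the CSharpTest.Net.Serialization namespace from the BPlusTree package; 'G' matches the PrefixGuid character defined at the top of this class):

using System;
using System.IO;
using CSharpTest.Net.Serialization;

class TaggedValueFormatSketch
{
    static void Main()
    {
        using (var ms = new MemoryStream())
        {
            var value = Guid.NewGuid();

            // each value is stored as a one-char type prefix followed by the primitive payload
            PrimitiveSerializer.Char.WriteTo('G', ms);
            PrimitiveSerializer.Guid.WriteTo(value, ms);

            ms.Position = 0;
            var prefix = PrimitiveSerializer.Char.ReadFrom(ms);   // 'G'
            var readBack = PrimitiveSerializer.Guid.ReadFrom(ms); // equals value
            Console.WriteLine($"{prefix} {readBack}");
        }
    }
}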

View File

@@ -11,7 +11,7 @@ using Umbraco.Web.PublishedCache.NuCache.Navigable;
namespace Umbraco.Web.PublishedCache.NuCache
{
internal class MediaCache : PublishedCacheBase, IPublishedMediaCache, INavigableData, IDisposable
internal class MediaCache : PublishedCacheBase, IPublishedMediaCache2, INavigableData, IDisposable
{
private readonly ContentStore.Snapshot _snapshot;
private readonly IVariationContextAccessor _variationContextAccessor;
@@ -155,15 +155,11 @@ namespace Umbraco.Web.PublishedCache.NuCache
#region Content types
public override IPublishedContentType GetContentType(int id)
{
return _snapshot.GetContentType(id);
}
public override IPublishedContentType GetContentType(int id) => _snapshot.GetContentType(id);
public override IPublishedContentType GetContentType(string alias)
{
return _snapshot.GetContentType(alias);
}
public override IPublishedContentType GetContentType(string alias) => _snapshot.GetContentType(alias);
public override IPublishedContentType GetContentType(Guid key) => _snapshot.GetContentType(key);
#endregion

View File

@@ -1,6 +1,7 @@
using System.Configuration;
using Umbraco.Core;
using Umbraco.Core.Composing;
using Umbraco.Core.PropertyEditors;
using Umbraco.Web.PublishedCache.NuCache.DataSource;
namespace Umbraco.Web.PublishedCache.NuCache
@@ -12,18 +13,22 @@ namespace Umbraco.Web.PublishedCache.NuCache
base.Compose(composition);
var serializer = ConfigurationManager.AppSettings["Umbraco.Web.PublishedCache.NuCache.Serializer"];
if (serializer == "MsgPack")
if (serializer != "MsgPack")
{
composition.Register<IContentNestedDataSerializer, MsgPackContentNestedDataSerializer>();
// TODO: This allows people to revert to the legacy serializer, by default it will be MessagePack
composition.RegisterUnique<IContentCacheDataSerializerFactory, JsonContentNestedDataSerializerFactory>();
}
else
{
composition.Register<IContentNestedDataSerializer, JsonContentNestedDataSerializer>();
composition.RegisterUnique<IContentCacheDataSerializerFactory, MsgPackContentNestedDataSerializerFactory>();
}
composition.RegisterUnique<IPropertyCacheCompressionOptions, NoopPropertyCacheCompressionOptions>();
composition.RegisterUnique(factory => new ContentDataSerializer(new DictionaryOfPropertyDataSerializer()));
// register the NuCache database data source
composition.Register<IDataSource, DatabaseDataSource>();
composition.RegisterUnique<IDataSource, DatabaseDataSource>();
// register the NuCache published snapshot service
// must register default options, required in the service ctor
@@ -34,5 +39,6 @@ namespace Umbraco.Web.PublishedCache.NuCache
// TODO: no NuCache health check yet
//composition.HealthChecks().Add<NuCacheIntegrityHealthCheck>();
}
}
}

View File

@@ -33,15 +33,16 @@ namespace Umbraco.Web.PublishedCache.NuCache
_urlSegment = ContentData.UrlSegment;
IsPreviewing = ContentData.Published == false;
var properties = new List<IPublishedProperty>();
var properties = new IPublishedProperty[_contentNode.ContentType.PropertyTypes.Count()];
int i = 0;
foreach (var propertyType in _contentNode.ContentType.PropertyTypes)
{
// add one property per property type - this is required for the indexing to work
// if contentData supplies pdatas, use them, else use null
contentData.Properties.TryGetValue(propertyType.Alias, out var pdatas); // else will be null
properties.Add(new Property(propertyType, this, pdatas, _publishedSnapshotAccessor));
properties[i++] = new Property(propertyType, this, pdatas, _publishedSnapshotAccessor);
}
PropertiesArray = properties.ToArray();
PropertiesArray = properties;
}
private string GetProfileNameById(int id)
@@ -297,7 +298,18 @@ namespace Umbraco.Web.PublishedCache.NuCache
throw new PanicException($"failed to get content with id={id}");
}
id = UnwrapIPublishedContent(content)._contentNode.NextSiblingContentId;
var next = UnwrapIPublishedContent(content)._contentNode.NextSiblingContentId;
#if DEBUG
// I've seen this happen, but I think it may have been caused by corrupt DB data resulting from my own
// bugs; leaving this here just in case we encounter it again while we're debugging.
if (next == id)
{
throw new PanicException($"The current content id {id} is the same as it's next sibling id {next}");
}
#endif
id = next;
}
}
}

View File

@@ -1,14 +1,10 @@
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using CSharpTest.Net.Collections;
using Newtonsoft.Json;
using Umbraco.Core;
using Umbraco.Core.Cache;
using Umbraco.Core.Configuration;
@@ -49,7 +45,8 @@ namespace Umbraco.Web.PublishedCache.NuCache
private readonly IPublishedModelFactory _publishedModelFactory;
private readonly IDefaultCultureAccessor _defaultCultureAccessor;
private readonly UrlSegmentProviderCollection _urlSegmentProviders;
private readonly IContentNestedDataSerializer _contentNestedDataSerializer;
private readonly IContentCacheDataSerializerFactory _contentCacheDataSerializerFactory;
private readonly ContentDataSerializer _contentDataSerializer;
// volatile because we read it with no lock
private volatile bool _isReady;
@@ -83,7 +80,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
IDataSource dataSource, IGlobalSettings globalSettings,
IEntityXmlSerializer entitySerializer,
IPublishedModelFactory publishedModelFactory,
UrlSegmentProviderCollection urlSegmentProviders, IContentNestedDataSerializer contentNestedDataSerializer)
UrlSegmentProviderCollection urlSegmentProviders, IContentCacheDataSerializerFactory contentCacheDataSerializerFactory, ContentDataSerializer contentDataSerializer = null)
: base(publishedSnapshotAccessor, variationContextAccessor)
{
//if (Interlocked.Increment(ref _singletonCheck) > 1)
@@ -100,7 +97,8 @@ namespace Umbraco.Web.PublishedCache.NuCache
_defaultCultureAccessor = defaultCultureAccessor;
_globalSettings = globalSettings;
_urlSegmentProviders = urlSegmentProviders;
_contentNestedDataSerializer = contentNestedDataSerializer;
_contentCacheDataSerializerFactory = contentCacheDataSerializerFactory;
_contentDataSerializer = contentDataSerializer;
// we need an Xml serializer here so that the member cache can support XPath,
// for members this is done by navigating the serialized-to-xml member
@@ -182,8 +180,8 @@ namespace Umbraco.Web.PublishedCache.NuCache
_localMediaDbExists = File.Exists(localMediaDbPath);
// if both local databases exist then GetTree will open them, else new databases will be created
_localContentDb = BTree.GetTree(localContentDbPath, _localContentDbExists);
_localMediaDb = BTree.GetTree(localMediaDbPath, _localMediaDbExists);
_localContentDb = BTree.GetTree(localContentDbPath, _localContentDbExists, _contentDataSerializer);
_localMediaDb = BTree.GetTree(localMediaDbPath, _localMediaDbExists, _contentDataSerializer);
_logger.Info<PublishedSnapshotService>("Registered with MainDom, localContentDbExists? {LocalContentDbExists}, localMediaDbExists? {LocalMediaDbExists}", _localContentDbExists, _localMediaDbExists);
}
@@ -385,10 +383,9 @@ namespace Umbraco.Web.PublishedCache.NuCache
// contentStore is wlocked (1 thread)
// content (and types) are read-locked
var contentTypes = _serviceContext.ContentTypeService.GetAll()
.Select(x => _publishedContentTypeFactory.CreateContentType(x));
var contentTypes = _serviceContext.ContentTypeService.GetAll().ToList();
_contentStore.SetAllContentTypesLocked(contentTypes);
_contentStore.SetAllContentTypesLocked(contentTypes.Select(x => _publishedContentTypeFactory.CreateContentType(x)));
using (_logger.TraceDuration<PublishedSnapshotService>("Loading content from database"))
{
@@ -1285,8 +1282,10 @@ namespace Umbraco.Web.PublishedCache.NuCache
var db = args.Scope.Database;
var content = (Content)args.Entity;
var serializer = _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Document);
// always refresh the edited data
OnRepositoryRefreshed(db, content, false);
OnRepositoryRefreshed(serializer, db, content, false);
// if unpublishing, remove published data from table
if (content.PublishedState == PublishedState.Unpublishing)
@@ -1294,37 +1293,42 @@ namespace Umbraco.Web.PublishedCache.NuCache
// if publishing, refresh the published data
else if (content.PublishedState == PublishedState.Publishing)
OnRepositoryRefreshed(db, content, true);
OnRepositoryRefreshed(serializer, db, content, true);
}
private void OnMediaRefreshedEntity(MediaRepository sender, MediaRepository.ScopedEntityEventArgs args)
{
var serializer = _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Media);
var db = args.Scope.Database;
var media = args.Entity;
// refresh the edited data
OnRepositoryRefreshed(db, media, false);
OnRepositoryRefreshed(serializer, db, media, false);
}
private void OnMemberRefreshedEntity(MemberRepository sender, MemberRepository.ScopedEntityEventArgs args)
{
var serializer = _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Member);
var db = args.Scope.Database;
var member = args.Entity;
// refresh the edited data
OnRepositoryRefreshed(db, member, false);
OnRepositoryRefreshed(serializer, db, member, false);
}
private void OnRepositoryRefreshed(IUmbracoDatabase db, IContentBase content, bool published)
private void OnRepositoryRefreshed(IContentCacheDataSerializer serializer, IUmbracoDatabase db, IContentBase content, bool published)
{
// use a custom SQL to update row version on each update
//db.InsertOrUpdate(dto);
var dto = GetDto(content, published);
var dto = GetDto(content, published, serializer);
db.InsertOrUpdate(dto,
"SET data=@data, rv=rv+1 WHERE nodeId=@id AND published=@published",
"SET data=@data, dataRaw=@dataRaw, rv=rv+1 WHERE nodeId=@id AND published=@published",
new
{
dataRaw = dto.RawData ?? Array.Empty<byte>(),
data = dto.Data,
id = dto.NodeId,
published = dto.Published
@@ -1373,7 +1377,7 @@ namespace Umbraco.Web.PublishedCache.NuCache
}
}
private ContentNuDto GetDto(IContentBase content, bool published)
private ContentNuDto GetDto(IContentBase content, bool published, IContentCacheDataSerializer serializer)
{
// should inject these in ctor
// BUT for the time being we decide not to support ConvertDbToXml/String
@@ -1445,18 +1449,21 @@ namespace Umbraco.Web.PublishedCache.NuCache
}
//the dictionary that will be serialized
var nestedData = new ContentNestedData
var contentCacheData = new ContentCacheDataModel
{
PropertyData = propertyData,
CultureData = cultureData,
UrlSegment = content.GetUrlSegment(_urlSegmentProviders)
};
var serialized = serializer.Serialize(ReadOnlyContentBaseAdapter.Create(content), contentCacheData);
var dto = new ContentNuDto
{
NodeId = content.Id,
Published = published,
Data = _contentNestedDataSerializer.Serialize(nestedData)
Data = serialized.StringData,
RawData = serialized.ByteData
};
//Core.Composing.Current.Logger.Debug<PublishedSnapshotService>(dto.Data);
@@ -1479,30 +1486,32 @@ namespace Umbraco.Web.PublishedCache.NuCache
public override void Rebuild()
{
_logger.Debug<PublishedSnapshotService>("Rebuilding...");
var serializer = _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Document | ContentCacheDataSerializerEntityType.Media | ContentCacheDataSerializerEntityType.Member);
using (var scope = _scopeProvider.CreateScope(repositoryCacheMode: RepositoryCacheMode.Scoped))
{
scope.ReadLock(Constants.Locks.ContentTree);
scope.ReadLock(Constants.Locks.MediaTree);
scope.ReadLock(Constants.Locks.MemberTree);
RebuildContentDbCacheLocked(scope, GetSqlPagingSize(), null);
RebuildMediaDbCacheLocked(scope, GetSqlPagingSize(), null);
RebuildMemberDbCacheLocked(scope, GetSqlPagingSize(), null);
RebuildContentDbCacheLocked(serializer, scope, GetSqlPagingSize(), null);
RebuildMediaDbCacheLocked(serializer, scope, GetSqlPagingSize(), null);
RebuildMemberDbCacheLocked(serializer, scope, GetSqlPagingSize(), null);
scope.Complete();
}
}
public void RebuildContentDbCache(int groupSize = DefaultSqlPagingSize, IEnumerable<int> contentTypeIds = null)
{
var serializer = _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Document);
using (var scope = _scopeProvider.CreateScope(repositoryCacheMode: RepositoryCacheMode.Scoped))
{
scope.ReadLock(Constants.Locks.ContentTree);
RebuildContentDbCacheLocked(scope, groupSize, contentTypeIds);
RebuildContentDbCacheLocked(serializer, scope, groupSize, contentTypeIds);
scope.Complete();
}
}
// assumes content tree lock
private void RebuildContentDbCacheLocked(IScope scope, int groupSize, IEnumerable<int> contentTypeIds)
private void RebuildContentDbCacheLocked(IContentCacheDataSerializer serializer, IScope scope, int groupSize, IEnumerable<int> contentTypeIds)
{
var contentTypeIdsA = contentTypeIds?.ToArray();
var contentObjectType = Constants.ObjectTypes.Document;
@@ -1549,11 +1558,11 @@ WHERE cmsContentNu.nodeId IN (
foreach (var c in descendants)
{
// always the edited version
items.Add(GetDto(c, false));
items.Add(GetDto(c, false, serializer));
// and also the published version if it makes any sense
if (c.Published)
items.Add(GetDto(c, true));
items.Add(GetDto(c, true, serializer));
count++;
}
@@ -1565,16 +1574,17 @@ WHERE cmsContentNu.nodeId IN (
public void RebuildMediaDbCache(int groupSize = DefaultSqlPagingSize, IEnumerable<int> contentTypeIds = null)
{
var serializer = _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Media);
using (var scope = _scopeProvider.CreateScope(repositoryCacheMode: RepositoryCacheMode.Scoped))
{
scope.ReadLock(Constants.Locks.MediaTree);
RebuildMediaDbCacheLocked(scope, groupSize, contentTypeIds);
RebuildMediaDbCacheLocked(serializer, scope, groupSize, contentTypeIds);
scope.Complete();
}
}
// assumes media tree lock
public void RebuildMediaDbCacheLocked(IScope scope, int groupSize, IEnumerable<int> contentTypeIds)
public void RebuildMediaDbCacheLocked(IContentCacheDataSerializer serializer, IScope scope, int groupSize, IEnumerable<int> contentTypeIds)
{
var contentTypeIdsA = contentTypeIds?.ToArray();
var mediaObjectType = Constants.ObjectTypes.Media;
@@ -1616,7 +1626,7 @@ WHERE cmsContentNu.nodeId IN (
{
// the tree is locked, counting and comparing to total is safe
var descendants = _mediaRepository.GetPage(query, pageIndex++, groupSize, out total, null, Ordering.By("Path"));
var items = descendants.Select(m => GetDto(m, false)).ToList();
var items = descendants.Select(m => GetDto(m, false, serializer)).ToList();
db.BulkInsertRecords(items);
processed += items.Count;
} while (processed < total);
@@ -1624,16 +1634,17 @@ WHERE cmsContentNu.nodeId IN (
public void RebuildMemberDbCache(int groupSize = DefaultSqlPagingSize, IEnumerable<int> contentTypeIds = null)
{
var serializer = _contentCacheDataSerializerFactory.Create(ContentCacheDataSerializerEntityType.Member);
using (var scope = _scopeProvider.CreateScope(repositoryCacheMode: RepositoryCacheMode.Scoped))
{
scope.ReadLock(Constants.Locks.MemberTree);
RebuildMemberDbCacheLocked(scope, groupSize, contentTypeIds);
RebuildMemberDbCacheLocked(serializer, scope, groupSize, contentTypeIds);
scope.Complete();
}
}
// assumes member tree lock
public void RebuildMemberDbCacheLocked(IScope scope, int groupSize, IEnumerable<int> contentTypeIds)
public void RebuildMemberDbCacheLocked(IContentCacheDataSerializer serializer, IScope scope, int groupSize, IEnumerable<int> contentTypeIds)
{
var contentTypeIdsA = contentTypeIds?.ToArray();
var memberObjectType = Constants.ObjectTypes.Member;
@@ -1674,7 +1685,7 @@ WHERE cmsContentNu.nodeId IN (
do
{
var descendants = _memberRepository.GetPage(query, pageIndex++, groupSize, out total, null, Ordering.By("Path"));
var items = descendants.Select(m => GetDto(m, false)).ToArray();
var items = descendants.Select(m => GetDto(m, false, serializer)).ToArray();
db.BulkInsertRecords(items);
processed += items.Length;
} while (processed < total);

View File

@@ -380,7 +380,11 @@ namespace Umbraco.Web.PublishedCache.NuCache
{
_collectTask = null;
}
}, TaskContinuationOptions.ExecuteSynchronously);
},
CancellationToken.None,
TaskContinuationOptions.ExecuteSynchronously,
// Must explicitly specify this, see https://blog.stephencleary.com/2013/10/continuewith-is-dangerous-too.html
TaskScheduler.Default);
// ReSharper restore InconsistentlySynchronizedField
return task;

View File

@@ -8,7 +8,7 @@ using Umbraco.Core.Xml;
namespace Umbraco.Web.PublishedCache
{
abstract class PublishedCacheBase : IPublishedCache
internal abstract class PublishedCacheBase : IPublishedCache2
{
public bool PreviewDefault { get; }
@@ -89,8 +89,8 @@ namespace Umbraco.Web.PublishedCache
}
public abstract IPublishedContentType GetContentType(int id);
public abstract IPublishedContentType GetContentType(string alias);
public abstract IPublishedContentType GetContentType(Guid key);
public virtual IEnumerable<IPublishedContent> GetByContentType(IPublishedContentType contentType)
{

View File

@@ -15,8 +15,10 @@ namespace Umbraco.Web.PublishedCache
/// <remarks>This cache is not snapshotted, so it refreshes any time things change.</remarks>
public class PublishedContentTypeCache
{
// NOTE: These are not concurrent dictionaries because all access is done within a lock
private readonly Dictionary<string, IPublishedContentType> _typesByAlias = new Dictionary<string, IPublishedContentType>();
private readonly Dictionary<int, IPublishedContentType> _typesById = new Dictionary<int, IPublishedContentType>();
private readonly Dictionary<Guid, int> _keyToIdMap = new Dictionary<Guid, int>();
private readonly IContentTypeService _contentTypeService;
private readonly IMediaTypeService _mediaTypeService;
private readonly IMemberTypeService _memberTypeService;
@@ -130,6 +132,42 @@ namespace Umbraco.Web.PublishedCache
}
}
/// <summary>
/// Gets a published content type.
/// </summary>
/// <param name="itemType">An item type.</param>
/// <param name="key">An key.</param>
/// <returns>The published content type corresponding to the item key.</returns>
public IPublishedContentType Get(PublishedItemType itemType, Guid key)
{
try
{
_lock.EnterUpgradeableReadLock();
if (_keyToIdMap.TryGetValue(key, out var id))
return Get(itemType, id);
var type = CreatePublishedContentType(itemType, key);
try
{
_lock.EnterWriteLock();
_keyToIdMap[key] = type.Id;
return _typesByAlias[GetAliasKey(type)] = _typesById[type.Id] = type;
}
finally
{
if (_lock.IsWriteLockHeld)
_lock.ExitWriteLock();
}
}
finally
{
if (_lock.IsUpgradeableReadLockHeld)
_lock.ExitUpgradeableReadLock();
}
}
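// The locking shape used above is a standard upgradeable-read pattern; a self-contained sketch of the
// same idea (illustrative only, not part of this change):
//
// using System;
// using System.Collections.Generic;
// using System.Threading;
//
// class KeyedCacheSketch
// {
//     private readonly ReaderWriterLockSlim _lock = new ReaderWriterLockSlim();
//     private readonly Dictionary<Guid, int> _map = new Dictionary<Guid, int>();
//
//     public int GetOrAdd(Guid key, Func<Guid, int> create)
//     {
//         try
//         {
//             // readers can run concurrently; only one upgradeable reader at a time
//             _lock.EnterUpgradeableReadLock();
//             if (_map.TryGetValue(key, out var id))
//                 return id;
//
//             try
//             {
//                 // upgrade only when the entry has to be created
//                 _lock.EnterWriteLock();
//                 return _map[key] = create(key);
//             }
//             finally
//             {
//                 if (_lock.IsWriteLockHeld)
//                     _lock.ExitWriteLock();
//             }
//         }
//         finally
//         {
//             if (_lock.IsUpgradeableReadLockHeld)
//                 _lock.ExitUpgradeableReadLock();
//         }
//     }
// }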
/// <summary>
/// Gets a published content type.
/// </summary>
@@ -152,7 +190,8 @@ namespace Umbraco.Web.PublishedCache
try
{
_lock.EnterWriteLock();
if (type.TryGetKey(out var key))
_keyToIdMap[key] = type.Id;
return _typesByAlias[aliasKey] = _typesById[type.Id] = type;
}
finally
@@ -188,7 +227,8 @@ namespace Umbraco.Web.PublishedCache
try
{
_lock.EnterWriteLock();
if (type.TryGetKey(out var key))
_keyToIdMap[key] = type.Id;
return _typesByAlias[GetAliasKey(type)] = _typesById[type.Id] = type;
}
finally
@@ -204,27 +244,32 @@ namespace Umbraco.Web.PublishedCache
}
}
private IPublishedContentType CreatePublishedContentType(PublishedItemType itemType, Guid key)
{
IContentTypeComposition contentType = itemType switch
{
PublishedItemType.Content => _contentTypeService.Get(key),
PublishedItemType.Media => _mediaTypeService.Get(key),
PublishedItemType.Member => _memberTypeService.Get(key),
_ => throw new ArgumentOutOfRangeException(nameof(itemType)),
};
if (contentType == null)
throw new Exception($"ContentTypeService failed to find a {itemType.ToString().ToLower()} type with key \"{key}\".");
return _publishedContentTypeFactory.CreateContentType(contentType);
}
private IPublishedContentType CreatePublishedContentType(PublishedItemType itemType, string alias)
{
if (GetPublishedContentTypeByAlias != null)
return GetPublishedContentTypeByAlias(alias);
IContentTypeComposition contentType;
switch (itemType)
IContentTypeComposition contentType = itemType switch
{
case PublishedItemType.Content:
contentType = _contentTypeService.Get(alias);
break;
case PublishedItemType.Media:
contentType = _mediaTypeService.Get(alias);
break;
case PublishedItemType.Member:
contentType = _memberTypeService.Get(alias);
break;
default:
throw new ArgumentOutOfRangeException(nameof(itemType));
}
PublishedItemType.Content => _contentTypeService.Get(alias),
PublishedItemType.Media => _mediaTypeService.Get(alias),
PublishedItemType.Member => _memberTypeService.Get(alias),
_ => throw new ArgumentOutOfRangeException(nameof(itemType)),
};
if (contentType == null)
throw new Exception($"ContentTypeService failed to find a {itemType.ToString().ToLower()} type with alias \"{alias}\".");
@@ -235,23 +280,13 @@ namespace Umbraco.Web.PublishedCache
{
if (GetPublishedContentTypeById != null)
return GetPublishedContentTypeById(id);
IContentTypeComposition contentType;
switch (itemType)
IContentTypeComposition contentType = itemType switch
{
case PublishedItemType.Content:
contentType = _contentTypeService.Get(id);
break;
case PublishedItemType.Media:
contentType = _mediaTypeService.Get(id);
break;
case PublishedItemType.Member:
contentType = _memberTypeService.Get(id);
break;
default:
throw new ArgumentOutOfRangeException(nameof(itemType));
}
PublishedItemType.Content => _contentTypeService.Get(id),
PublishedItemType.Media => _mediaTypeService.Get(id),
PublishedItemType.Member => _memberTypeService.Get(id),
_ => throw new ArgumentOutOfRangeException(nameof(itemType)),
};
if (contentType == null)
throw new Exception($"ContentTypeService failed to find a {itemType.ToString().ToLower()} type with id {id}.");
@@ -259,6 +294,7 @@ namespace Umbraco.Web.PublishedCache
}
// for unit tests - changing the callback must reset the cache obviously
// TODO: Why does this even exist? For testing you'd pass in a mocked service to get by id
private Func<string, IPublishedContentType> _getPublishedContentTypeByAlias;
internal Func<string, IPublishedContentType> GetPublishedContentTypeByAlias
{
@@ -282,6 +318,7 @@ namespace Umbraco.Web.PublishedCache
}
// for unit tests - changing the callback must reset the cache obviously
// TODO: Why does this even exist? For testing you'd pass in a mocked service to get by id
private Func<int, IPublishedContentType> _getPublishedContentTypeById;
internal Func<int, IPublishedContentType> GetPublishedContentTypeById
{