diff --git a/src/Umbraco.Core/Cache/DictionaryCacheProvider.cs b/src/Umbraco.Core/Cache/DictionaryCacheProvider.cs new file mode 100644 index 0000000000..02e4b08642 --- /dev/null +++ b/src/Umbraco.Core/Cache/DictionaryCacheProvider.cs @@ -0,0 +1,151 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Text.RegularExpressions; +using Umbraco.Core.Plugins; + +namespace Umbraco.Core.Cache +{ + class DictionaryCacheProvider : ICacheProvider + { + private readonly ConcurrentDictionary> _items + = new ConcurrentDictionary>(); + + public void ClearAllCache() + { + _items.Clear(); + } + + public void ClearCacheItem(string key) + { + Lazy item; + _items.TryRemove(key, out item); + } + + public void ClearCacheObjectTypes(string typeName) + { + var type = TypeFinder.GetTypeByName(typeName); + if (type == null) return; + var isInterface = type.IsInterface; + + Lazy item; + foreach (var kvp in _items + .Where(x => + { + // entry.Value is Lazy and not null, its value may be null + // remove null values as well, does not hurt + // get non-created as NonCreatedValue & exceptions as null + var value = DictionaryCacheProviderBase.GetSafeLazyValue(x.Value, true); + + // if T is an interface remove anything that implements that interface + // otherwise remove exact types (not inherited types) + return value == null || (isInterface ? (type.IsInstanceOfType(value)) : (value.GetType() == type)); + })) + _items.TryRemove(kvp.Key, out item); + } + + public void ClearCacheObjectTypes() + { + var typeOfT = typeof(T); + var isInterface = typeOfT.IsInterface; + + Lazy item; + foreach (var kvp in _items + .Where(x => + { + // entry.Value is Lazy and not null, its value may be null + // remove null values as well, does not hurt + // compare on exact type, don't use "is" + // get non-created as NonCreatedValue & exceptions as null + var value = DictionaryCacheProviderBase.GetSafeLazyValue(x.Value, true); + + // if T is an interface remove anything that implements that interface + // otherwise remove exact types (not inherited types) + return value == null || (isInterface ? (value is T) : (value.GetType() == typeOfT)); + })) + _items.TryRemove(kvp.Key, out item); + } + + public void ClearCacheObjectTypes(Func predicate) + { + var typeOfT = typeof(T); + var isInterface = typeOfT.IsInterface; + + Lazy item; + foreach (var kvp in _items + .Where(x => + { + // entry.Value is Lazy and not null, its value may be null + // remove null values as well, does not hurt + // compare on exact type, don't use "is" + // get non-created as NonCreatedValue & exceptions as null + var value = DictionaryCacheProviderBase.GetSafeLazyValue(x.Value, true); + if (value == null) return true; + + // if T is an interface remove anything that implements that interface + // otherwise remove exact types (not inherited types) + return (isInterface ? 
(value is T) : (value.GetType() == typeOfT)) + // run predicate on the 'public key' part only, ie without prefix + && predicate(x.Key, (T)value); + })) + _items.TryRemove(kvp.Key, out item); + } + + public void ClearCacheByKeySearch(string keyStartsWith) + { + Lazy item; + foreach (var ikvp in _items + .Where(kvp => kvp.Key.InvariantStartsWith(keyStartsWith))) + _items.TryRemove(ikvp.Key, out item); + } + + public void ClearCacheByKeyExpression(string regexString) + { + Lazy item; + foreach (var ikvp in _items + .Where(kvp => Regex.IsMatch(kvp.Key, regexString))) + _items.TryRemove(ikvp.Key, out item); + } + + public IEnumerable GetCacheItemsByKeySearch(string keyStartsWith) + { + return _items + .Where(kvp => kvp.Key.InvariantStartsWith(keyStartsWith)) + .Select(kvp => DictionaryCacheProviderBase.GetSafeLazyValue(kvp.Value)) + .Where(x => x != null); + } + + public IEnumerable GetCacheItemsByKeyExpression(string regexString) + { + return _items + .Where(kvp => Regex.IsMatch(kvp.Key, regexString)) + .Select(kvp => DictionaryCacheProviderBase.GetSafeLazyValue(kvp.Value)) + .Where(x => x != null); + } + + public object GetCacheItem(string cacheKey) + { + Lazy result; + _items.TryGetValue(cacheKey, out result); // else null + return result == null ? null : DictionaryCacheProviderBase.GetSafeLazyValue(result); // return exceptions as null + } + + public object GetCacheItem(string cacheKey, Func getCacheItem) + { + var result = _items.GetOrAdd(cacheKey, k => DictionaryCacheProviderBase.GetSafeLazy(getCacheItem)); + + var value = result.Value; // will not throw (safe lazy) + var eh = value as DictionaryCacheProviderBase.ExceptionHolder; + if (eh == null) + return value; + + // and... it's in the cache anyway - so contrary to other cache providers, + // which would trick with GetSafeLazyValue, we need to remove by ourselves, + // in order NOT to cache exceptions + + _items.TryRemove(cacheKey, out result); + throw eh.Exception; // throw once! + } + } +} diff --git a/src/Umbraco.Core/Configuration/UmbracoVersion.cs b/src/Umbraco.Core/Configuration/UmbracoVersion.cs index 4c90df9111..78f1488e0c 100644 --- a/src/Umbraco.Core/Configuration/UmbracoVersion.cs +++ b/src/Umbraco.Core/Configuration/UmbracoVersion.cs @@ -23,7 +23,7 @@ namespace Umbraco.Core.Configuration /// Gets the version comment (like beta or RC). /// /// The version comment. 
- public static string CurrentComment => "alpha0002"; + public static string CurrentComment => "alpha0004"; // Get the version of the Umbraco.Core.dll by looking at a class in that dll // Had to do it like this due to medium trust issues, see: http://haacked.com/archive/2010/11/04/assembly-location-and-medium-trust.aspx diff --git a/src/Umbraco.Core/Persistence/Migrations/Upgrades/TargetVersionEight/AddContentNuTable.cs b/src/Umbraco.Core/Persistence/Migrations/Upgrades/TargetVersionEight/AddContentNuTable.cs new file mode 100644 index 0000000000..bcd1b98f27 --- /dev/null +++ b/src/Umbraco.Core/Persistence/Migrations/Upgrades/TargetVersionEight/AddContentNuTable.cs @@ -0,0 +1,44 @@ +using System.Data; +using System.Linq; +using Umbraco.Core.Configuration; +using Umbraco.Core.Persistence.DatabaseAnnotations; + +namespace Umbraco.Core.Persistence.Migrations.Upgrades.TargetVersionEight +{ + [Migration("8.0.0", 100, GlobalSettings.UmbracoMigrationName)] + class AddContentNuTable : MigrationBase + { + public AddContentNuTable(IMigrationContext context) + : base(context) + { } + + public override void Up() + { + var tables = SqlSyntax.GetTablesInSchema(Context.Database).ToArray(); + if (tables.InvariantContains("cmsContentNu")) return; + + var textType = SqlSyntax.GetSpecialDbType(SpecialDbTypes.NTEXT); + + Create.Table("cmsContentNu") + .WithColumn("nodeId").AsInt32().NotNullable() + .WithColumn("published").AsBoolean().NotNullable() + .WithColumn("data").AsCustom(textType).NotNullable() + .WithColumn("rv").AsInt64().NotNullable().WithDefaultValue(0); + + Create.PrimaryKey("PK_cmsContentNu") + .OnTable("cmsContentNu") + .Columns(new[] { "nodeId", "published" }); + + Create.ForeignKey("FK_cmsContentNu_umbracoNode_id") + .FromTable("cmsContentNu") + .ForeignColumn("nodeId") + .ToTable("umbracoNode") + .PrimaryColumn("id") + .OnDelete(Rule.Cascade) + .OnUpdate(Rule.None); + } + + public override void Down() + { } + } +} diff --git a/src/Umbraco.Core/Persistence/NPocoDatabaseExtensions.cs b/src/Umbraco.Core/Persistence/NPocoDatabaseExtensions.cs index 5057912c45..140ef071e6 100644 --- a/src/Umbraco.Core/Persistence/NPocoDatabaseExtensions.cs +++ b/src/Umbraco.Core/Persistence/NPocoDatabaseExtensions.cs @@ -125,6 +125,9 @@ namespace Umbraco.Core.Persistence // failed: exists (due to race cond RC1) // RC2 race cond here: another thread may remove the record + // fixme - debugging, ok? + throw; + // try to update rowCount = updateCommand.IsNullOrWhiteSpace() ? db.Update(poco) diff --git a/src/Umbraco.Core/Serialization/ForceInt32Converter.cs b/src/Umbraco.Core/Serialization/ForceInt32Converter.cs new file mode 100644 index 0000000000..97639a3cb2 --- /dev/null +++ b/src/Umbraco.Core/Serialization/ForceInt32Converter.cs @@ -0,0 +1,28 @@ +using System; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace Umbraco.Core.Serialization +{ + internal class ForceInt32Converter : JsonConverter + { + public override bool CanConvert(Type objectType) + { + return objectType == typeof (object) || objectType == typeof (int); + } + + public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + { + var jsonValue = serializer.Deserialize(reader); + + return jsonValue.Type == JTokenType.Integer + ? 
jsonValue.Value() + : serializer.Deserialize(reader); + } + + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + throw new NotImplementedException(); + } + } +} \ No newline at end of file diff --git a/src/Umbraco.Core/Umbraco.Core.csproj b/src/Umbraco.Core/Umbraco.Core.csproj index b7a3717b66..fcb2c701bb 100644 --- a/src/Umbraco.Core/Umbraco.Core.csproj +++ b/src/Umbraco.Core/Umbraco.Core.csproj @@ -74,6 +74,7 @@ + @@ -256,6 +257,7 @@ Files.resx + @@ -438,6 +440,7 @@ + diff --git a/src/Umbraco.Tests/Cache/SnapDictionaryTests.cs b/src/Umbraco.Tests/Cache/SnapDictionaryTests.cs new file mode 100644 index 0000000000..c7dc51cb07 --- /dev/null +++ b/src/Umbraco.Tests/Cache/SnapDictionaryTests.cs @@ -0,0 +1,710 @@ +using System; +using NUnit.Framework; +using Umbraco.Web.PublishedCache.NuCache; + +namespace Umbraco.Tests.Cache +{ + [TestFixture] + public class SnapDictionaryTests + { + [Test] + public void LiveGenUpdate() + { + var d = new SnapDictionary(); + d.Test.CollectAuto = false; + + Assert.AreEqual(0, d.Test.GetValues(1).Length); + + // gen 1 + d.Set(1, "one"); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + d.Clear(1); + Assert.AreEqual(0, d.Test.GetValues(1).Length); // gone + + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + Assert.AreEqual(0, d.Test.FloorGen); + } + + [Test] + public void OtherGenUpdate() + { + var d = new SnapDictionary(); + d.Test.CollectAuto = false; + + Assert.AreEqual(0, d.Test.GetValues(1).Length); + Assert.AreEqual(0, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + + // gen 1 + d.Set(1, "one"); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var s = d.CreateSnapshot(); + Assert.AreEqual(1, s.Gen); + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + + // gen 2 + d.Clear(1); + Assert.AreEqual(2, d.Test.GetValues(1).Length); // there + Assert.AreEqual(2, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + Assert.AreEqual(0, d.Test.FloorGen); + + GC.KeepAlive(s); + } + + [Test] + public void MissingReturnsNull() + { + var d = new SnapDictionary(); + var s = d.CreateSnapshot(); + + Assert.IsNull(s.Get(1)); + } + + [Test] + public void DeletedReturnsNull() + { + var d = new SnapDictionary(); + + // gen 1 + d.Set(1, "one"); + + var s1 = d.CreateSnapshot(); + Assert.AreEqual("one", s1.Get(1)); + + // gen 2 + d.Clear(1); + + var s2 = d.CreateSnapshot(); + Assert.IsNull(s2.Get(1)); + + Assert.AreEqual("one", s1.Get(1)); + } + + [Test] + public async void CollectValues() + { + var d = new SnapDictionary(); + d.Test.CollectAuto = false; + + // gen 1 + d.Set(1, "one"); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + d.Set(1, "one"); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + d.Set(1, "uno"); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var s1 = d.CreateSnapshot(); + + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + + // gen 2 + Assert.AreEqual(1, d.Test.GetValues(1).Length); + d.Set(1, "one"); + Assert.AreEqual(2, d.Test.GetValues(1).Length); + d.Set(1, "uno"); + Assert.AreEqual(2, d.Test.GetValues(1).Length); + + Assert.AreEqual(2, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var s2 = d.CreateSnapshot(); + + Assert.AreEqual(2, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + + // gen 3 + Assert.AreEqual(2, d.Test.GetValues(1).Length); + d.Set(1, "one"); + 
Assert.AreEqual(3, d.Test.GetValues(1).Length); + d.Set(1, "uno"); + Assert.AreEqual(3, d.Test.GetValues(1).Length); + + Assert.AreEqual(3, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var tv = d.Test.GetValues(1); + Assert.AreEqual(3, tv[0].Gen); + Assert.AreEqual(2, tv[1].Gen); + Assert.AreEqual(1, tv[2].Gen); + + Assert.AreEqual(0, d.Test.FloorGen); + + // nothing to collect + await d.CollectAsync(); + GC.KeepAlive(s1); + GC.KeepAlive(s2); + Assert.AreEqual(0, d.Test.FloorGen); + Assert.AreEqual(3, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + Assert.AreEqual(2, d.SnapCount); + Assert.AreEqual(3, d.Test.GetValues(1).Length); + + // one snapshot to collect + s1 = null; + GC.Collect(); + GC.KeepAlive(s2); + await d.CollectAsync(); + Assert.AreEqual(1, d.Test.FloorGen); + Assert.AreEqual(3, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + Assert.AreEqual(1, d.SnapCount); + Assert.AreEqual(2, d.Test.GetValues(1).Length); + + // another snapshot to collect + s2 = null; + GC.Collect(); + await d.CollectAsync(); + Assert.AreEqual(2, d.Test.FloorGen); + Assert.AreEqual(3, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + Assert.AreEqual(0, d.SnapCount); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + } + + [Test] + public async void ProperlyCollects() + { + var d = new SnapDictionary(); + d.Test.CollectAuto = false; + + for (var i = 0; i < 32; i++) + { + d.Set(i, i.ToString()); + d.CreateSnapshot().Dispose(); + } + + Assert.AreEqual(32, d.GenCount); + Assert.AreEqual(0, d.SnapCount); // because we've disposed them + + await d.CollectAsync(); + Assert.AreEqual(32, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + Assert.AreEqual(0, d.GenCount); + Assert.AreEqual(0, d.SnapCount); + Assert.AreEqual(32, d.Count); + + for (var i = 0; i < 32; i++) + d.Set(i, null); + + d.CreateSnapshot().Dispose(); + + // because we haven't collected yet, but disposed nevertheless + Assert.AreEqual(1, d.GenCount); + Assert.AreEqual(0, d.SnapCount); + Assert.AreEqual(32, d.Count); + + // once we collect, they are all gone + // since noone is interested anymore + await d.CollectAsync(); + Assert.AreEqual(0, d.GenCount); + Assert.AreEqual(0, d.SnapCount); + Assert.AreEqual(0, d.Count); + } + + [Test] + public async void CollectNulls() + { + var d = new SnapDictionary(); + d.Test.CollectAuto = false; + + // gen 1 + d.Set(1, "one"); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + d.Set(1, "one"); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + d.Set(1, "uno"); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var s1 = d.CreateSnapshot(); + + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + + // gen 2 + Assert.AreEqual(1, d.Test.GetValues(1).Length); + d.Set(1, "one"); + Assert.AreEqual(2, d.Test.GetValues(1).Length); + d.Set(1, "uno"); + Assert.AreEqual(2, d.Test.GetValues(1).Length); + + Assert.AreEqual(2, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var s2 = d.CreateSnapshot(); + + Assert.AreEqual(2, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + + // gen 3 + Assert.AreEqual(2, d.Test.GetValues(1).Length); + d.Set(1, "one"); + Assert.AreEqual(3, d.Test.GetValues(1).Length); + d.Set(1, "uno"); + Assert.AreEqual(3, d.Test.GetValues(1).Length); + d.Clear(1); + Assert.AreEqual(3, d.Test.GetValues(1).Length); + + Assert.AreEqual(3, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var tv = d.Test.GetValues(1); + Assert.AreEqual(3, tv[0].Gen); + Assert.AreEqual(2, 
tv[1].Gen); + Assert.AreEqual(1, tv[2].Gen); + + Assert.AreEqual(0, d.Test.FloorGen); + + // nothing to collect + await d.CollectAsync(); + GC.KeepAlive(s1); + GC.KeepAlive(s2); + Assert.AreEqual(0, d.Test.FloorGen); + Assert.AreEqual(3, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + Assert.AreEqual(2, d.SnapCount); + Assert.AreEqual(3, d.Test.GetValues(1).Length); + + // one snapshot to collect + s1 = null; + GC.Collect(); + GC.KeepAlive(s2); + await d.CollectAsync(); + Assert.AreEqual(1, d.Test.FloorGen); + Assert.AreEqual(3, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + Assert.AreEqual(1, d.SnapCount); + Assert.AreEqual(2, d.Test.GetValues(1).Length); + + // another snapshot to collect + s2 = null; + GC.Collect(); + await d.CollectAsync(); + Assert.AreEqual(2, d.Test.FloorGen); + Assert.AreEqual(3, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + Assert.AreEqual(0, d.SnapCount); + + // and everything is gone? + // no, cannot collect the live gen because we'd need to lock + Assert.AreEqual(1, d.Test.GetValues(1).Length); + + d.CreateSnapshot(); + GC.Collect(); + await d.CollectAsync(); + + // poof, gone + Assert.AreEqual(0, d.Test.GetValues(1).Length); + } + + [Test] + public async void EventuallyCollectNulls() + { + var d = new SnapDictionary(); + d.Test.CollectAuto = false; + + Assert.AreEqual(0, d.Test.GetValues(1).Length); + + // gen 1 + d.Set(1, "one"); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + await d.CollectAsync(); + var tv = d.Test.GetValues(1); + Assert.AreEqual(1, tv.Length); + Assert.AreEqual(1, tv[0].Gen); + + var s = d.CreateSnapshot(); + Assert.AreEqual("one", s.Get(1)); + + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + + Assert.AreEqual(1, d.Count); + Assert.AreEqual(1, d.SnapCount); + Assert.AreEqual(1, d.GenCount); + + // gen 2 + d.Clear(1); + tv = d.Test.GetValues(1); + Assert.AreEqual(2, tv.Length); + Assert.AreEqual(2, tv[0].Gen); + + Assert.AreEqual(2, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + Assert.AreEqual(1, d.Count); + Assert.AreEqual(1, d.SnapCount); + Assert.AreEqual(1, d.GenCount); + + // nothing to collect + await d.CollectAsync(); + GC.KeepAlive(s); + Assert.AreEqual(2, d.Test.GetValues(1).Length); + + Assert.AreEqual(1, d.Count); + Assert.AreEqual(1, d.SnapCount); + Assert.AreEqual(1, d.GenCount); + + Assert.AreEqual(2, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + // collect snapshot + // don't collect liveGen+ + s = null; // without being disposed + GC.Collect(); // should release the generation reference + await d.CollectAsync(); + + Assert.AreEqual(1, d.Test.GetValues(1).Length); // "one" value is gone + Assert.AreEqual(1, d.Count); // still have 1 item + Assert.AreEqual(0, d.SnapCount); // snapshot is gone + Assert.AreEqual(0, d.GenCount); // and generation has been dequeued + + // liveGen/nextGen + s = d.CreateSnapshot(); + s = null; + + // collect liveGen + GC.Collect(); + + SnapDictionary.GenerationObject genObj; + Assert.IsTrue(d.Test.GenerationObjects.TryPeek(out genObj)); + genObj = null; + + // in Release mode, it works, but in Debug mode, the weak reference is still alive + // and for some reason we need to do this to ensure it is collected +#if DEBUG + await d.CollectAsync(); + GC.Collect(); +#endif + + Assert.IsTrue(d.Test.GenerationObjects.TryPeek(out genObj)); + Assert.IsFalse(genObj.WeakReference.IsAlive); // snapshot is gone, along with its reference + + await d.CollectAsync(); + + 
Assert.AreEqual(0, d.Test.GetValues(1).Length); // null value is gone + Assert.AreEqual(0, d.Count); // item is gone + Assert.AreEqual(0, d.Test.GenerationObjects.Count); + Assert.AreEqual(0, d.SnapCount); // snapshot is gone + Assert.AreEqual(0, d.GenCount); // and generation has been dequeued + } + + [Test] + public async void CollectDisposedSnapshots() + { + var d = new SnapDictionary(); + d.Test.CollectAuto = false; + + // gen 1 + d.Set(1, "one"); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var s1 = d.CreateSnapshot(); + + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + + // gen 2 + d.Set(1, "two"); + Assert.AreEqual(2, d.Test.GetValues(1).Length); + + Assert.AreEqual(2, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var s2 = d.CreateSnapshot(); + + Assert.AreEqual(2, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + + // gen 3 + d.Set(1, "three"); + Assert.AreEqual(3, d.Test.GetValues(1).Length); + + Assert.AreEqual(3, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var s3 = d.CreateSnapshot(); + + Assert.AreEqual(3, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + + Assert.AreEqual(3, d.SnapCount); + + s1.Dispose(); + await d.CollectAsync(); + Assert.AreEqual(2, d.SnapCount); + Assert.AreEqual(2, d.Test.GetValues(1).Length); + + s2.Dispose(); + await d.CollectAsync(); + Assert.AreEqual(1, d.SnapCount); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + + s3.Dispose(); + await d.CollectAsync(); + Assert.AreEqual(0, d.SnapCount); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + } + + [Test] + public async void CollectGcSnapshots() + { + var d = new SnapDictionary(); + d.Test.CollectAuto = false; + + // gen 1 + d.Set(1, "one"); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var s1 = d.CreateSnapshot(); + + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + + // gen 2 + d.Set(1, "two"); + Assert.AreEqual(2, d.Test.GetValues(1).Length); + + Assert.AreEqual(2, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var s2 = d.CreateSnapshot(); + + Assert.AreEqual(2, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + + // gen 3 + d.Set(1, "three"); + Assert.AreEqual(3, d.Test.GetValues(1).Length); + + Assert.AreEqual(3, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var s3 = d.CreateSnapshot(); + + Assert.AreEqual(3, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + + Assert.AreEqual(3, d.SnapCount); + + s1 = s2 = s3 = null; + + await d.CollectAsync(); + Assert.AreEqual(3, d.SnapCount); + Assert.AreEqual(3, d.Test.GetValues(1).Length); + + GC.Collect(); + await d.CollectAsync(); + Assert.AreEqual(0, d.SnapCount); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + } + + [Test] + public async void RandomTest1() + { + var d = new SnapDictionary(); + d.Test.CollectAuto = false; + + d.Set(1, "one"); + d.Set(2, "two"); + + var s1 = d.CreateSnapshot(); + var v1 = s1.Get(1); + Assert.AreEqual("one", v1); + + d.Set(1, "uno"); + + var s2 = d.CreateSnapshot(); + var v2 = s2.Get(1); + Assert.AreEqual("uno", v2); + + v1 = s1.Get(1); + Assert.AreEqual("one", v1); + + Assert.AreEqual(2, d.SnapCount); + + s1 = null; + GC.Collect(); + await d.CollectAsync(); + + // in Release mode, it works, but in Debug mode, the weak reference is still alive + // and for some reason we need to do this to ensure it is collected +#if DEBUG + GC.Collect(); + await d.CollectAsync(); 
+#endif + + Assert.AreEqual(1, d.SnapCount); + v2 = s2.Get(1); + Assert.AreEqual("uno", v2); + + s2 = null; + GC.Collect(); + await d.CollectAsync(); + + Assert.AreEqual(0, d.SnapCount); + } + + [Test] + public async void RandomTest2() + { + var d = new SnapDictionary(); + d.Test.CollectAuto = false; + + d.Set(1, "one"); + d.Set(2, "two"); + + var s1 = d.CreateSnapshot(); + var v1 = s1.Get(1); + Assert.AreEqual("one", v1); + + d.Clear(1); + + var s2 = d.CreateSnapshot(); + var v2 = s2.Get(1); + Assert.AreEqual(null, v2); + + v1 = s1.Get(1); + Assert.AreEqual("one", v1); + + Assert.AreEqual(2, d.SnapCount); + + s1 = null; + GC.Collect(); + await d.CollectAsync(); + + // in Release mode, it works, but in Debug mode, the weak reference is still alive + // and for some reason we need to do this to ensure it is collected +#if DEBUG + GC.Collect(); + await d.CollectAsync(); +#endif + + Assert.AreEqual(1, d.SnapCount); + v2 = s2.Get(1); + Assert.AreEqual(null, v2); + + s2 = null; + GC.Collect(); + await d.CollectAsync(); + + Assert.AreEqual(0, d.SnapCount); + } + + [Test] + public void WriteLockingFirstSnapshot() + { + var d = new SnapDictionary(); + d.Test.CollectAuto = false; + + // gen 1 + d.Set(1, "one"); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + d.WriteLocked(() => + { + var s1 = d.CreateSnapshot(); + + Assert.AreEqual(0, s1.Gen); + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + Assert.IsNull(s1.Get(1)); + }); + + var s2 = d.CreateSnapshot(); + + Assert.AreEqual(1, s2.Gen); + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + Assert.AreEqual("one", s2.Get(1)); + } + + [Test] + public void WriteLocking() + { + var d = new SnapDictionary(); + d.Test.CollectAuto = false; + + // gen 1 + d.Set(1, "one"); + Assert.AreEqual(1, d.Test.GetValues(1).Length); + + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var s1 = d.CreateSnapshot(); + + Assert.AreEqual(1, s1.Gen); + Assert.AreEqual(1, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + Assert.AreEqual("one", s1.Get(1)); + + // gen 2 + Assert.AreEqual(1, d.Test.GetValues(1).Length); + d.Set(1, "uno"); + Assert.AreEqual(2, d.Test.GetValues(1).Length); + + Assert.AreEqual(2, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var s2 = d.CreateSnapshot(); + + Assert.AreEqual(2, s2.Gen); + Assert.AreEqual(2, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + Assert.AreEqual("uno", s2.Get(1)); + + d.WriteLocked(() => + { + // gen 3 + Assert.AreEqual(2, d.Test.GetValues(1).Length); + d.Set(1, "ein"); + Assert.AreEqual(3, d.Test.GetValues(1).Length); + + Assert.AreEqual(3, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); + + var s3 = d.CreateSnapshot(); + + Assert.AreEqual(2, s3.Gen); + Assert.AreEqual(3, d.Test.LiveGen); + Assert.IsTrue(d.Test.NextGen); // has NOT changed when (non) creating snapshot + Assert.AreEqual("uno", s3.Get(1)); + }); + + var s4 = d.CreateSnapshot(); + + Assert.AreEqual(3, s4.Gen); + Assert.AreEqual(3, d.Test.LiveGen); + Assert.IsFalse(d.Test.NextGen); + Assert.AreEqual("ein", s4.Get(1)); + } + } +} diff --git a/src/Umbraco.Tests/Umbraco.Tests.csproj b/src/Umbraco.Tests/Umbraco.Tests.csproj index 69cb2d2f2b..f48713ba26 100644 --- a/src/Umbraco.Tests/Umbraco.Tests.csproj +++ b/src/Umbraco.Tests/Umbraco.Tests.csproj @@ -226,6 +226,7 @@ + diff --git a/src/Umbraco.Web.UI.Client/src/views/dashboard/developer/facadestatus.controller.js 
b/src/Umbraco.Web.UI.Client/src/views/dashboard/developer/facadestatus.controller.js
new file mode 100644
index 0000000000..d08ee5a249
--- /dev/null
+++ b/src/Umbraco.Web.UI.Client/src/views/dashboard/developer/facadestatus.controller.js
@@ -0,0 +1,15 @@
+function facadeStatusController($scope, umbRequestHelper, $log, $http, $q, $timeout) {
+
+    // note: 'facadeStatusBaseUrl' must be defined in BackOfficeController
+
+    umbRequestHelper.resourcePromise(
+        $http.get(umbRequestHelper.getApiUrl('facadeStatusBaseUrl', 'GetFacadeStatusUrl')),
+        'Failed to get facade status url')
+        .then(function (result) {
+            $scope.includeUrl = angular.fromJson(result);
+        });
+
+    //$scope.includeUrl = 'views/dashboard/developer/xmldataintegrityreport.html';
+
+}
+angular.module("umbraco").controller("Umbraco.Dashboard.FacadeStatusController", facadeStatusController);
\ No newline at end of file
diff --git a/src/Umbraco.Web.UI.Client/src/views/dashboard/developer/facadestatus.html b/src/Umbraco.Web.UI.Client/src/views/dashboard/developer/facadestatus.html
new file mode 100644
index 0000000000..7c09ad6fa9
--- /dev/null
+++ b/src/Umbraco.Web.UI.Client/src/views/dashboard/developer/facadestatus.html
@@ -0,0 +1,9 @@
+<!-- facadestatus.html: view markup not recovered; visible text below -->
+    Facade Status
+    Loading...
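
The facadestatus dashboard above resolves 'facadeStatusBaseUrl' and calls a GetFacadeStatusUrl action to find out which view to include. The concrete Web API controller type is not visible in this diff (the generic argument of GetUmbracoApiServiceBaseUrl in the BackOfficeController change further down is not readable), so the following is only a minimal sketch of what such an endpoint could look like; the class name, namespace and returned path are illustrative, not taken from the change set.

    using System.Web.Http;
    using Umbraco.Web.WebApi;

    namespace Umbraco.Web.Editors
    {
        // hypothetical sketch - the dashboard script only needs a JSON string
        // containing the URL of the view it should ng-include
        public class FacadeStatusController : UmbracoAuthorizedApiController
        {
            [HttpGet]
            public string GetFacadeStatusUrl()
            {
                // illustrative path - a real implementation would return the report view
                // matching the currently configured published content cache
                return "views/dashboard/developer/xmldataintegrityreport.html";
            }
        }
    }
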
\ No newline at end of file
diff --git a/src/Umbraco.Web.UI.Client/src/views/dashboard/developer/nucache.controller.js b/src/Umbraco.Web.UI.Client/src/views/dashboard/developer/nucache.controller.js
new file mode 100644
index 0000000000..8a6d3d5db3
--- /dev/null
+++ b/src/Umbraco.Web.UI.Client/src/views/dashboard/developer/nucache.controller.js
@@ -0,0 +1,57 @@
+function nuCacheController($scope, umbRequestHelper, $log, $http, $q, $timeout) {
+
+    $scope.reload = function () {
+        if ($scope.working) return;
+        if (confirm("Trigger an in-memory and local file cache reload on all servers.")) {
+            $scope.working = true;
+            umbRequestHelper.resourcePromise(
+                $http.post(umbRequestHelper.getApiUrl("nuCacheStatusBaseUrl", "ReloadCache")),
+                'Failed to trigger a cache reload')
+                .then(function (result) {
+                    $scope.working = false;
+                });
+        }
+    };
+
+    $scope.collect = function () {
+        if ($scope.working) return;
+        $scope.working = true;
+        umbRequestHelper.resourcePromise(
+            $http.get(umbRequestHelper.getApiUrl("nuCacheStatusBaseUrl", "Collect")),
+            'Failed to collect.')
+            .then(function (result) {
+                $scope.working = false;
+                $scope.status = angular.fromJson(result);
+            });
+    };
+
+    $scope.verify = function () {
+        if ($scope.working) return;
+        $scope.working = true;
+        umbRequestHelper.resourcePromise(
+            $http.get(umbRequestHelper.getApiUrl("nuCacheStatusBaseUrl", "GetStatus")),
+            'Failed to verify the cache.')
+            .then(function (result) {
+                $scope.working = false;
+                $scope.status = angular.fromJson(result);
+            });
+    };
+
+    $scope.rebuild = function () {
+        if ($scope.working) return;
+        if (confirm("Rebuild cmsContentNu table content. Expensive.")) {
+            $scope.working = true;
+            umbRequestHelper.resourcePromise(
+                $http.post(umbRequestHelper.getApiUrl("nuCacheStatusBaseUrl", "RebuildDbCache")),
+                'Failed to rebuild the cache.')
+                .then(function (result) {
+                    $scope.working = false;
+                    $scope.status = angular.fromJson(result);
+                });
+        }
+    };
+
+    $scope.working = false;
+    $scope.verify();
+}
+angular.module("umbraco").controller("Umbraco.Dashboard.NuCacheController", nuCacheController);
\ No newline at end of file
diff --git a/src/Umbraco.Web.UI.Client/src/views/dashboard/developer/nucache.html b/src/Umbraco.Web.UI.Client/src/views/dashboard/developer/nucache.html
new file mode 100644
index 0000000000..ad2eece90b
--- /dev/null
+++ b/src/Umbraco.Web.UI.Client/src/views/dashboard/developer/nucache.html
@@ -0,0 +1,54 @@
+<!-- nucache.html: view markup not recovered; visible text of the dashboard below -->
+    Loading...
+    You are running the brand new NuCache!
+    NuCache is working.
+    NuCache says: {{status}}
+    This lets you refresh the status, or collect snapshots (after running a CLR GC).
+    This lets you rebuild the database cache (status above), ie the content of the cmsContentNu table.
+    Rebuilding can be expensive.
+    This lets you reload the in-memory and local file cache by entirely reloading it from
+    the data in the cmsContentNu table, but it does not rebuild that table. This is relatively
+    fast. Triggers the reload on all servers in an LB environment.
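
The nucache dashboard above posts or gets ReloadCache, Collect, GetStatus and RebuildDbCache against 'nuCacheStatusBaseUrl'. Only those action names appear in this diff; the controller type registered in BackOfficeController is not readable here, so the sketch below is an assumption about its shape rather than the actual implementation - class name, namespace and method bodies are illustrative.

    using System;
    using System.Web.Http;
    using Umbraco.Web.WebApi;

    namespace Umbraco.Web.Editors
    {
        // hypothetical sketch of the endpoints called by nucache.controller.js;
        // only the action names come from this diff
        public class NuCacheStatusController : UmbracoAuthorizedApiController
        {
            [HttpGet]
            public string GetStatus()
            {
                return "NuCache is working."; // illustrative status text (see the view above)
            }

            [HttpGet]
            public string Collect()
            {
                GC.Collect(); // the dashboard describes collecting snapshots after a CLR GC
                return GetStatus();
            }

            [HttpPost]
            public string ReloadCache()
            {
                // would trigger the in-memory / local file cache reload on all servers
                return GetStatus();
            }

            [HttpPost]
            public string RebuildDbCache()
            {
                // would rebuild the cmsContentNu table content (expensive)
                return GetStatus();
            }
        }
    }
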
\ No newline at end of file diff --git a/src/Umbraco.Web.UI/config/Dashboard.Release.config b/src/Umbraco.Web.UI/config/Dashboard.Release.config index fe0082f009..3c070eefad 100644 --- a/src/Umbraco.Web.UI/config/Dashboard.Release.config +++ b/src/Umbraco.Web.UI/config/Dashboard.Release.config @@ -37,6 +37,11 @@ views/dashboard/developer/examinemanagement.html + + + views/dashboard/developer/facadestatus.html + +
diff --git a/src/Umbraco.Web.UI/config/Dashboard.config b/src/Umbraco.Web.UI/config/Dashboard.config index df45708e0f..0619662d4c 100644 --- a/src/Umbraco.Web.UI/config/Dashboard.config +++ b/src/Umbraco.Web.UI/config/Dashboard.config @@ -34,6 +34,11 @@ views/dashboard/developer/examinemanagement.html + + + views/dashboard/developer/facadestatus.html + +
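
The two dashboards registered above rely on base URLs exposed as back office server variables; the BackOfficeController change below adds them, but the generic type arguments of GetUmbracoApiServiceBaseUrl are not readable in this diff. A minimal sketch of how those entries would read with explicit controller types, assuming the hypothetical controllers sketched earlier:

    using System.Collections.Generic;
    using System.Web.Mvc;
    using Umbraco.Web;          // GetUmbracoApiServiceBaseUrl<T>() extension on UrlHelper (assumed namespace)
    using Umbraco.Web.Editors;  // the sketch controllers above

    // hypothetical helper - the controller types are assumptions, the keys and
    // action names are the ones used by the dashboard scripts in this diff
    internal static class NuCacheServerVariablesSketch
    {
        public static IDictionary<string, object> Build(UrlHelper url)
        {
            return new Dictionary<string, object>
            {
                {
                    "facadeStatusBaseUrl",
                    url.GetUmbracoApiServiceBaseUrl<FacadeStatusController>(c => c.GetFacadeStatusUrl())
                },
                {
                    "nuCacheStatusBaseUrl",
                    url.GetUmbracoApiServiceBaseUrl<NuCacheStatusController>(c => c.GetStatus())
                }
            };
        }
    }
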
diff --git a/src/Umbraco.Web.UI/packages.config b/src/Umbraco.Web.UI/packages.config index 974d9f50c9..e2bb2aeb8d 100644 --- a/src/Umbraco.Web.UI/packages.config +++ b/src/Umbraco.Web.UI/packages.config @@ -2,8 +2,9 @@ + - + diff --git a/src/Umbraco.Web/Editors/BackOfficeController.cs b/src/Umbraco.Web/Editors/BackOfficeController.cs index 997767ab4c..aaf6d0e1a4 100644 --- a/src/Umbraco.Web/Editors/BackOfficeController.cs +++ b/src/Umbraco.Web/Editors/BackOfficeController.cs @@ -363,6 +363,14 @@ namespace Umbraco.Web.Editors { "healthCheckBaseUrl", Url.GetUmbracoApiServiceBaseUrl( controller => controller.GetAllHealthChecks()) + }, + { + "facadeStatusBaseUrl", Url.GetUmbracoApiServiceBaseUrl( + controller => controller.GetFacadeStatusUrl()) + }, + { + "nuCacheStatusBaseUrl", Url.GetUmbracoApiServiceBaseUrl( + controller => controller.GetStatus()) } } }, diff --git a/src/Umbraco.Web/PublishedCache/NuCache/CacheKeys.cs b/src/Umbraco.Web/PublishedCache/NuCache/CacheKeys.cs new file mode 100644 index 0000000000..fe47b335ab --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/CacheKeys.cs @@ -0,0 +1,78 @@ +using System; +using System.Runtime.CompilerServices; + +namespace Umbraco.Web.PublishedCache.NuCache +{ + static class CacheKeys + { + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static string DraftOrPub(bool previewing) + { + return previewing ? "D:" : "P:"; + } + + public static string PublishedContentChildren(Guid contentUid, bool previewing) + { + return "NuCache.Content.Children[" + DraftOrPub(previewing) + ":" + contentUid + "]"; + } + + public static string ContentCacheRoots(bool previewing) + { + return "NuCache.ContentCache.Roots[" + DraftOrPub(previewing) + "]"; + } + + public static string MediaCacheRoots(bool previewing) + { + return "NuCache.MediaCache.Roots[" + DraftOrPub(previewing) + "]"; + } + + public static string PublishedContentAsPreviewing(Guid contentUid) + { + return "NuCache.Content.AsPreviewing[" + contentUid + "]"; + } + + public static string ProfileName(int userId) + { + return "NuCache.Profile.Name[" + userId + "]"; + } + + public static string PropertyRecurse(Guid contentUid, string typeAlias, bool previewing) + { + return "NuCache.Property.Recurse[" + DraftOrPub(previewing) + contentUid + ":" + typeAlias + "]"; + } + + public static string PropertyValueSet(Guid contentUid, string typeAlias, bool previewing) + { + return "NuCache.Property.ValueSet[" + DraftOrPub(previewing) + contentUid + ":" + typeAlias + "]"; + } + + // routes still use int id and not Guid uid, because routable nodes must have + // a valid ID in the database at that point, whereas content and properties + // may be virtual (and not in umbracoNode). + + public static string ContentCacheRouteByContent(int id, bool previewing) + { + return "NuCache.ContentCache.RouteByContent[" + DraftOrPub(previewing) + id + "]"; + } + + public static string ContentCacheContentByRoute(string route, bool previewing) + { + return "NuCache.ContentCache.ContentByRoute[" + DraftOrPub(previewing) + route + "]"; + } + + //public static string ContentCacheRouteByContentStartsWith() + //{ + // return "NuCache.ContentCache.RouteByContent["; + //} + + //public static string ContentCacheContentByRouteStartsWith() + //{ + // return "NuCache.ContentCache.ContentByRoute["; + //} + + public static string MemberCacheMember(string name, bool previewing, object p) + { + return "NuCache.MemberCache." 
+ name + "[" + DraftOrPub(previewing) + p + "]"; + } + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/ContentCache.cs b/src/Umbraco.Web/PublishedCache/NuCache/ContentCache.cs new file mode 100644 index 0000000000..040d1017a9 --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/ContentCache.cs @@ -0,0 +1,386 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Xml.XPath; +using Umbraco.Core; +using Umbraco.Core.Cache; +using Umbraco.Core.Configuration; +using Umbraco.Core.Models; +using Umbraco.Core.Models.PublishedContent; +using Umbraco.Core.Xml; +using Umbraco.Core.Xml.XPath; +using Umbraco.Web.PublishedCache.NuCache.Navigable; +using Umbraco.Web.Routing; + +namespace Umbraco.Web.PublishedCache.NuCache +{ + class ContentCache : PublishedCacheBase, IPublishedContentCache, INavigableData, IDisposable + { + private readonly ContentStore2.Snapshot _snapshot; + private readonly ICacheProvider _facadeCache; + private readonly ICacheProvider _snapshotCache; + private readonly DomainHelper _domainHelper; + + #region Constructor + + public ContentCache(bool previewDefault, ContentStore2.Snapshot snapshot, ICacheProvider facadeCache, ICacheProvider snapshotCache, DomainHelper domainHelper) + : base(previewDefault) + { + _snapshot = snapshot; + _facadeCache = facadeCache; + _snapshotCache = snapshotCache; + _domainHelper = domainHelper; + } + + #endregion + + #region Routes + + // routes can be + // "/" + // "123/" + // "/path/to/node" + // "123/path/to/node" + + // at the moment we try our best to be backward compatible, but really, + // should get rid of hideTopLevelNode and other oddities entirely, eventually + + public IPublishedContent GetByRoute(string route, bool? hideTopLevelNode = null) + { + return GetByRoute(PreviewDefault, route, hideTopLevelNode); + } + + public IPublishedContent GetByRoute(bool preview, string route, bool? hideTopLevelNode = null) + { + if (route == null) throw new ArgumentNullException("route"); + + var cache = (preview == false || FacadeService.FullCacheWhenPreviewing) ? _snapshotCache : _facadeCache; + var key = CacheKeys.ContentCacheContentByRoute(route, preview); + return cache.GetCacheItem(key, () => GetByRouteInternal(preview, route, hideTopLevelNode)); + } + + private IPublishedContent GetByRouteInternal(bool preview, string route, bool? hideTopLevelNode) + { + hideTopLevelNode = hideTopLevelNode ?? GlobalSettings.HideTopLevelNodeFromPath; // default = settings + + // the route always needs to be lower case because we only store the urlName attribute in lower case + route = route.ToLowerInvariant(); + + var pos = route.IndexOf('/'); + var path = pos == 0 ? route : route.Substring(pos); + var startNodeId = pos == 0 ? 0 : int.Parse(route.Substring(0, pos)); + var parts = path.Split(new[] { '/' }, StringSplitOptions.RemoveEmptyEntries); + + IPublishedContent content; + + if (startNodeId > 0) + { + // if in a domain then start with the root node of the domain + // and follow the path + // note: if domain has a path (eg example.com/en) which is not recommended anymore + // then then /en part of the domain is basically ignored here... + content = GetById(preview, startNodeId); + content = FollowRoute(content, parts, 0); + } + else if (parts.Length == 0) + { + // if not in a domain, and path is empty - what is the default page? 
+ // let's say it is the first one in the tree, if any -- order by sortOrder + content = GetAtRoot(preview).FirstOrDefault(); + } + else + { + // if not in a domain... + // hideTopLevelNode = support legacy stuff, look for /*/path/to/node + // else normal, look for /path/to/node + content = hideTopLevelNode.Value + ? GetAtRoot(preview).SelectMany(x => x.Children).FirstOrDefault(x => x.UrlName == parts[0]) + : GetAtRoot(preview).FirstOrDefault(x => x.UrlName == parts[0]); + content = FollowRoute(content, parts, 1); + } + + // if hideTopLevelNodePath is true then for url /foo we looked for /*/foo + // but maybe that was the url of a non-default top-level node, so we also + // have to look for /foo (see note in ApplyHideTopLevelNodeFromPath). + if (content == null && hideTopLevelNode.Value && parts.Length == 1) + { + content = GetAtRoot(preview).FirstOrDefault(x => x.UrlName == parts[0]); + } + + return content; + } + + public string GetRouteById(int contentId) + { + return GetRouteById(PreviewDefault, contentId); + } + + public string GetRouteById(bool preview, int contentId) + { + var cache = (preview == false || FacadeService.FullCacheWhenPreviewing) ? _snapshotCache : _facadeCache; + var key = CacheKeys.ContentCacheRouteByContent(contentId, preview); + return cache.GetCacheItem(key, () => GetRouteByIdInternal(preview, contentId, null)); + } + + private string GetRouteByIdInternal(bool preview, int contentId, bool? hideTopLevelNode) + { + var node = GetById(preview, contentId); + if (node == null) + return null; + + hideTopLevelNode = hideTopLevelNode ?? GlobalSettings.HideTopLevelNodeFromPath; // default = settings + + // walk up from that node until we hit a node with a domain, + // or we reach the content root, collecting urls in the way + var pathParts = new List(); + var n = node; + var hasDomains = _domainHelper.NodeHasDomains(n.Id); + while (hasDomains == false && n != null) // n is null at root + { + // get the url + var urlName = n.UrlName; + pathParts.Add(urlName); + + // move to parent node + n = n.Parent; + hasDomains = n != null && _domainHelper.NodeHasDomains(n.Id); + } + + // no domain, respect HideTopLevelNodeFromPath for legacy purposes + if (hasDomains == false && hideTopLevelNode.Value) + ApplyHideTopLevelNodeFromPath(node, pathParts, preview); + + // assemble the route + pathParts.Reverse(); + var path = "/" + string.Join("/", pathParts); // will be "/" or "/foo" or "/foo/bar" etc + var route = (n == null ? "" : n.Id.ToString(CultureInfo.InvariantCulture)) + path; + + return route; + } + + private static IPublishedContent FollowRoute(IPublishedContent content, IReadOnlyList parts, int start) + { + var i = start; + while (content != null && i < parts.Count) + { + var part = parts[i++]; + content = content.Children.FirstOrDefault(x => x.UrlName == part); + } + return content; + } + + private void ApplyHideTopLevelNodeFromPath(IPublishedContent content, IList segments, bool preview) + { + // in theory if hideTopLevelNodeFromPath is true, then there should be only one + // top-level node, or else domains should be assigned. but for backward compatibility + // we add this check - we look for the document matching "/" and if it's not us, then + // we do not hide the top level path + // it has to be taken care of in GetByRoute too so if + // "/foo" fails (looking for "/*/foo") we try also "/foo". + // this does not make much sense anyway esp. 
if both "/foo/" and "/bar/foo" exist, but + // that's the way it works pre-4.10 and we try to be backward compat for the time being + if (content.Parent == null) + { + var rootNode = GetByRoute(preview, "/", true); + if (rootNode == null) + throw new Exception("Failed to get node at /."); + if (rootNode.Id == content.Id) // remove only if we're the default node + segments.RemoveAt(segments.Count - 1); + } + else + { + segments.RemoveAt(segments.Count - 1); + } + } + + #endregion + + #region Get, Has + + public override IPublishedContent GetById(bool preview, int contentId) + { + var n = _snapshot.Get(contentId); + if (n == null) return null; + + // both .Draft and .Published cannot be null at the same time + return preview + ? n.Draft ?? GetPublishedContentAsPreviewing(n.Published) + : n.Published; + } + + public override bool HasById(bool preview, int contentId) + { + var n = _snapshot.Get(contentId); + if (n == null) return false; + + return preview || n.Published != null; + } + + public override IEnumerable GetAtRoot(bool preview) + { + if (FacadeService.CacheContentCacheRoots == false) + return GetAtRootNoCache(preview); + + var facade = Facade.Current; + var cache = (facade == null) + ? null + : (preview == false || FacadeService.FullCacheWhenPreviewing + ? facade.SnapshotCache + : facade.FacadeCache); + + if (cache == null) + return GetAtRootNoCache(preview); + + // note: ToArray is important here, we want to cache the result, not the function! + return (IEnumerable) cache.GetCacheItem( + CacheKeys.ContentCacheRoots(preview), + () => GetAtRootNoCache(preview).ToArray()); + } + + private IEnumerable GetAtRootNoCache(bool preview) + { + var c = _snapshot.GetAtRoot(); + + // both .Draft and .Published cannot be null at the same time + return c.Select(n => preview + ? n.Draft ?? GetPublishedContentAsPreviewing(n.Published) + : n.Published).WhereNotNull().OrderBy(x => x.SortOrder); + } + + // gets a published content as a previewing draft, if preview is true + // this is for published content when previewing + internal static IPublishedContent GetPublishedContentAsPreviewing(IPublishedContent content /*, bool preview*/) + { + if (content == null /*|| preview == false*/) return null; //content; + + // an object in the cache is either an IPublishedContentOrMedia, + // or a model inheriting from PublishedContentExtended - in which + // case we need to unwrap to get to the original IPublishedContentOrMedia. + + var inner = PublishedContent.UnwrapIPublishedContent(content); + return inner.AsPreviewingModel(); + } + + public override bool HasContent(bool preview) + { + return preview + ? 
_snapshot.IsEmpty == false + : _snapshot.GetAtRoot().Any(x => x.Published != null); + } + + #endregion + + #region XPath + + public override IPublishedContent GetSingleByXPath(bool preview, string xpath, XPathVariable[] vars) + { + var navigator = CreateNavigator(preview); + var iterator = navigator.Select(xpath, vars); + return GetSingleByXPath(iterator); + } + + public override IPublishedContent GetSingleByXPath(bool preview, XPathExpression xpath, XPathVariable[] vars) + { + var navigator = CreateNavigator(preview); + var iterator = navigator.Select(xpath, vars); + return GetSingleByXPath(iterator); + } + + private static IPublishedContent GetSingleByXPath(XPathNodeIterator iterator) + { + if (iterator.MoveNext() == false) return null; + + var xnav = iterator.Current as NavigableNavigator; + if (xnav == null) return null; + + var xcontent = xnav.UnderlyingObject as NavigableContent; + return xcontent == null ? null : xcontent.InnerContent; + } + + public override IEnumerable GetByXPath(bool preview, string xpath, XPathVariable[] vars) + { + var navigator = CreateNavigator(preview); + var iterator = navigator.Select(xpath, vars); + return GetByXPath(iterator); + } + + public override IEnumerable GetByXPath(bool preview, XPathExpression xpath, XPathVariable[] vars) + { + var navigator = CreateNavigator(preview); + var iterator = navigator.Select(xpath, vars); + return GetByXPath(iterator); + } + + private static IEnumerable GetByXPath(XPathNodeIterator iterator) + { + while (iterator.MoveNext()) + { + var xnav = iterator.Current as NavigableNavigator; + if (xnav == null) continue; + + var xcontent = xnav.UnderlyingObject as NavigableContent; + if (xcontent == null) continue; + + yield return xcontent.InnerContent; + } + } + + public override XPathNavigator CreateNavigator(bool preview) + { + var source = new Source(this, preview); + var navigator = new NavigableNavigator(source); + return navigator; + } + + public override XPathNavigator CreateNodeNavigator(int id, bool preview) + { + var source = new Source(this, preview); + var navigator = new NavigableNavigator(source); + return navigator.CloneWithNewRoot(id, 0); + } + + #endregion + + #region Detached + + // detached is something that needs to be refactored entirely eventually + // detached property should accept the "container content" guid + // etc + + public IPublishedProperty CreateDetachedProperty(PublishedPropertyType propertyType, object value, bool isPreviewing) + { + throw new NotImplementedException(); + } + + #endregion + + #region Content types + + public override PublishedContentType GetContentType(int id) + { + return _snapshot.GetContentType(id); + } + + public override PublishedContentType GetContentType(string alias) + { + return _snapshot.GetContentType(alias); + } + + public override IEnumerable GetByContentType(PublishedContentType contentType) + { + throw new NotImplementedException(); + } + + #endregion + + #region IDisposable + + public void Dispose() + { + _snapshot.Dispose(); + } + + #endregion + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/ContentNode.cs b/src/Umbraco.Web/PublishedCache/NuCache/ContentNode.cs new file mode 100644 index 0000000000..709662c249 --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/ContentNode.cs @@ -0,0 +1,156 @@ +using System; +using System.Collections.Generic; +using Umbraco.Core.Models; +using Umbraco.Core.Models.PublishedContent; +using Umbraco.Web.PublishedCache.NuCache.DataSource; + +namespace Umbraco.Web.PublishedCache.NuCache +{ + // represents a content 
"node" ie a pair of draft + published versions + // internal, never exposed, to be accessed from ContentStore (only!) + internal class ContentNode + { + // special ctor with no content data - for members + public ContentNode(int id, Guid uid, PublishedContentType contentType, + int level, string path, int sortOrder, + int parentContentId, + DateTime createDate, int creatorId) + { + Id = id; + Uid = uid; + ContentType = contentType; + Level = level; + Path = path; + SortOrder = sortOrder; + ParentContentId = parentContentId; + CreateDate = createDate; + CreatorId = creatorId; + + ChildContentIds = new List(); + } + + public ContentNode(int id, Guid uid, PublishedContentType contentType, + int level, string path, int sortOrder, + int parentContentId, + DateTime createDate, int creatorId, + ContentData draftData, ContentData publishedData) + : this(id, uid, level, path, sortOrder, parentContentId, createDate, creatorId) + { + SetContentTypeAndData(contentType, draftData, publishedData); + } + + // 2-phases ctor, phase 1 + public ContentNode(int id, Guid uid, + int level, string path, int sortOrder, + int parentContentId, + DateTime createDate, int creatorId) + { + Id = id; + Uid = uid; + Level = level; + Path = path; + SortOrder = sortOrder; + ParentContentId = parentContentId; + CreateDate = createDate; + CreatorId = creatorId; + + ChildContentIds = new List(); + } + + // two-phase ctor, phase 2 + public void SetContentTypeAndData(PublishedContentType contentType, ContentData draftData, ContentData publishedData) + { + ContentType = contentType; + + if (draftData == null && publishedData == null) + throw new ArgumentException("Both draftData and publishedData cannot be null at the same time."); + + if (draftData != null) + Draft = new PublishedContent(this, draftData).CreateModel(); + if (publishedData != null) + Published = new PublishedContent(this, publishedData).CreateModel(); + } + + // clone parent + private ContentNode(ContentNode origin) + { + // everything is the same, except for the child items + // list which is a clone of the original list + + Id = origin.Id; + Uid = origin.Uid; + ContentType = origin.ContentType; + Level = origin.Level; + Path = origin.Path; + SortOrder = origin.SortOrder; + ParentContentId = origin.ParentContentId; + CreateDate = origin.CreateDate; + CreatorId = origin.CreatorId; + + var originDraft = origin.Draft == null ? null : PublishedContent.UnwrapIPublishedContent(origin.Draft); + var originPublished = origin.Published == null ? null : PublishedContent.UnwrapIPublishedContent(origin.Published); + + Draft = originDraft == null ? null : new PublishedContent(this, originDraft).CreateModel(); + Published = originPublished == null ? null : new PublishedContent(this, originPublished).CreateModel(); + + ChildContentIds = new List(origin.ChildContentIds); // needs to be *another* list + } + + // clone with new content type + public ContentNode(ContentNode origin, PublishedContentType contentType) + { + Id = origin.Id; + Uid = origin.Uid; + ContentType = contentType; // change! + Level = origin.Level; + Path = origin.Path; + SortOrder = origin.SortOrder; + ParentContentId = origin.ParentContentId; + CreateDate = origin.CreateDate; + CreatorId = origin.CreatorId; + + var originDraft = origin.Draft == null ? null : PublishedContent.UnwrapIPublishedContent(origin.Draft); + var originPublished = origin.Published == null ? null : PublishedContent.UnwrapIPublishedContent(origin.Published); + + Draft = originDraft == null ? 
null : new PublishedContent(this, originDraft._contentData).CreateModel(); + Published = originPublished == null ? null : new PublishedContent(this, originPublished._contentData).CreateModel(); + + ChildContentIds = origin.ChildContentIds; // can be the *same* list FIXME oh really? + } + + // everything that is common to both draft and published versions + // keep this as small as possible + public readonly int Id; + public readonly Guid Uid; + public PublishedContentType ContentType; + public readonly int Level; + public readonly string Path; + public readonly int SortOrder; + public readonly int ParentContentId; + public List ChildContentIds; + public readonly DateTime CreateDate; + public readonly int CreatorId; + + // draft and published version (either can be null, but not both) + public IPublishedContent Draft; + public IPublishedContent Published; + + public ContentNode CloneParent() + { + return new ContentNode(this); + } + + public ContentNodeKit ToKit() + { + return new ContentNodeKit + { + Node = this, + ContentTypeId = ContentType.Id, + // ReSharper disable MergeConditionalExpression + DraftData = Draft == null ? null : ((PublishedContent) Draft)._contentData, + PublishedData = Published == null ? null : ((PublishedContent) Published)._contentData + // ReSharper restore MergeConditionalExpression + }; + } + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/ContentNodeKit.cs b/src/Umbraco.Web/PublishedCache/NuCache/ContentNodeKit.cs new file mode 100644 index 0000000000..c45b774486 --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/ContentNodeKit.cs @@ -0,0 +1,21 @@ +using Umbraco.Core.Models.PublishedContent; +using Umbraco.Web.PublishedCache.NuCache.DataSource; + +namespace Umbraco.Web.PublishedCache.NuCache +{ + // what's needed to actually build a content node + struct ContentNodeKit + { + public ContentNode Node; + public int ContentTypeId; + public ContentData DraftData; + public ContentData PublishedData; + + public bool IsEmpty { get { return Node == null; } } + + public void Build(PublishedContentType contentType) + { + Node.SetContentTypeAndData(contentType, DraftData, PublishedData); + } + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/ContentStore2.cs b/src/Umbraco.Web/PublishedCache/NuCache/ContentStore2.cs new file mode 100644 index 0000000000..e4826a38d9 --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/ContentStore2.cs @@ -0,0 +1,1041 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using CSharpTest.Net.Collections; +using Umbraco.Core.Logging; +using Umbraco.Core.Models.PublishedContent; + +namespace Umbraco.Web.PublishedCache.NuCache +{ + // stores content + internal class ContentStore2 + { + // this class is an extended version of SnapDictionary + // most of the snapshots management code, etc is an exact copy + // SnapDictionary has unit tests to ensure it all works correctly + + private readonly ConcurrentDictionary> _contentNodes; + private readonly ConcurrentDictionary> _contentRootNodes; + private readonly ConcurrentDictionary> _contentTypesById; + private readonly ConcurrentDictionary> _contentTypesByAlias; + private readonly Dictionary> _contentTypeNodes; + + private readonly ILogger _logger; + private BPlusTree _localDb; + private readonly ConcurrentQueue _genRefRefs; + private GenRefRef _genRefRef; + private readonly object _wlocko = new object(); + private readonly object _rlocko = new object(); + 
private long _liveGen, _floorGen; + private bool _nextGen, _collectAuto; + private Task _collectTask; + private volatile int _wlocked; + + // fixme - collection trigger (ok for now) + // see SnapDictionary notes + private const long CollectMinGenDelta = 8; + + #region Ctor + + public ContentStore2(ILogger logger, BPlusTree localDb = null) + { + _logger = logger; + _localDb = localDb; + + _contentNodes = new ConcurrentDictionary>(); + _contentRootNodes = new ConcurrentDictionary>(); + _contentTypesById = new ConcurrentDictionary>(); + _contentTypesByAlias = new ConcurrentDictionary>(StringComparer.InvariantCultureIgnoreCase); + _contentTypeNodes = new Dictionary>(); + + _genRefRefs = new ConcurrentQueue(); + _genRefRef = null; // no initial gen exists + _liveGen = _floorGen = 0; + _nextGen = false; // first time, must create a snapshot + _collectAuto = true; // collect automatically by default + } + + #endregion + + #region Locking + + public void WriteLocked(Action action) + { + var wtaken = false; + var wcount = false; + try + { + Monitor.Enter(_wlocko, ref wtaken); + + var rtaken = false; + try + { + Monitor.Enter(_rlocko, ref rtaken); + + // see SnapDictionary + try + { } + finally + { + _wlocked++; + wcount = true; + if (_nextGen == false) + { + // because we are changing things, a new generation + // is created, which will trigger a new snapshot + _nextGen = true; + _liveGen += 1; + } + } + } + finally + { + if (rtaken) Monitor.Exit(_rlocko); + } + + action(); + } + finally + { + if (wcount) _wlocked--; + if (wtaken) Monitor.Exit(_wlocko); + } + } + + public T WriteLocked(Func func) + { + var wtaken = false; + var wcount = false; + try + { + Monitor.Enter(_wlocko, ref wtaken); + + var rtaken = false; + try + { + Monitor.Enter(_rlocko, ref rtaken); + + try + { } + finally + { + _wlocked++; + wcount = true; + if (_nextGen == false) + { + // because we are changing things, a new generation + // is created, which will trigger a new snapshot + _nextGen = true; + _liveGen += 1; + } + } + } + finally + { + if (rtaken) Monitor.Exit(_rlocko); + } + + return func(); + } + finally + { + if (wcount) _wlocked--; + if (wtaken) Monitor.Exit(_wlocko); + } + } + + private T ReadLocked(Func func) + { + var rtaken = false; + try + { + Monitor.Enter(_rlocko, ref rtaken); + + // we have rlock, so it cannot ++ + // it could -- though, so... volatile + var wlocked = _wlocked > 0; + return func(wlocked); + } + finally + { + if (rtaken) Monitor.Exit(_rlocko); + } + } + + #endregion + + #region LocalDb + + public void ReleaseLocalDb() + { + WriteLocked(() => + { + if (_localDb == null) return; + _localDb.Dispose(); + _localDb = null; + }); + } + + #endregion + + #region Content types + + public void UpdateContentTypes(IEnumerable removedIds, IEnumerable refreshedTypes, IEnumerable kits) + { + removedIds = removedIds ?? Enumerable.Empty(); + refreshedTypes = refreshedTypes ?? Enumerable.Empty(); + kits = kits ?? 
new ContentNodeKit[0]; + + WriteLocked(() => + { + foreach (var id in removedIds) + { + // all content should have been deleted - but + if (_contentTypeNodes.ContainsKey(id)) + { + foreach (var node in _contentTypeNodes[id]) + ClearBranchLocked(node); + _contentTypeNodes.Remove(id); + } + + LinkedNode link; + if (_contentTypesById.TryGetValue(id, out link) == false || link.Value == null) + continue; + SetValueLocked(_contentTypesById, id, null); + SetValueLocked(_contentTypesByAlias, link.Value.Alias, null); + } + + var temp = new Dictionary>(); + + foreach (var type in refreshedTypes) + { + if (_contentTypeNodes.ContainsKey(type.Id) == false) + _contentTypeNodes[type.Id] = new HashSet(); + + SetValueLocked(_contentTypesById, type.Id, type); + SetValueLocked(_contentTypesByAlias, type.Alias, type); + + temp.Add(type.Id, new HashSet(_contentTypeNodes[type.Id])); + } + + // skip missing type + // skip missing parents & unbuildable kits - what else could we do? + foreach (var kit in kits.Where(x => + temp.ContainsKey(x.ContentTypeId) && + ParentExistsLocked(x) && + BuildKit(x))) + { + SetValueLocked(_contentNodes, kit.Node.Id, kit.Node); + if (_localDb != null) + _localDb[kit.Node.Id] = kit; + temp[kit.ContentTypeId].Remove(kit.Node.Id); + } + + // all content should have been refreshed - but... + foreach (var id in temp.Values.SelectMany(x => x)) + ClearBranchLocked(id); + + if (_localDb != null) + _localDb.Commit(); + }); + } + + public void UpdateDataTypes(IEnumerable dataTypeIds, Func getContentType) + { + WriteLocked(() => + { + var contentTypes = _contentTypesById + .Where(kvp => + kvp.Value.Value != null && + kvp.Value.Value.PropertyTypes.Any(p => dataTypeIds.Contains(p.DataTypeId))) + .Select(kvp => kvp.Value.Value) + .Select(x => getContentType(x.Id)); + + foreach (var contentType in contentTypes) + { + // poof, gone, very unlikely and probably an anomaly + if (contentType == null) + continue; + + // again, weird situation + if (_contentTypeNodes.ContainsKey(contentType.Id) == false) + continue; + + foreach (var id in _contentTypeNodes[contentType.Id]) + { + LinkedNode link; + _contentNodes.TryGetValue(id, out link); + if (link == null || link.Value == null) + continue; + var node = new ContentNode(link.Value, contentType); + SetValueLocked(_contentNodes, id, node); + if (_localDb != null) + _localDb[id] = node.ToKit(); + } + } + + if (_localDb != null) + _localDb.Commit(); + }); + } + + private bool BuildKit(ContentNodeKit kit) + { + // make sure the kit is valid + if (kit.DraftData == null && kit.PublishedData == null) + return false; + + LinkedNode link; + + // unknown = bad + if (_contentTypesById.TryGetValue(kit.ContentTypeId, out link) == false || link.Value == null) + return false; + + // not checking ByAlias, assuming we don't have internal errors + + // register + if (_contentTypeNodes.ContainsKey(kit.ContentTypeId) == false) + _contentTypeNodes[kit.ContentTypeId] = new HashSet(); + _contentTypeNodes[kit.ContentTypeId].Add(kit.Node.Id); + + // and use + kit.Build(link.Value); + + return true; + } + + private void ReleaseContentTypeLocked(ContentNode content) + { + if (_contentTypeNodes.ContainsKey(content.ContentType.Id) == false) + return; // though, ?! 
+ _contentTypeNodes[content.ContentType.Id].Remove(content.Id); + } + + #endregion + + #region Set, Clear, Get + + public int Count + { + get { return _contentNodes.Count; } + } + + private LinkedNode GetHead(ConcurrentDictionary> dict, TKey key) + where TValue : class + { + LinkedNode link; + dict.TryGetValue(key, out link); // else null + return link; + } + + public void Set(ContentNodeKit kit) + { + // ReSharper disable LocalizableElement + if (kit.IsEmpty) + throw new ArgumentException("Kit is empty.", "kit"); + if (kit.Node.ChildContentIds.Count > 0) + throw new ArgumentException("Kit content cannot have children.", "kit"); + // ReSharper restore LocalizableElement + + _logger.Debug("Set content ID:" + kit.Node.Id); + + WriteLocked(() => + { + // get existing + LinkedNode link; + _contentNodes.TryGetValue(kit.Node.Id, out link); + var existing = link == null ? null : link.Value; + + // else ignore, what else could we do? + if (ParentExistsLocked(kit) == false || BuildKit(kit) == false) + return; + + // moving? + var moving = existing != null && existing.ParentContentId != kit.Node.ParentContentId; + + // manage children + if (existing != null) + kit.Node.ChildContentIds = existing.ChildContentIds; + + // set + SetValueLocked(_contentNodes, kit.Node.Id, kit.Node); + if (_localDb != null) + _localDb[kit.Node.Id] = kit; + + // manage the tree + if (existing == null) + { + // new, add to parent + AddToParentLocked(kit.Node); + } + else if (moving) + { + // moved, remove existing from its parent, add content to its parent + RemoveFromParentLocked(existing); + AddToParentLocked(kit.Node); + } + + if (_localDb != null) + _localDb.Commit(); + }); + } + + public void SetAll(IEnumerable kits) + { + WriteLocked(() => + { + ClearLocked(_contentNodes); + ClearLocked(_contentRootNodes); + + // do NOT clear types else they are gone! + //ClearLocked(_contentTypesById); + //ClearLocked(_contentTypesByAlias); + + // skip missing parents & unbuildable kits - what else could we do? + foreach (var kit in kits.Where(x => ParentExistsLocked(x) && BuildKit(x))) + { + SetValueLocked(_contentNodes, kit.Node.Id, kit.Node); + if (_localDb != null) + _localDb[kit.Node.Id] = kit; + AddToParentLocked(kit.Node); + } + + if (_localDb != null) + _localDb.Commit(); + }); + } + + public void SetBranch(int rootContentId, IEnumerable kits) + { + WriteLocked(() => + { + // get existing + LinkedNode link; + _contentNodes.TryGetValue(rootContentId, out link); + var existing = link == null ? null : link.Value; + + // clear + if (existing != null) + { + ClearBranchLocked(existing); + RemoveFromParentLocked(existing); + } + + // now add them all back + // skip missing parents & unbuildable kits - what else could we do? 
+ foreach (var kit in kits.Where(x => ParentExistsLocked(x) && BuildKit(x))) + { + SetValueLocked(_contentNodes, kit.Node.Id, kit.Node); + if (_localDb != null) + _localDb[kit.Node.Id] = kit; + AddToParentLocked(kit.Node); + } + + if (_localDb != null) + _localDb.Commit(); + }); + } + + public bool Clear(int id) + { + return WriteLocked(() => + { + // try to find the content + // if it is not there, nothing to do + LinkedNode link; + _contentNodes.TryGetValue(id, out link); // else null + if (link == null || link.Value == null) return false; + + var content = link.Value; + _logger.Debug("Clear content ID:" + content.Id); + + // clear the entire branch + ClearBranchLocked(content); + + // manage the tree + RemoveFromParentLocked(content); + + return true; + }); + } + + private void ClearBranchLocked(int id) + { + LinkedNode link; + _contentNodes.TryGetValue(id, out link); + if (link == null || link.Value == null) + return; + ClearBranchLocked(link.Value); + } + + private void ClearBranchLocked(ContentNode content) + { + SetValueLocked(_contentNodes, content.Id, null); + if (_localDb != null) + { + ContentNodeKit kit; + _localDb.TryRemove(content.Id, out kit); + } + ReleaseContentTypeLocked(content); + foreach (var childId in content.ChildContentIds) + { + LinkedNode link; + if (_contentNodes.TryGetValue(childId, out link) == false || link.Value == null) continue; + ClearBranchLocked(link.Value); + } + } + + private LinkedNode GetParentLink(ContentNode content) + { + LinkedNode link; + _contentNodes.TryGetValue(content.ParentContentId, out link); // else null + //if (link == null || link.Value == null) + // throw new Exception("Panic: parent not found."); + return link; + } + + private void RemoveFromParentLocked(ContentNode content) + { + // remove from root content index, + // or parent's children index + if (content.ParentContentId < 0) + { + SetValueLocked(_contentRootNodes, content.Id, null); + } + else + { + // obviously parent has to exist + var link = GetParentLink(content); + var parent = link.Value; + if (link.Gen < _liveGen) + parent = parent.CloneParent(); + parent.ChildContentIds.Remove(content.Id); + if (link.Gen < _liveGen) + SetValueLocked(_contentNodes, parent.Id, parent); + } + } + + private bool ParentExistsLocked(ContentNodeKit kit) + { + if (kit.Node.ParentContentId < 0) + return true; + var link = GetParentLink(kit.Node); + return link != null && link.Value != null; + } + + private void AddToParentLocked(ContentNode content) + { + // add to root content index, + // or parent's children index + if (content.ParentContentId < 0) + { + // need an object reference... just use this... 
+ SetValueLocked(_contentRootNodes, content.Id, this); + } + else + { + // assume parent has been validated and exists + var link = GetParentLink(content); + var parent = link.Value; + if (link.Gen < _liveGen) + parent = parent.CloneParent(); + parent.ChildContentIds.Add(content.Id); + if (link.Gen < _liveGen) + SetValueLocked(_contentNodes, parent.Id, parent); + } + } + + private void SetValueLocked(ConcurrentDictionary> dict, TKey key, TValue value) + where TValue : class + { + // this is safe only because we're write-locked + var link = GetHead(dict, key); + if (link != null) + { + // already in the dict + if (link.Gen != _liveGen) + { + // for an older gen - if value is different then insert a new + // link for the new gen, with the new value + if (link.Value != value) + dict.TryUpdate(key, new LinkedNode(value, _liveGen, link), link); + } + else + { + // for the live gen - we can fix the live gen - and remove it + // if value is null and there's no next gen + if (value == null && link.Next == null) + dict.TryRemove(key, out link); + else + link.Value = value; + } + } + else + { + dict.TryAdd(key, new LinkedNode(value, _liveGen)); + } + } + + private void ClearLocked(ConcurrentDictionary> dict) + where TValue : class + { + WriteLocked(() => + { + // this is safe only because we're write-locked + foreach (var kvp in dict.Where(x => x.Value != null)) + { + if (kvp.Value.Gen < _liveGen) + { + var link = new LinkedNode(null, _liveGen, kvp.Value); + dict.TryUpdate(kvp.Key, link, kvp.Value); + } + else + { + kvp.Value.Value = null; + } + } + }); + } + + public ContentNode Get(int id, long gen) + { + return GetValue(_contentNodes, id, gen); + } + + public IEnumerable GetAtRoot(long gen) + { + // look ma, no lock! + foreach (var kvp in _contentRootNodes) + { + var link = kvp.Value; + while (link != null) + { + if (link.Gen <= gen) + break; + link = link.Next; + } + if (link != null && link.Value != null) + yield return Get(kvp.Key, gen); + } + } + + private TValue GetValue(ConcurrentDictionary> dict, TKey key, long gen) + where TValue : class + { + // look ma, no lock! + var link = GetHead(dict, key); + while (link != null) + { + if (link.Gen <= gen) + return link.Value; // may be null + link = link.Next; + } + return null; + } + + public bool IsEmpty(long gen) + { + var has = _contentNodes.Any(x => + { + var link = x.Value; + while (link != null) + { + if (link.Gen <= gen && link.Value != null) + return true; + link = link.Next; + } + return false; + }); + return has == false; + } + + public PublishedContentType GetContentType(int id, long gen) + { + return GetValue(_contentTypesById, id, gen); + } + + public PublishedContentType GetContentType(string alias, long gen) + { + return GetValue(_contentTypesByAlias, alias, gen); + } + + #endregion + + #region Snapshots + + public Snapshot CreateSnapshot() + { + return ReadLocked(wlocked => + { + // if no next generation is required, and we already have one, + // use it and create a new snapshot + if (_nextGen == false && _genRefRef != null) + return new Snapshot(this, _genRefRef.GetGenRef() +#if DEBUG + , _logger +#endif + ); + + // else we need to try to create a new gen ref + // whether we are wlocked or not, noone can rlock while we do, + // so _liveGen and _nextGen are safe + if (wlocked) + { + // write-locked, cannot use latest gen (at least 1) so use previous + var snapGen = _nextGen ? 
_liveGen - 1 : _liveGen; + + // create a new gen ref unless we already have it + if (_genRefRef == null) + _genRefRefs.Enqueue(_genRefRef = new GenRefRef(snapGen)); + else if (_genRefRef.Gen != snapGen) + throw new Exception("panic"); + } + else + { + // not write-locked, can use latest gen, create a new gen ref + _genRefRefs.Enqueue(_genRefRef = new GenRefRef(_liveGen)); + _nextGen = false; // this is the ONLY thing that triggers a _liveGen++ + } + + // so... + // the genRefRef has a weak ref to the genRef, and is queued + // the snapshot has a ref to the genRef, which has a ref to the genRefRef + // when the snapshot is disposed, it decreases genRefRef counter + // so after a while, one of these conditions is going to be true: + // - the genRefRef counter is zero because all snapshots have properly been disposed + // - the genRefRef weak ref is dead because all snapshots have been collected + // in both cases, we will dequeue and collect + + var snapshot = new Snapshot(this, _genRefRef.GetGenRef() +#if DEBUG + , _logger +#endif + ); + + // reading _floorGen is safe if _collectTask is null + if (_collectTask == null && _collectAuto && _liveGen - _floorGen > CollectMinGenDelta) + CollectAsyncLocked(); + + return snapshot; + }); + } + + public Task CollectAsync() + { + lock (_rlocko) + { + return CollectAsyncLocked(); + } + } + + private Task CollectAsyncLocked() + { + if (_collectTask != null) + return _collectTask; + + // ReSharper disable InconsistentlySynchronizedField + var task = _collectTask = Task.Run(() => Collect()); + _collectTask.ContinueWith(_ => + { + lock (_rlocko) + { + _collectTask = null; + } + }, TaskContinuationOptions.ExecuteSynchronously); + // ReSharper restore InconsistentlySynchronizedField + + return task; + } + + private void Collect() + { + // see notes in CreateSnapshot +#if DEBUG + _logger.Debug("Collect."); +#endif + GenRefRef genRefRef; + while (_genRefRefs.TryPeek(out genRefRef) && (genRefRef.Count == 0 || genRefRef.WGenRef.IsAlive == false)) + { + _genRefRefs.TryDequeue(out genRefRef); // cannot fail since TryPeek has succeeded + _floorGen = genRefRef.Gen; +#if DEBUG + //_logger.Debug("_floorGen=" + _floorGen + ", _liveGen=" + _liveGen); +#endif + } + + Collect(_contentNodes); + Collect(_contentRootNodes); + Collect(_contentTypesById); + Collect(_contentTypesByAlias); + } + + private void Collect(ConcurrentDictionary> dict) + where TValue : class + { + // it is OK to enumerate a concurrent dictionary and it does not lock + // it - and here it's not an issue if we skip some items, they will be + // processed next time we collect + + long liveGen; + lock (_rlocko) // r is good + { + liveGen = _liveGen; + if (_nextGen == false) + liveGen += 1; + } + + foreach (var kvp in dict) + { + var link = kvp.Value; + +#if DEBUG + //_logger.Debug("Collect id:" + kvp.Key + ", gen:" + link.Gen + + // ", nxt:" + (link.Next == null ? "null" : "link") + + // ", val:" + (link.Value == null ? 
"null" : "value")); +#endif + + // reasons to collect the head: + // gen must be < liveGen (we never collect live gen) + // next == null && value == null (we have no data at all) + // next != null && value == null BUT gen > floor (noone wants us) + // not live means .Next and .Value are safe + if (link.Gen < liveGen && link.Value == null + && (link.Next == null || link.Gen <= _floorGen)) + { + // not live, null value, no next link = remove that one -- but only if + // the dict has not been updated, have to do it via ICollection<> (thanks + // Mr Toub) -- and if the dict has been updated there is nothing to collect + var idict = dict as ICollection>>; + idict.Remove(kvp); + continue; + } + + // in any other case we're not collecting the head, we need to go to Next + // and if there is no Next, skip + if (link.Next == null) + continue; + + // else go to Next and loop while above floor, and kill everything below + while (link.Next != null && link.Next.Gen > _floorGen) + link = link.Next; + link.Next = null; + } + } + + public async Task WaitForPendingCollect() + { + Task task; + lock (_rlocko) + { + task = _collectTask; + } + if (task != null) + await task; + } + + public long GenCount + { + get { return _genRefRefs.Count; } + } + + public long SnapCount + { + get + { + return _genRefRefs.Sum(x => x.Count); + } + } + + #endregion + + #region Unit testing + + private TestHelper _unitTesting; + + // note: nothing here is thread-safe + internal class TestHelper + { + private readonly ContentStore2 _store; + + public TestHelper(ContentStore2 store) + { + _store = store; + } + + public long LiveGen { get { return _store._liveGen; } } + public long FloorGen { get { return _store._floorGen; } } + public bool NextGen { get { return _store._nextGen; } } + public bool CollectAuto { get { return _store._collectAuto; } set { _store._collectAuto = value; } } + + public Tuple[] GetValues(int id) + { + LinkedNode link; + _store._contentNodes.TryGetValue(id, out link); // else null + + if (link == null) + return new Tuple[0]; + + var tuples = new List>(); + do + { + tuples.Add(Tuple.Create(link.Gen, link.Value)); + link = link.Next; + } while (link != null); + return tuples.ToArray(); + } + } + + internal TestHelper Test { get { return _unitTesting ?? 
(_unitTesting = new TestHelper(this)); } } + + #endregion + + #region Classes + + private class LinkedNode + where TValue: class + { + public LinkedNode(TValue value, long gen, LinkedNode next = null) + { + Value = value; + Gen = gen; + Next = next; + } + + internal readonly long Gen; + + // reading & writing references is thread-safe on all .NET platforms + // mark as volatile to ensure we always read the correct value + internal volatile TValue Value; + internal volatile LinkedNode Next; + } + + public class Snapshot : IDisposable + { + private readonly ContentStore2 _store; + private readonly GenRef _genRef; + private long _gen; +#if DEBUG + private readonly ILogger _logger; +#endif + + //private static int _count; + //private readonly int _thisCount; + + internal Snapshot(ContentStore2 store, GenRef genRef +#if DEBUG + , ILogger logger +#endif + ) + { + _store = store; + _genRef = genRef; + _gen = genRef.Gen; + Interlocked.Increment(ref genRef.GenRefRef.Count); + //_thisCount = _count++; + +#if DEBUG + _logger = logger; + _logger.Debug("Creating snapshot."); +#endif + } + + public ContentNode Get(int id) + { + if (_gen < 0) + throw new ObjectDisposedException("snapshot" /*+ " (" + _thisCount + ")"*/); + return _store.Get(id, _gen); + } + + public IEnumerable GetAtRoot() + { + if (_gen < 0) + throw new ObjectDisposedException("snapshot" /*+ " (" + _thisCount + ")"*/); + return _store.GetAtRoot(_gen); + } + + public PublishedContentType GetContentType(int id) + { + if (_gen < 0) + throw new ObjectDisposedException("snapshot" /*+ " (" + _thisCount + ")"*/); + return _store.GetContentType(id, _gen); + } + + public PublishedContentType GetContentType(string alias) + { + if (_gen < 0) + throw new ObjectDisposedException("snapshot" /*+ " (" + _thisCount + ")"*/); + return _store.GetContentType(alias, _gen); + } + + public bool IsEmpty + { + get + { + if (_gen < 0) + throw new ObjectDisposedException("snapshot" /*+ " (" + _thisCount + ")"*/); + return _store.IsEmpty(_gen); + } + } + + public long Gen + { + get + { + if (_gen < 0) + throw new ObjectDisposedException("snapshot" /*+ " (" + _thisCount + ")"*/); + return _gen; + } + } + + public void Dispose() + { + if (_gen < 0) return; +#if DEBUG + _logger.Debug("Dispose snapshot (" + _genRef.GenRefRef.Count + ")."); +#endif + _gen = -1; + Interlocked.Decrement(ref _genRef.GenRefRef.Count); + GC.SuppressFinalize(this); + } + } + + internal class GenRefRef + { + public GenRefRef(long gen) + { + Gen = gen; + WGenRef = new WeakReference(null); + } + + public GenRef GetGenRef() + { + // not thread-safe but always invoked from within a lock + var genRef = (GenRef) WGenRef.Target; + if (genRef == null) + WGenRef.Target = genRef = new GenRef(this, Gen); + return genRef; + } + + public readonly long Gen; + public readonly WeakReference WGenRef; + public int Count; + } + + internal class GenRef + { + public GenRef(GenRefRef genRefRef, long gen) + { + GenRefRef = genRefRef; + Gen = gen; + } + + public readonly GenRefRef GenRefRef; + public readonly long Gen; + } + + #endregion + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/DataSource/BTree.cs b/src/Umbraco.Web/PublishedCache/NuCache/DataSource/BTree.cs new file mode 100644 index 0000000000..ae8e28b223 --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/DataSource/BTree.cs @@ -0,0 +1,204 @@ +using System; +using System.Collections.Generic; +using System.IO; +using CSharpTest.Net.Collections; +using CSharpTest.Net.Serialization; + +namespace Umbraco.Web.PublishedCache.NuCache.DataSource +{ 
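
Putting the classes above together: every key in the store maps to a chain of LinkedNode entries, newest generation first; a Snapshot simply pins a generation number, and a read walks the chain until it finds the first entry whose Gen is not newer than the pinned one. A condensed, self-contained sketch of that read path (a toy store, not the real ContentStore2, and without its locking):

```csharp
using System;
using System.Collections.Concurrent;

class SnapStore<TKey, TValue> where TValue : class
{
    class Link
    {
        public Link(TValue value, long gen, Link next) { Value = value; Gen = gen; Next = next; }
        public readonly long Gen;
        public volatile TValue Value;
        public volatile Link Next;
    }

    private readonly ConcurrentDictionary<TKey, Link> _items = new ConcurrentDictionary<TKey, Link>();
    private long _liveGen = 1;

    // writer: prepend a link for the live generation (the real store also reuses the live-gen head)
    public void Set(TKey key, TValue value)
    {
        _items.AddOrUpdate(key,
            k => new Link(value, _liveGen, null),
            (k, head) => new Link(value, _liveGen, head));
    }

    // snapshot: pin the current generation, future writes go to the next one
    // (not thread-safe here; the real store does this under its read/write locks)
    public long CreateSnapshotGen()
    {
        return _liveGen++;
    }

    // reader: walk the chain, newest first, until a link belongs to the pinned gen or older
    public TValue Get(TKey key, long gen)
    {
        Link link;
        _items.TryGetValue(key, out link);
        while (link != null)
        {
            if (link.Gen <= gen) return link.Value;   // may be null = "removed in that gen"
            link = link.Next;
        }
        return null;
    }
}
```

With this shape, a snapshot taken before an update keeps returning the old value while later snapshots see the new one; the real store additionally reference-counts snapshots per generation (GenRefRef.Count plus a weak reference to the shared GenRef) so the background Collect can drop generations nobody holds anymore and trim the chain tails below the floor.
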
+ class BTree + { + public static BPlusTree GetTree(string filepath, bool exists) + { + var keySerializer = new PrimitiveSerializer(); + var valueSerializer = new ContentNodeKitSerializer(); + var options = new BPlusTree.OptionsV2(keySerializer, valueSerializer) + { + CreateFile = exists ? CreatePolicy.IfNeeded : CreatePolicy.Always, + FileName = filepath, + + // other options? + }; + + var tree = new BPlusTree(options); + + // anything? + //btree. + + return tree; + } + + class ContentNodeKitSerializer : ISerializer + { + static readonly ContentDataSerializer DataSerializer = new ContentDataSerializer(); + //static readonly ListOfIntSerializer ChildContentIdsSerializer = new ListOfIntSerializer(); + + public ContentNodeKit ReadFrom(Stream stream) + { + var kit = new ContentNodeKit + { + Node = new ContentNode( + PrimitiveSerializer.Int32.ReadFrom(stream), // id + PrimitiveSerializer.Guid.ReadFrom(stream), // uid + PrimitiveSerializer.Int32.ReadFrom(stream), // level + PrimitiveSerializer.String.ReadFrom(stream), // path + PrimitiveSerializer.Int32.ReadFrom(stream), // sort order + PrimitiveSerializer.Int32.ReadFrom(stream), // parent id + PrimitiveSerializer.DateTime.ReadFrom(stream), // date created + PrimitiveSerializer.Int32.ReadFrom(stream) // creator id + ), + ContentTypeId = PrimitiveSerializer.Int32.ReadFrom(stream) + }; + var hasDraft = PrimitiveSerializer.Boolean.ReadFrom(stream); + if (hasDraft) + kit.DraftData = DataSerializer.ReadFrom(stream); + var hasPublished = PrimitiveSerializer.Boolean.ReadFrom(stream); + if (hasPublished) + kit.PublishedData = DataSerializer.ReadFrom(stream); + return kit; + } + + public void WriteTo(ContentNodeKit value, Stream stream) + { + PrimitiveSerializer.Int32.WriteTo(value.Node.Id, stream); + PrimitiveSerializer.Guid.WriteTo(value.Node.Uid, stream); + PrimitiveSerializer.Int32.WriteTo(value.Node.Level, stream); + PrimitiveSerializer.String.WriteTo(value.Node.Path, stream); + PrimitiveSerializer.Int32.WriteTo(value.Node.SortOrder, stream); + PrimitiveSerializer.Int32.WriteTo(value.Node.ParentContentId, stream); + PrimitiveSerializer.DateTime.WriteTo(value.Node.CreateDate, stream); + PrimitiveSerializer.Int32.WriteTo(value.Node.CreatorId, stream); + PrimitiveSerializer.Int32.WriteTo(value.ContentTypeId, stream); + + PrimitiveSerializer.Boolean.WriteTo(value.DraftData != null, stream); + if (value.DraftData != null) + DataSerializer.WriteTo(value.DraftData, stream); + + PrimitiveSerializer.Boolean.WriteTo(value.PublishedData != null, stream); + if (value.PublishedData != null) + DataSerializer.WriteTo(value.PublishedData, stream); + } + } + + class ContentDataSerializer : ISerializer + { + private readonly static DictionaryOfValuesSerializer PropertiesSerializer = new DictionaryOfValuesSerializer(); + + public ContentData ReadFrom(Stream stream) + { + return new ContentData + { + Published = PrimitiveSerializer.Boolean.ReadFrom(stream), + Name = PrimitiveSerializer.String.ReadFrom(stream), + Version = PrimitiveSerializer.Guid.ReadFrom(stream), + VersionDate = PrimitiveSerializer.DateTime.ReadFrom(stream), + WriterId = PrimitiveSerializer.Int32.ReadFrom(stream), + TemplateId = PrimitiveSerializer.Int32.ReadFrom(stream), + Properties = PropertiesSerializer.ReadFrom(stream) + }; + } + + public void WriteTo(ContentData value, Stream stream) + { + PrimitiveSerializer.Boolean.WriteTo(value.Published, stream); + PrimitiveSerializer.String.WriteTo(value.Name, stream); + PrimitiveSerializer.Guid.WriteTo(value.Version, stream); + 
PrimitiveSerializer.DateTime.WriteTo(value.VersionDate, stream); + PrimitiveSerializer.Int32.WriteTo(value.WriterId, stream); + PrimitiveSerializer.Int32.WriteTo(value.TemplateId, stream); + PropertiesSerializer.WriteTo(value.Properties, stream); + } + } + + /* + class ListOfIntSerializer : ISerializer> + { + public List ReadFrom(Stream stream) + { + var list = new List(); + var count = PrimitiveSerializer.Int32.ReadFrom(stream); + for (var i = 0; i < count; i++) + list.Add(PrimitiveSerializer.Int32.ReadFrom(stream)); + return list; + } + + public void WriteTo(List value, Stream stream) + { + PrimitiveSerializer.Int32.WriteTo(value.Count, stream); + foreach (var item in value) + PrimitiveSerializer.Int32.WriteTo(item, stream); + } + } + */ + + class DictionaryOfValuesSerializer : ISerializer> + { + public IDictionary ReadFrom(Stream stream) + { + var dict = new Dictionary(); + var count = PrimitiveSerializer.Int32.ReadFrom(stream); + for (var i = 0; i < count; i++) + { + var key = PrimitiveSerializer.String.ReadFrom(stream); + var type = PrimitiveSerializer.Char.ReadFrom(stream); + switch (type) + { + case 'N': + dict.Add(key, null); + break; + case 'S': + dict.Add(key, PrimitiveSerializer.String.ReadFrom(stream)); + break; + case 'I': + dict.Add(key, PrimitiveSerializer.Int32.ReadFrom(stream)); + break; + case 'L': + dict.Add(key, PrimitiveSerializer.Int64.ReadFrom(stream)); + break; + case 'D': + dict.Add(key, PrimitiveSerializer.DateTime.ReadFrom(stream)); + break; + default: + throw new NotSupportedException("Cannot deserialize '" + type + "' value."); + } + } + return dict; + } + + public void WriteTo(IDictionary value, Stream stream) + { + PrimitiveSerializer.Int32.WriteTo(value.Count, stream); + foreach (var kvp in value) + { + PrimitiveSerializer.String.WriteTo(kvp.Key, stream); + if (kvp.Value == null) + { + PrimitiveSerializer.Char.WriteTo('N', stream); + } + else if (kvp.Value is string) + { + PrimitiveSerializer.Char.WriteTo('S', stream); + PrimitiveSerializer.String.WriteTo((string) kvp.Value, stream); + } + else if (kvp.Value is int) + { + PrimitiveSerializer.Char.WriteTo('I', stream); + PrimitiveSerializer.Int32.WriteTo((int) kvp.Value, stream); + } + else if (kvp.Value is long) + { + PrimitiveSerializer.Char.WriteTo('L', stream); + PrimitiveSerializer.Int64.WriteTo((long) kvp.Value, stream); + } + else if (kvp.Value is DateTime) + { + PrimitiveSerializer.Char.WriteTo('D', stream); + PrimitiveSerializer.DateTime.WriteTo((DateTime) kvp.Value, stream); + } + else + throw new NotSupportedException("Value type " + kvp.Value.GetType().FullName + " cannot be serialized."); + } + } + } + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/DataSource/ContentData.cs b/src/Umbraco.Web/PublishedCache/NuCache/DataSource/ContentData.cs new file mode 100644 index 0000000000..2647ffdcdd --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/DataSource/ContentData.cs @@ -0,0 +1,19 @@ +using System; +using System.Collections.Generic; + +namespace Umbraco.Web.PublishedCache.NuCache.DataSource +{ + // represents everything that is specific to draft or published version + class ContentData + { + public bool Published { get; set; } + + public string Name { get; set; } + public Guid Version { get; set; } + public DateTime VersionDate { get; set; } + public int WriterId { get; set; } + public int TemplateId { get; set; } + + public IDictionary Properties { get; set; } + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/DataSource/ContentNuDto.cs 
b/src/Umbraco.Web/PublishedCache/NuCache/DataSource/ContentNuDto.cs new file mode 100644 index 0000000000..50136c0fdd --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/DataSource/ContentNuDto.cs @@ -0,0 +1,27 @@ +using NPoco; +using Umbraco.Core.Models.Rdbms; +using Umbraco.Core.Persistence.DatabaseAnnotations; + +namespace Umbraco.Web.PublishedCache.NuCache.DataSource +{ + [TableName("cmsContentNu")] + [PrimaryKey("nodeId", AutoIncrement = false)] + [ExplicitColumns] + internal class ContentNuDto + { + [Column("nodeId")] + [PrimaryKeyColumn(AutoIncrement = false, Name = "PK_cmsContentNu", OnColumns = "nodeId, published")] + [ForeignKey(typeof(ContentDto), Column = "nodeId")] + public int NodeId { get; set; } + + [Column("published")] + public bool Published { get; set; } + + [Column("data")] + [SpecialDbType(SpecialDbTypes.NTEXT)] + public string Data { get; set; } + + [Column("rv")] + public long Rv { get; set; } + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/DataSource/ContentSourceDto.cs b/src/Umbraco.Web/PublishedCache/NuCache/DataSource/ContentSourceDto.cs new file mode 100644 index 0000000000..cd96ea38f1 --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/DataSource/ContentSourceDto.cs @@ -0,0 +1,34 @@ +using System; + +namespace Umbraco.Web.PublishedCache.NuCache.DataSource +{ + // read-only dto + internal class ContentSourceDto + { + public int Id { get; set; } + public Guid Uid { get; set; } + public int ContentTypeId { get; set; } + + public int Level { get; set; } + public string Path { get; set; } + public int SortOrder { get; set; } + public int ParentId { get; set; } + + public DateTime CreateDate { get; set; } + public int CreatorId { get; set; } + + public string DraftName { get; set; } + public Guid DraftVersion { get; set; } + public DateTime DraftVersionDate { get; set; } + public int DraftWriterId { get; set; } + public int DraftTemplateId { get; set; } + public string DraftData { get; set; } + + public string PubName { get; set; } + public Guid PubVersion { get; set; } + public DateTime PubVersionDate { get; set; } + public int PubWriterId { get; set; } + public int PubTemplateId { get; set; } + public string PubData { get; set; } + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/DataSource/Database.cs b/src/Umbraco.Web/PublishedCache/NuCache/DataSource/Database.cs new file mode 100644 index 0000000000..9b9e6eb5ac --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/DataSource/Database.cs @@ -0,0 +1,293 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Newtonsoft.Json; +using NPoco; +using Umbraco.Core; +using Umbraco.Core.Logging; +using Umbraco.Core.Persistence.UnitOfWork; +using Umbraco.Core.Serialization; + +namespace Umbraco.Web.PublishedCache.NuCache.DataSource +{ + // provides efficient database access for NuCache + class Database + { + public ContentNodeKit GetContentSource(IDatabaseUnitOfWork uow, int id) + { + var dto = uow.Database.Fetch(new Sql(@"SELECT +n.id Id, n.uniqueId Uid, +cmsContent.contentType ContentTypeId, +n.level Level, n.path Path, n.sortOrder SortOrder, n.parentId ParentId, +n.createDate CreateDate, n.nodeUser CreatorId, +docDraft.text DraftName, docDraft.versionId DraftVersion, docDraft.updateDate DraftVersionDate, docDraft.documentUser DraftWriterId, docDraft.templateId DraftTemplateId, +nuDraft.data DraftData, +docPub.text PubName, docPub.versionId PubVersion, docPub.updateDate PubVersionDate, docPub.documentUser PubWriterId, docPub.templateId PubTemplateId, 
+nuPub.data PubData +FROM umbracoNode n +JOIN cmsContent ON (cmsContent.nodeId=n.id) +LEFT JOIN cmsDocument docDraft ON (docDraft.nodeId=n.id AND docDraft.newest=1 AND docDraft.published=0) +LEFT JOIN cmsDocument docPub ON (docPub.nodeId=n.id AND docPub.published=1) +LEFT JOIN cmsContentNu nuDraft ON (nuDraft.nodeId=n.id AND nuDraft.published=0) +LEFT JOIN cmsContentNu nuPub ON (nuPub.nodeId=n.id AND nuPub.published=1) +WHERE n.nodeObjectType=@objType AND n.id=@id +", new { objType = Constants.ObjectTypes.DocumentGuid, /*id =*/ id })).FirstOrDefault(); + return dto == null ? new ContentNodeKit() : CreateContentNodeKit(dto); + } + + public ContentNodeKit GetMediaSource(IDatabaseUnitOfWork uow, int id) + { + // should be only 1 version for medias + + var dto = uow.Database.Fetch(new Sql(@"SELECT +n.id Id, n.uniqueId Uid, +cmsContent.contentType ContentTypeId, +n.level Level, n.path Path, n.sortOrder SortOrder, n.parentId ParentId, +n.createDate CreateDate, n.nodeUser CreatorId, +n.text PubName, ver.versionId PubVersion, ver.versionDate PubVersionDate, +nuPub.data PubData +FROM umbracoNode n +JOIN cmsContent ON (cmsContent.nodeId=n.id) +JOIN cmsContentVersion ver ON (ver.contentId=n.id) +LEFT JOIN cmsContentNu nuPub ON (nuPub.nodeId=n.id AND nuPub.published=1) +WHERE n.nodeObjectType=@objType AND n.id=@id +", new { objType = Constants.ObjectTypes.MediaGuid, /*id =*/ id })).FirstOrDefault(); + return dto == null ? new ContentNodeKit() : CreateMediaNodeKit(dto); + } + + // we want arrays, we want them all loaded, not an enumerable + + public IEnumerable GetAllContentSources(IDatabaseUnitOfWork uow) + { + return uow.Database.Query(new Sql(@"SELECT +n.id Id, n.uniqueId Uid, +cmsContent.contentType ContentTypeId, +n.level Level, n.path Path, n.sortOrder SortOrder, n.parentId ParentId, +n.createDate CreateDate, n.nodeUser CreatorId, +docDraft.text DraftName, docDraft.versionId DraftVersion, docDraft.updateDate DraftVersionDate, docDraft.documentUser DraftWriterId, docDraft.templateId DraftTemplateId, +nuDraft.data DraftData, +docPub.text PubName, docPub.versionId PubVersion, docPub.updateDate PubVersionDate, docPub.documentUser PubWriterId, docPub.templateId PubTemplateId, +nuPub.data PubData +FROM umbracoNode n +JOIN cmsContent ON (cmsContent.nodeId=n.id) +LEFT JOIN cmsDocument docDraft ON (docDraft.nodeId=n.id AND docDraft.newest=1 AND docDraft.published=0) +LEFT JOIN cmsDocument docPub ON (docPub.nodeId=n.id AND docPub.published=1) +LEFT JOIN cmsContentNu nuDraft ON (nuDraft.nodeId=n.id AND nuDraft.published=0) +LEFT JOIN cmsContentNu nuPub ON (nuPub.nodeId=n.id AND nuPub.published=1) +WHERE n.nodeObjectType=@objType +ORDER BY n.level, n.sortOrder +", new { objType = Constants.ObjectTypes.DocumentGuid })).Select(CreateContentNodeKit); + } + + public IEnumerable GetAllMediaSources(IDatabaseUnitOfWork uow) + { + // should be only 1 version for medias + + return uow.Database.Query(new Sql(@"SELECT +n.id Id, n.uniqueId Uid, +cmsContent.contentType ContentTypeId, +n.level Level, n.path Path, n.sortOrder SortOrder, n.parentId ParentId, +n.createDate CreateDate, n.nodeUser CreatorId, +n.text PubName, ver.versionId PubVersion, ver.versionDate PubVersionDate, +nuPub.data PubData +FROM umbracoNode n +JOIN cmsContent ON (cmsContent.nodeId=n.id) +JOIN cmsContentVersion ver ON (ver.contentId=n.id) +LEFT JOIN cmsContentNu nuPub ON (nuPub.nodeId=n.id AND nuPub.published=1) +WHERE n.nodeObjectType=@objType +ORDER BY n.level, n.sortOrder +", new { objType = Constants.ObjectTypes.MediaGuid 
})).Select(CreateMediaNodeKit); + } + + public IEnumerable GetBranchContentSources(IDatabaseUnitOfWork uow, int id) + { + return uow.Database.Query(new Sql(@"SELECT +n.id Id, n.uniqueId Uid, +cmsContent.contentType ContentTypeId, +n.level Level, n.path Path, n.sortOrder SortOrder, n.parentId ParentId, +n.createDate CreateDate, n.nodeUser CreatorId, +docDraft.text DraftName, docDraft.versionId DraftVersion, docDraft.updateDate DraftVersionDate, docDraft.documentUser DraftWriterId, docDraft.templateId DraftTemplateId, +nuDraft.data DraftData, +docPub.text PubName, docPub.versionId PubVersion, docPub.updateDate PubVersionDate, docPub.documentUser PubWriterId, docPub.templateId PubTemplateId, +nuPub.data PubData +FROM umbracoNode n +JOIN umbracoNode x ON (n.id=x.id OR n.path LIKE " + uow.Database.SqlSyntax.GetConcat("x.path", "',%'") + @") +JOIN cmsContent ON (cmsContent.nodeId=n.id) +LEFT JOIN cmsDocument docDraft ON (docDraft.nodeId=n.id AND docDraft.newest=1 AND docDraft.published=0) +LEFT JOIN cmsDocument docPub ON (docPub.nodeId=n.id AND docPub.published=1) +LEFT JOIN cmsContentNu nuDraft ON (nuDraft.nodeId=n.id AND nuDraft.published=0) +LEFT JOIN cmsContentNu nuPub ON (nuPub.nodeId=n.id AND nuPub.published=1) +WHERE n.nodeObjectType=@objType AND x.id=@id +ORDER BY n.level, n.sortOrder +", new { objType = Constants.ObjectTypes.DocumentGuid, /*id =*/ id })).Select(CreateContentNodeKit); + } + + public IEnumerable GetBranchMediaSources(IDatabaseUnitOfWork uow, int id) + { + // should be only 1 version for medias + + return uow.Database.Query(new Sql(@"SELECT +n.id Id, n.uniqueId Uid, +cmsContent.contentType ContentTypeId, +n.level Level, n.path Path, n.sortOrder SortOrder, n.parentId ParentId, +n.createDate CreateDate, n.nodeUser CreatorId, +n.text PubName, ver.versionId PubVersion, ver.versionDate PubVersionDate, +nuPub.data PubData +FROM umbracoNode n +JOIN umbracoNode x ON (n.id=x.id OR n.path LIKE " + uow.Database.SqlSyntax.GetConcat("x.path", "',%'") + @") +JOIN cmsContent ON (cmsContent.nodeId=n.id) +JOIN cmsContentVersion ver ON (ver.contentId=n.id) +LEFT JOIN cmsContentNu nuPub ON (nuPub.nodeId=n.id AND nuPub.published=1) +WHERE n.nodeObjectType=@objType AND x.id=@id +ORDER BY n.level, n.sortOrder +", new { objType = Constants.ObjectTypes.MediaGuid, /*id =*/ id })).Select(CreateMediaNodeKit); + } + + public IEnumerable GetTypeContentSources(IDatabaseUnitOfWork uow, IEnumerable ids) + { + return uow.Database.Query(new Sql(@"SELECT +n.id Id, n.uniqueId Uid, +cmsContent.contentType ContentTypeId, +n.level Level, n.path Path, n.sortOrder SortOrder, n.parentId ParentId, +n.createDate CreateDate, n.nodeUser CreatorId, +docDraft.text DraftName, docDraft.versionId DraftVersion, docDraft.updateDate DraftVersionDate, docDraft.documentUser DraftWriterId, docDraft.templateId DraftTemplateId, +nuDraft.data DraftData, +docPub.text PubName, docPub.versionId PubVersion, docPub.updateDate PubVersionDate, docPub.documentUser PubWriterId, docPub.templateId PubTemplateId, +nuPub.data PubData +FROM umbracoNode n +JOIN cmsContent ON (cmsContent.nodeId=n.id) +LEFT JOIN cmsDocument docDraft ON (docDraft.nodeId=n.id AND docDraft.newest=1 AND docDraft.published=0) +LEFT JOIN cmsDocument docPub ON (docPub.nodeId=n.id AND docPub.published=1) +LEFT JOIN cmsContentNu nuDraft ON (nuDraft.nodeId=n.id AND nuDraft.published=0) +LEFT JOIN cmsContentNu nuPub ON (nuPub.nodeId=n.id AND nuPub.published=1) +WHERE n.nodeObjectType=@objType AND cmsContent.contentType IN (@ids) +ORDER BY n.level, n.sortOrder +", new { 
objType = Constants.ObjectTypes.DocumentGuid, /*id =*/ ids })).Select(CreateContentNodeKit); + } + + public IEnumerable GetTypeMediaSources(IDatabaseUnitOfWork uow, IEnumerable ids) + { + // should be only 1 version for medias + + return uow.Database.Query(new Sql(@"SELECT +n.id Id, n.uniqueId Uid, +cmsContent.contentType ContentTypeId, +n.level Level, n.path Path, n.sortOrder SortOrder, n.parentId ParentId, +n.createDate CreateDate, n.nodeUser CreatorId, +n.text PubName, ver.versionId PubVersion, ver.versionDate PubVersionDate, +nuPub.data PubData +FROM umbracoNode n +JOIN cmsContent ON (cmsContent.nodeId=n.id) +JOIN cmsContentVersion ver ON (ver.contentId=n.id) +LEFT JOIN cmsContentNu nuPub ON (nuPub.nodeId=n.id AND nuPub.published=1) +WHERE n.nodeObjectType=@objType AND cmsContent.contentType IN (@ids) +ORDER BY n.level, n.sortOrder +", new { objType = Constants.ObjectTypes.MediaGuid, /*id =*/ ids })).Select(CreateMediaNodeKit); + } + + private static ContentNodeKit CreateContentNodeKit(ContentSourceDto dto) + { + ContentData d = null; + ContentData p = null; + + if (dto.DraftVersion != Guid.Empty) + { + if (dto.DraftData == null) + { + //throw new Exception("Missing cmsContentNu content for node " + dto.Id + ", consider rebuilding."); + LogHelper.Warn("Missing cmsContentNu content for node " + dto.Id + ", consider rebuilding."); + } + else + { + d = new ContentData + { + Name = dto.DraftName, + Published = false, + TemplateId = dto.DraftTemplateId, + Version = dto.DraftVersion, + VersionDate = dto.DraftVersionDate, + WriterId = dto.DraftWriterId, + Properties = DeserializeData(dto.DraftData) + }; + } + } + + if (dto.PubVersion != Guid.Empty) + { + if (dto.PubData == null) + { + //throw new Exception("Missing cmsContentNu content for node " + dto.Id + ", consider rebuilding."); + LogHelper.Warn("Missing cmsContentNu content for node " + dto.Id + ", consider rebuilding."); + } + else + { + p = new ContentData + { + Name = dto.PubName, + Published = true, + TemplateId = dto.PubTemplateId, + Version = dto.PubVersion, + VersionDate = dto.PubVersionDate, + WriterId = dto.PubWriterId, + Properties = DeserializeData(dto.PubData) + }; + } + } + + var n = new ContentNode(dto.Id, dto.Uid, + dto.Level, dto.Path, dto.SortOrder, dto.ParentId, dto.CreateDate, dto.CreatorId); + + var s = new ContentNodeKit + { + Node = n, + ContentTypeId = dto.ContentTypeId, + DraftData = d, + PublishedData = p + }; + + return s; + } + + private static ContentNodeKit CreateMediaNodeKit(ContentSourceDto dto) + { + if (dto.PubData == null) + throw new Exception("No data for media " + dto.Id); + + var p = new ContentData + { + Name = dto.PubName, + Published = true, + TemplateId = -1, + Version = dto.PubVersion, + VersionDate = dto.PubVersionDate, + WriterId = dto.CreatorId, // what-else? 
+ Properties = DeserializeData(dto.PubData) + }; + + var n = new ContentNode(dto.Id, dto.Uid, + dto.Level, dto.Path, dto.SortOrder, dto.ParentId, dto.CreateDate, dto.CreatorId); + + var s = new ContentNodeKit + { + Node = n, + ContentTypeId = dto.ContentTypeId, + PublishedData = p + }; + + return s; + } + + private static Dictionary DeserializeData(string data) + { + // by default JsonConvert will deserialize our numeric values as Int64 + // which is bad, because they were Int32 in the database - take care + + var settings = new JsonSerializerSettings + { + Converters = new List { new ForceInt32Converter() } + }; + + return JsonConvert.DeserializeObject>(data, settings); + } + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/DomainCache.cs b/src/Umbraco.Web/PublishedCache/NuCache/DomainCache.cs new file mode 100644 index 0000000000..b39340e493 --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/DomainCache.cs @@ -0,0 +1,34 @@ +using System.Collections.Generic; +using System.Linq; +using Umbraco.Web.Routing; + +namespace Umbraco.Web.PublishedCache.NuCache +{ + class DomainCache : IDomainCache + { + private readonly SnapDictionary.Snapshot _snapshot; + + public DomainCache(SnapDictionary.Snapshot snapshot) + { + _snapshot = snapshot; + } + + public IEnumerable GetAll(bool includeWildcards) + { + var list = _snapshot.GetAll(); + if (includeWildcards == false) list = list.Where(x => x.IsWildcard == false); + return list; + } + + public IEnumerable GetAssigned(int contentId, bool includeWildcards) + { + // probably this could be optimized with an index + // but then we'd need a custom DomainStore of some sort + + var list = _snapshot.GetAll(); + list = list.Where(x => x.ContentId == contentId); + if (includeWildcards == false) list = list.Where(x => x.IsWildcard == false); + return list; + } + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/Facade.cs b/src/Umbraco.Web/PublishedCache/NuCache/Facade.cs new file mode 100644 index 0000000000..fc9c36eab9 --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/Facade.cs @@ -0,0 +1,126 @@ +using System; +using Umbraco.Core; +using Umbraco.Core.Cache; +using Umbraco.Core.ObjectResolution; + +namespace Umbraco.Web.PublishedCache.NuCache +{ + // implements the facade + class Facade : IFacade, IDisposable + { + private readonly FacadeService _service; + private readonly bool _defaultPreview; + private FacadeElements _elements; + + #region Constructors + + public Facade(FacadeService service, bool defaultPreview) + { + _service = service; + _defaultPreview = defaultPreview; + FacadeCache = new ObjectCacheRuntimeCacheProvider(); + } + + public class FacadeElements : IDisposable + { + public ContentCache ContentCache; + public MediaCache MediaCache; + public MemberCache MemberCache; + public DomainCache DomainCache; + public ICacheProvider FacadeCache; + public ICacheProvider SnapshotCache; + + public void Dispose() + { + ContentCache.Dispose(); + MediaCache.Dispose(); + } + } + + private FacadeElements Elements + { + get + { + // no lock - facades are single-thread + return _elements ?? 
(_elements = _service.GetElements(_defaultPreview)); + } + } + + public void Resync() + { + // no lock - facades are single-thread + if (_elements != null) + _elements.Dispose(); + _elements = null; + } + + #endregion + + #region Current - for tests + + private static Func _getCurrentFacadeFunc = () => + { +#if DEBUG + if (FacadeServiceResolver.HasCurrent == false) return null; + var service = FacadeServiceResolver.Current.Service as FacadeService; + if (service == null) return null; + return (Facade) service.GetFacadeFunc(); +#else + return (Facade) ((FacadeService) FacadeServiceResolver.Current.Service).GetFacadeFunc(); +#endif + }; + + public static Func GetCurrentFacadeFunc + { + get { return _getCurrentFacadeFunc; } + set + { + using (Resolution.Configuration) + { + _getCurrentFacadeFunc = value; + } + } + } + + public static Facade Current + { + get { return _getCurrentFacadeFunc(); } + } + + #endregion + + #region Caches + + public ICacheProvider FacadeCache { get; private set; } + + public ICacheProvider SnapshotCache { get { return Elements.SnapshotCache; } } + + #endregion + + #region IFacade + + public IPublishedContentCache ContentCache { get { return Elements.ContentCache; } } + + public IPublishedMediaCache MediaCache { get { return Elements.MediaCache; } } + + public IPublishedMemberCache MemberCache { get { return Elements.MemberCache; } } + + public IDomainCache DomainCache { get { return Elements.DomainCache; } } + + #endregion + + #region IDisposable + + private bool _disposed; + + public void Dispose() + { + if (_disposed) return; + _disposed = true; + if (_elements != null) + _elements.Dispose(); + } + + #endregion + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/FacadeService.cs b/src/Umbraco.Web/PublishedCache/NuCache/FacadeService.cs new file mode 100644 index 0000000000..ed387d3bea --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/FacadeService.cs @@ -0,0 +1,1488 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using System.Web.Hosting; +using CSharpTest.Net.Collections; +using Newtonsoft.Json; +using Umbraco.Core; +using Umbraco.Core.Cache; +using Umbraco.Core.Logging; +using Umbraco.Core.Models; +using Umbraco.Core.Models.Membership; +using Umbraco.Core.Models.PublishedContent; +using Umbraco.Core.ObjectResolution; +using Umbraco.Core.Persistence; +using Umbraco.Core.Persistence.DatabaseModelDefinitions; +using Umbraco.Core.Persistence.Repositories; +using Umbraco.Core.Persistence.UnitOfWork; +using Umbraco.Core.Services; +using Umbraco.Core.Services.Changes; +using Umbraco.Web.Cache; +using Umbraco.Web.PublishedCache.NuCache.DataSource; +using Umbraco.Web.PublishedCache.XmlPublishedCache; +using Umbraco.Web.Routing; +using Database = Umbraco.Web.PublishedCache.NuCache.DataSource.Database; +#pragma warning disable 618 +using Content = umbraco.cms.businesslogic.Content; +#pragma warning restore 618 + +namespace Umbraco.Web.PublishedCache.NuCache +{ + class FacadeService : FacadeServiceBase + { + private readonly ServiceContext _serviceContext; + private readonly IDatabaseUnitOfWorkProvider _uowProvider; + private readonly Database _dataSource; + private readonly ILogger _logger; + private readonly Options _options; + + // volatile because we read it with no lock + private volatile bool _isReady; + + private readonly ContentStore2 _contentStore; + private readonly ContentStore2 _mediaStore; + private readonly SnapDictionary _domainStore; + private readonly object 
_storesLock = new object(); + + private BPlusTree _localContentDb; + private BPlusTree _localMediaDb; + private readonly bool _localDbExists; + + // define constant - determines whether to use cache when previewing + // to store eg routes, property converted values, anything - caching + // means faster execution, but uses memory - not sure if we want it + // so making it configureable. + public static readonly bool FullCacheWhenPreviewing = true; + + // define constant - determines whether to cache the published content + // objects (in the snapshot cache, or facade cache, depending on preview) + // or to refetch them all the time. caching is faster but uses more + // memory. not sure what we want. + public static readonly bool CachePublishedContentChildren = true; + + // define constant - determines whether to cache the content cache root + // objects (in the snapshot cache, or facade cache, depending on preview) + // or to refecth them all the time. caching is faster but uses more + // memory - not sure what we want. + public static readonly bool CacheContentCacheRoots = true; + + #region Constructors + + public FacadeService(Options options, MainDom mainDom, ServiceContext serviceContext, IDatabaseUnitOfWorkProvider uowProvider, ILogger logger) + { + _serviceContext = serviceContext; + _uowProvider = uowProvider; + _dataSource = new Database(); + _logger = logger; + _options = options; + + // we always want to handle repository events, configured or not + // assuming no repository event will trigger before the whole db is ready + // (ideally we'd have Upgrading.App vs Upgrading.Data application states...) + InitializeRepositoryEvents(); + + // however, the cache is NOT available until we are configured, because loading + // content (and content types) from database cannot be consistent (see notes in "Handle + // Notifications" region), so + // - notifications will be ignored + // - trying to obtain a facade from the service will throw + if (ApplicationContext.Current.IsConfigured == false) + return; + + if (_options.IgnoreLocalDb == false) + { + var registered = mainDom.Register( + null, + () => + { + lock (_storesLock) + { + _contentStore.ReleaseLocalDb(); + _localContentDb = null; + _mediaStore.ReleaseLocalDb(); + _localMediaDb = null; + } + }); + + if (registered) + { + var localContentDbPath = HostingEnvironment.MapPath("~/App_Data/NuCache.Content.db"); + var localMediaDbPath = HostingEnvironment.MapPath("~/App_Data/NuCache.Media.db"); + _localDbExists = System.IO.File.Exists(localContentDbPath) && System.IO.File.Exists(localMediaDbPath); + + // if both local dbs exist then GetTree will open them, else new dbs will be created + _localContentDb = BTree.GetTree(localContentDbPath, _localDbExists); + _localMediaDb = BTree.GetTree(localMediaDbPath, _localDbExists); + } + + // stores are created with a db so they can write to it, but they do not read from it, + // stores need to be populated, happens in OnResolutionFrozen which uses _localDbExists to + // figure out whether it can read the dbs or it should populate them from sql + _contentStore = new ContentStore2(logger, _localContentDb); + _mediaStore = new ContentStore2(logger, _localMediaDb); + } + else + { + _contentStore = new ContentStore2(logger); + _mediaStore = new ContentStore2(logger); + } + + _domainStore = new SnapDictionary(); + + if (Resolution.IsFrozen) + OnResolutionFrozen(); + else + Resolution.Frozen += (sender, args) => OnResolutionFrozen(); + } + + private void OnResolutionFrozen() + { + lock (_storesLock) + { + // 
populate the stores + + try + { + if (_localDbExists) + { + LockAndLoadContent(LoadContentFromLocalDbLocked); + LockAndLoadMedia(LoadMediaFromLocalDbLocked); + } + else + { + LockAndLoadContent(LoadContentFromDatabaseLocked); + LockAndLoadMedia(LoadMediaFromDatabaseLocked); + } + + LockAndLoadDomains(); + } + catch (Exception e) + { + _logger.Error("Panic, exception while loading cache data.", e); + } + + // finaly, cache is ready! + _isReady = true; + } + } + + private void InitializeRepositoryEvents() + { + // plug repository event handlers + // these trigger within the transaction to ensure consistency + // and are used to maintain the central, database-level XML cache + ContentRepository.UowRemovingEntity += OnContentRemovingEntity; + //ContentRepository.RemovedVersion += OnContentRemovedVersion; + ContentRepository.UowRefreshedEntity += OnContentRefreshedEntity; + MediaRepository.UowRemovingEntity += OnMediaRemovingEntity; + //MediaRepository.RemovedVersion += OnMediaRemovedVersion; + MediaRepository.UowRefreshedEntity += OnMediaRefreshedEntity; + MemberRepository.UowRemovingEntity += OnMemberRemovingEntity; + //MemberRepository.RemovedVersion += OnMemberRemovedVersion; + MemberRepository.UowRefreshedEntity += OnMemberRefreshedEntity; + + // plug + ContentTypeService.UowRefreshedEntity += OnContentTypeRefreshedEntity; + MediaTypeService.UowRefreshedEntity+= OnMediaTypeRefreshedEntity; + MemberTypeService.UowRefreshedEntity += OnMemberTypeRefreshedEntity; + + // temp - until we get rid of Content +#pragma warning disable 618 + Content.DeletedContent += OnDeletedContent; +#pragma warning restore 618 + } + + public class Options + { + // indicates that the facade cache should reuse the application request cache + // otherwise a new cache object would be created for the facade specifically, + // which is the default - web boot manager uses this to optimze facades + public bool FacadeCacheIsApplicationRequestCache; + + public bool IgnoreLocalDb; + } + + #endregion + + #region Populate Stores + + // sudden panic... but in RepeatableRead can a content that I haven't already read, be removed + // before I read it? NO! because the WHOLE content tree is read-locked using WithReadLocked. + // don't panic. 
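
The boot sequence above follows a simple rule: if both local NuCache db files were found on disk the stores are rehydrated from them (warm boot), otherwise everything is reloaded from SQL and the local dbs are rebuilt (cold boot); only once that has run does the volatile _isReady flag flip, and until it does notifications are silently ignored. A compact sketch of that gate, with made-up callback names standing in for the real loaders:

```csharp
using System;

class BootGate
{
    private readonly object _lock = new object();
    private volatile bool _isReady;          // volatile: checked without a lock on the notification path

    public void Populate(bool localDbExists, Action loadFromLocalDb, Action loadFromDatabase)
    {
        lock (_lock)
        {
            try
            {
                if (localDbExists) loadFromLocalDb();   // warm boot: read the persisted kits
                else loadFromDatabase();                // cold boot: read SQL, rebuild the local db
            }
            catch (Exception e)
            {
                Console.Error.WriteLine("Panic, exception while loading cache data: " + e);
            }
            _isReady = true;                 // from now on notifications are processed
        }
    }

    public void Notify(Action apply)
    {
        if (_isReady == false) return;       // cache not built yet: ignore the notification
        apply();
    }
}
```
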
+ + private void LockAndLoadContent(Action action) + { + _contentStore.WriteLocked(() => + { + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.ContentTree); + action(uow); + uow.Complete(); + } + }); + } + + private void LoadContentFromDatabaseLocked(IDatabaseUnitOfWork uow) + { + // locks: + // contentStore is wlocked (1 thread) + // content (and types) are read-locked + + var contentTypes = _serviceContext.ContentTypeService.GetAll() + .Select(x => new PublishedContentType(PublishedItemType.Content, x)); + _contentStore.UpdateContentTypes(null, contentTypes, null); + + _localContentDb?.Clear(); + + _logger.Debug("Loading content from database..."); + var sw = Stopwatch.StartNew(); + var kits = _dataSource.GetAllContentSources(uow); + _contentStore.SetAll(kits); + sw.Stop(); + _logger.Debug("Loaded content from database (" + sw.ElapsedMilliseconds + "ms)."); + } + + private void LoadContentFromLocalDbLocked(IDatabaseUnitOfWork uow) + { + var contentTypes = _serviceContext.ContentTypeService.GetAll() + .Select(x => new PublishedContentType(PublishedItemType.Content, x)); + _contentStore.UpdateContentTypes(null, contentTypes, null); + + _logger.Debug("Loading content from local db..."); + var sw = Stopwatch.StartNew(); + var kits = _localContentDb.Select(x => x.Value); + _contentStore.SetAll(kits); + sw.Stop(); + _logger.Debug("Loaded content from local db (" + sw.ElapsedMilliseconds + "ms)."); + } + + // keep these around - might be useful + + //private void LoadContentBranch(IContent content) + //{ + // LoadContent(content); + + // foreach (var child in content.Children()) + // LoadContentBranch(child); + //} + + //private void LoadContent(IContent content) + //{ + // var contentService = _serviceContext.ContentService as ContentService; + // if (contentService == null) throw new Exception("oops"); + // var newest = content; + // var published = newest.Published + // ? newest + // : (newest.HasPublishedVersion ? 
contentService.GetByVersion(newest.PublishedVersionGuid) : null); + + // var contentNode = CreateContentNode(newest, published); + // _contentStore.Set(contentNode); + //} + + private void LockAndLoadMedia(Action action) + { + _mediaStore.WriteLocked(() => + { + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.MediaTree); + action(uow); + uow.Complete(); + } + }); + } + + private void LoadMediaFromDatabaseLocked(IDatabaseUnitOfWork uow) + { + // locks & notes: see content + + var mediaTypes = _serviceContext.MediaTypeService.GetAll() + .Select(x => new PublishedContentType(PublishedItemType.Media, x)); + _mediaStore.UpdateContentTypes(null, mediaTypes, null); + + _localMediaDb?.Clear(); + + _logger.Debug("Loading media from database..."); + var sw = Stopwatch.StartNew(); + var kits = _dataSource.GetAllMediaSources(uow); + _mediaStore.SetAll(kits); + sw.Stop(); + _logger.Debug("Loaded media from database (" + sw.ElapsedMilliseconds + "ms)."); + } + + private void LoadMediaFromLocalDbLocked(IDatabaseUnitOfWork uow) + { + var mediaTypes = _serviceContext.MediaTypeService.GetAll() + .Select(x => new PublishedContentType(PublishedItemType.Media, x)); + _mediaStore.UpdateContentTypes(null, mediaTypes, null); + + _logger.Debug("Loading media from local db..."); + var sw = Stopwatch.StartNew(); + var kits = _localMediaDb.Select(x => x.Value); + _mediaStore.SetAll(kits); + sw.Stop(); + _logger.Debug("Loaded media from local db (" + sw.ElapsedMilliseconds + "ms)."); + } + + // keep these around - might be useful + + //private void LoadMediaBranch(IMedia media) + //{ + // LoadMedia(media); + + // foreach (var child in media.Children()) + // LoadMediaBranch(child); + //} + + //private void LoadMedia(IMedia media) + //{ + // var mediaType = _contentTypeCache.Get(PublishedItemType.Media, media.ContentTypeId); + + // var mediaData = new ContentData + // { + // Name = media.Name, + // Published = true, + // Version = media.Version, + // VersionDate = media.UpdateDate, + // WriterId = media.CreatorId, // what else? + // TemplateId = -1, // have none + // Properties = GetPropertyValues(media) + // }; + + // var mediaNode = new ContentNode(media.Id, mediaType, + // media.Level, media.Path, media.SortOrder, + // media.ParentId, media.CreateDate, media.CreatorId, + // null, mediaData); + + // _mediaStore.Set(mediaNode); + //} + + //private Dictionary GetPropertyValues(IContentBase content) + //{ + // var propertyEditorResolver = PropertyEditorResolver.Current; // should inject + + // return content + // .Properties + // .Select(property => + // { + // var e = propertyEditorResolver.GetByAlias(property.PropertyType.PropertyEditorAlias); + // var v = e == null + // ? property.Value + // : e.ValueEditor.ConvertDbToString(property, property.PropertyType, _serviceContext.DataTypeService); + // return new KeyValuePair(property.Alias, v); + // }) + // .ToDictionary(x => x.Key, x => x.Value); + //} + + //private ContentData CreateContentData(IContent content) + //{ + // return new ContentData + // { + // Name = content.Name, + // Published = content.Published, + // Version = content.Version, + // VersionDate = content.UpdateDate, + // WriterId = content.WriterId, + // TemplateId = content.Template == null ? 
-1 : content.Template.Id, + // Properties = GetPropertyValues(content) + // }; + //} + + //private ContentNode CreateContentNode(IContent newest, IContent published) + //{ + // var contentType = _contentTypeCache.Get(PublishedItemType.Content, newest.ContentTypeId); + + // var draftData = newest.Published + // ? null + // : CreateContentData(newest); + + // var publishedData = newest.Published + // ? CreateContentData(newest) + // : (published == null ? null : CreateContentData(published)); + + // var contentNode = new ContentNode(newest.Id, contentType, + // newest.Level, newest.Path, newest.SortOrder, + // newest.ParentId, newest.CreateDate, newest.CreatorId, + // draftData, publishedData); + + // return contentNode; + //} + + private void LockAndLoadDomains() + { + _domainStore.WriteLocked(() => + { + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.Domains); + LoadDomainsLocked(); + uow.Complete(); + } + }); + } + + private void LoadDomainsLocked() + { + var domains = _serviceContext.DomainService.GetAll(true); + foreach (var domain in domains + .Where(x => x.RootContentId.HasValue && x.LanguageIsoCode.IsNullOrWhiteSpace() == false) + .Select(x => new Domain(x.Id, x.DomainName, x.RootContentId.Value, CultureInfo.GetCultureInfo(x.LanguageIsoCode), x.IsWildcard))) + { + _domainStore.Set(domain.Id, domain); + } + } + + #endregion + + #region Handle Notifications + + // note: if the service is not ready, ie _isReady is false, then notifications are ignored + + // SetUmbracoVersionStep issues a DistributedCache.Instance.RefreshAllFacade() call which should cause + // the entire content, media etc caches to reload from database -- and then the app restarts -- however, + // at the time SetUmbracoVersionStep runs, Umbraco is not fully initialized and therefore some property + // value converters, etc are not registered, and rebuilding the NuCache may not work properly. + // + // More details: ApplicationContext.IsConfigured being false, ApplicationEventHandler.ExecuteWhen... is + // called and in most cases events are skipped, so property value converters are not registered or + // removed, so PublishedPropertyType either initializes with the wrong converter, or throws because it + // detects more than one converter for a property type. + // + // It's not an issue for XmlStore - the app restart takes place *after* the install has refreshed the + // cache, and XmlStore just writes a new umbraco.config file upon RefreshAll, so that's OK. + // + // But for NuCache... we cannot rebuild the cache now. So it will NOT work and we are not fixing it, + // because now we should ALWAYS run with the database server messenger, and then the RefreshAll will + // be processed as soon as we are configured and the messenger processes instructions. 
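+
+ // the Notify overrides below all follow the same pattern (sketched here for orientation,
+ // see NotifyLocked for the real dispatch): ignore payloads while _isReady is false, take the
+ // store's write lock, apply each payload, then Facade.Current.Resync() if anything changed.
+ // per payload, roughly:
+ //
+ //   if (payload.ChangeTypes.HasType(TreeChangeTypes.RefreshAll))
+ //       LoadContentFromDatabaseLocked(uow);                  // full reload, tree read-locked
+ //   else if (payload.ChangeTypes.HasType(TreeChangeTypes.Remove))
+ //       _contentStore.Clear(payload.Id);
+ //   else if (payload.ChangeTypes.HasType(TreeChangeTypes.RefreshBranch))
+ //       _contentStore.SetBranch(payload.Id, _dataSource.GetBranchContentSources(uow, payload.Id));
+ //   else // RefreshNode (clears instead if the re-fetched kit is empty)
+ //       _contentStore.Set(_dataSource.GetContentSource(uow, payload.Id));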
+ + public override void Notify(ContentCacheRefresher.JsonPayload[] payloads, out bool draftChanged, out bool publishedChanged) + { + // no cache, nothing we can do + if (_isReady == false) + { + draftChanged = publishedChanged = false; + return; + } + + var draftChanged2 = false; + var publishedChanged2 = false; + _contentStore.WriteLocked(() => + { + NotifyLocked(payloads, out draftChanged2, out publishedChanged2); + }); + draftChanged = draftChanged2; + publishedChanged = publishedChanged2; + + if (draftChanged || publishedChanged) + Facade.Current.Resync(); + } + + private void NotifyLocked(IEnumerable payloads, out bool draftChanged, out bool publishedChanged) + { + publishedChanged = false; + draftChanged = false; + + var contentService = _serviceContext.ContentService as ContentService; + if (contentService == null) throw new Exception("oops"); + + // locks: + // content (and content types) are read-locked while reading content + // contentStore is wlocked (so readable, only no new views) + // and it can be wlocked by 1 thread only at a time + // contentStore is write-locked during changes + + foreach (var payload in payloads) + { + _logger.Debug($"Notified {payload.ChangeTypes} for content {payload.Id}"); + + if (payload.ChangeTypes.HasType(TreeChangeTypes.RefreshAll)) + { + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.ContentTree); + LoadContentFromDatabaseLocked(uow); + uow.Complete(); + } + draftChanged = publishedChanged = true; + continue; + } + + if (payload.ChangeTypes.HasType(TreeChangeTypes.Remove)) + { + if (_contentStore.Clear(payload.Id)) + draftChanged = publishedChanged = true; + continue; + } + + if (payload.ChangeTypes.HasTypesNone(TreeChangeTypes.RefreshNode | TreeChangeTypes.RefreshBranch)) + { + // ?! + continue; + } + + // fixme - should we do some RV check here? (later) + + var capture = payload; + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.ContentTree); + + if (capture.ChangeTypes.HasType(TreeChangeTypes.RefreshBranch)) + { + // ?? should we do some RV check here? + var kits = _dataSource.GetBranchContentSources(uow, capture.Id); + _contentStore.SetBranch(capture.Id, kits); + } + else + { + // ?? should we do some RV check here? + var kit = _dataSource.GetContentSource(uow, capture.Id); + if (kit.IsEmpty) + { + _contentStore.Clear(capture.Id); + } + else + { + _contentStore.Set(kit); + } + } + + uow.Complete(); + } + + // ?? 
cannot tell really because we're not doing RV checks + draftChanged = publishedChanged = true; + } + } + + public override void Notify(MediaCacheRefresher.JsonPayload[] payloads, out bool anythingChanged) + { + // no cache, nothing we can do + if (_isReady == false) + { + anythingChanged = false; + return; + } + + var anythingChanged2 = false; + _mediaStore.WriteLocked(() => + { + NotifyLocked(payloads, out anythingChanged2); + }); + anythingChanged = anythingChanged2; + + if (anythingChanged) + Facade.Current.Resync(); + } + + private void NotifyLocked(IEnumerable payloads, out bool anythingChanged) + { + anythingChanged = false; + + var mediaService = _serviceContext.MediaService as MediaService; + if (mediaService == null) throw new Exception("oops"); + + // locks: + // see notes for content cache refresher + + foreach (var payload in payloads) + { + _logger.Debug($"Notified {payload.ChangeTypes} for media {payload.Id}"); + + if (payload.ChangeTypes.HasType(TreeChangeTypes.RefreshAll)) + { + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.MediaTree); + LoadMediaFromDatabaseLocked(uow); + uow.Complete(); + } + anythingChanged = true; + continue; + } + + if (payload.ChangeTypes.HasType(TreeChangeTypes.Remove)) + { + if (_mediaStore.Clear(payload.Id)) + anythingChanged = true; + continue; + } + + if (payload.ChangeTypes.HasTypesNone(TreeChangeTypes.RefreshNode | TreeChangeTypes.RefreshBranch)) + { + // ?! + continue; + } + + // fixme - should we do some RV checks here? (later) + + var capture = payload; + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.MediaTree); + + if (capture.ChangeTypes.HasType(TreeChangeTypes.RefreshBranch)) + { + // ?? should we do some RV check here? + var kits = _dataSource.GetBranchMediaSources(uow, capture.Id); + _mediaStore.SetBranch(capture.Id, kits); + } + else + { + // ?? should we do some RV check here? + var kit = _dataSource.GetMediaSource(uow, capture.Id); + if (kit.IsEmpty) + { + _mediaStore.Clear(capture.Id); + } + else + { + _mediaStore.Set(kit); + } + } + + uow.Complete(); + } + + // ?? 
cannot tell really because we're not doing RV checks + anythingChanged = true; + } + } + + public override void Notify(ContentTypeCacheRefresher.JsonPayload[] payloads) + { + // no cache, nothing we can do + if (_isReady == false) + return; + + foreach (var payload in payloads) + LogHelper.Debug($"Notified {payload.ChangeTypes} for {payload.ItemType} {payload.Id}"); + + var removedIds = payloads + .Where(x => x.ItemType == typeof(IContentType).Name && x.ChangeTypes.HasType(ContentTypeChangeTypes.Remove)) + .Select(x => x.Id) + .ToArray(); + + var refreshedIds = payloads + .Where(x => x.ItemType == typeof(IContentType).Name && x.ChangeTypes.HasType(ContentTypeChangeTypes.RefreshMain)) + .Select(x => x.Id) + .ToArray(); + + if (removedIds.Length > 0 || refreshedIds.Length > 0) + _contentStore.WriteLocked(() => + { + // ReSharper disable AccessToModifiedClosure + RefreshContentTypesLocked(removedIds, refreshedIds); + // ReSharper restore AccessToModifiedClosure + }); + + // same for media cache + + removedIds = payloads + .Where(x => x.ItemType == typeof(IMediaType).Name && x.ChangeTypes.HasType(ContentTypeChangeTypes.Remove)) + .Select(x => x.Id) + .ToArray(); + + refreshedIds = payloads + .Where(x => x.ItemType == typeof(IMediaType).Name && x.ChangeTypes.HasType(ContentTypeChangeTypes.RefreshMain)) + .Select(x => x.Id) + .ToArray(); + + if (removedIds.Length > 0 || refreshedIds.Length > 0) + _mediaStore.WriteLocked(() => + { + RefreshMediaTypesLocked(removedIds, refreshedIds); + }); + + Facade.Current.Resync(); + } + + public override void Notify(DataTypeCacheRefresher.JsonPayload[] payloads) + { + // no cache, nothing we can do + if (_isReady == false) + return; + + var idsA = payloads.Select(x => x.Id).ToArray(); + + foreach (var payload in payloads) + LogHelper.Debug($"Notified {(payload.Removed ? 
"Removed" : "Refreshed")} for data type {payload.Id}"); + + _contentStore.WriteLocked(() => + _mediaStore.WriteLocked(() => + { + var contentService = _serviceContext.ContentService as ContentService; + if (contentService == null) throw new Exception("oops"); + + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.ContentTree); + _contentStore.UpdateDataTypes(idsA, id => CreateContentType(PublishedItemType.Content, id)); + uow.Complete(); + } + + var mediaService = _serviceContext.MediaService as MediaService; + if (mediaService == null) throw new Exception("oops"); + + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.MediaTree); + _mediaStore.UpdateDataTypes(idsA, id => CreateContentType(PublishedItemType.Media, id)); + uow.Complete(); + } + })); + + Facade.Current.Resync(); + } + + public override void Notify(DomainCacheRefresher.JsonPayload[] payloads) + { + // no cache, nothing we can do + if (_isReady == false) + return; + + _domainStore.WriteLocked(() => + { + foreach (var payload in payloads) + { + switch (payload.ChangeType) + { + case DomainCacheRefresher.ChangeTypes.RefreshAll: + var domainService = _serviceContext.DomainService as DomainService; + if (domainService == null) throw new Exception("oops"); + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.Domains); + LoadDomainsLocked(); + uow.Complete(); + } + break; + case DomainCacheRefresher.ChangeTypes.Remove: + _domainStore.Clear(payload.Id); + break; + case DomainCacheRefresher.ChangeTypes.Refresh: + var domain = _serviceContext.DomainService.GetById(payload.Id); + if (domain == null) continue; + if (domain.RootContentId.HasValue == false) continue; // anomaly + if (domain.LanguageIsoCode.IsNullOrWhiteSpace()) continue; // anomaly + var culture = CultureInfo.GetCultureInfo(domain.LanguageIsoCode); + _domainStore.Set(domain.Id, new Domain(domain.Id, domain.DomainName, domain.RootContentId.Value, culture, domain.IsWildcard)); + break; + } + } + }); + } + + #endregion + + #region Content Types + + private IEnumerable CreateContentTypes(PublishedItemType itemType, params int[] ids) + { + IEnumerable contentTypes; + switch (itemType) + { + case PublishedItemType.Content: + contentTypes = _serviceContext.ContentTypeService.GetAll(ids); + break; + case PublishedItemType.Media: + contentTypes = _serviceContext.MediaTypeService.GetAll(ids); + break; + case PublishedItemType.Member: + contentTypes = _serviceContext.MemberTypeService.GetAll(ids); + break; + default: + throw new ArgumentOutOfRangeException(nameof(itemType)); + } + + // some may be missing - not checking here + + return contentTypes.Select(x => new PublishedContentType(itemType, x)); + } + + private PublishedContentType CreateContentType(PublishedItemType itemType, int id) + { + IContentTypeComposition contentType; + switch (itemType) + { + case PublishedItemType.Content: + contentType = _serviceContext.ContentTypeService.Get(id); + break; + case PublishedItemType.Media: + contentType = _serviceContext.MediaTypeService.Get(id); + break; + case PublishedItemType.Member: + contentType = _serviceContext.MemberTypeService.Get(id); + break; + default: + throw new ArgumentOutOfRangeException(nameof(itemType)); + } + + return contentType == null ? 
null : new PublishedContentType(itemType, contentType); + } + + private void RefreshContentTypesLocked(IEnumerable removedIds, IEnumerable refreshedIds) + { + // locks: + // content (and content types) are read-locked while reading content + // contentStore is wlocked (so readable, only no new views) + // and it can be wlocked by 1 thread only at a time + + var contentService = _serviceContext.ContentService as ContentService; + if (contentService == null) throw new Exception("oops"); + + var refreshedIdsA = refreshedIds.ToArray(); + + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.ContentTypes); + var typesA = CreateContentTypes(PublishedItemType.Content, refreshedIdsA).ToArray(); + var kits = _dataSource.GetTypeContentSources(uow, refreshedIdsA); + _contentStore.UpdateContentTypes(removedIds, typesA, kits); + uow.Complete(); + } + } + + private void RefreshMediaTypesLocked(IEnumerable removedIds, IEnumerable refreshedIds) + { + // locks: + // media (and content types) are read-locked while reading media + // mediaStore is wlocked (so readable, only no new views) + // and it can be wlocked by 1 thread only at a time + + var mediaService = _serviceContext.MediaService as MediaService; + if (mediaService == null) throw new Exception("oops"); + + var refreshedIdsA = refreshedIds.ToArray(); + + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.MediaTypes); + var typesA = CreateContentTypes(PublishedItemType.Media, refreshedIdsA).ToArray(); + var kits = _dataSource.GetTypeMediaSources(uow, refreshedIdsA); + _mediaStore.UpdateContentTypes(removedIds, typesA, kits); + uow.Complete(); + } + } + + #endregion + + #region Create, Get Facade + + private long _contentGen, _mediaGen, _domainGen; + private ICacheProvider _snapshotCache; + + public override IFacade CreateFacade(string previewToken) + { + // no cache, no joy + if (_isReady == false) + throw new InvalidOperationException("The facade service has not properly initialized."); + + var preview = previewToken.IsNullOrWhiteSpace() == false; + return new Facade(this, preview); + } + + public Facade.FacadeElements GetElements(bool previewDefault) + { + // note: using ObjectCacheRuntimeCacheProvider for snapshot and facade caches + // is not recommended because it creates an inner MemoryCache which is a heavy + // thing - better use a StaticCacheProvider which "just" creates a concurrent + // dictionary + + // for facade cache, StaticCacheProvider MAY be OK but it is not thread-safe, + // nothing like that... + // for snapshot cache, StaticCacheProvider is a No-No, use something better. + + ContentStore2.Snapshot contentSnap, mediaSnap; + SnapDictionary.Snapshot domainSnap; + ICacheProvider snapshotCache; + lock (_storesLock) + { + contentSnap = _contentStore.CreateSnapshot(); + mediaSnap = _mediaStore.CreateSnapshot(); + domainSnap = _domainStore.CreateSnapshot(); + snapshotCache = _snapshotCache; + + // create a new snapshot cache if snapshots are different gens + if (contentSnap.Gen != _contentGen || mediaSnap.Gen != _mediaGen || domainSnap.Gen != _domainGen || _snapshotCache == null) + { + _contentGen = contentSnap.Gen; + _mediaGen = mediaSnap.Gen; + _domainGen = domainSnap.Gen; + snapshotCache = _snapshotCache = new DictionaryCacheProvider(); + } + } + + var facadeCache = _options.FacadeCacheIsApplicationRequestCache + ? 
ApplicationContext.Current.ApplicationCache.RequestCache + : new StaticCacheProvider(); // assuming that's OK for tests, etc + var memberTypeCache = new PublishedContentTypeCache(null, null, _serviceContext.MemberTypeService); + + var domainCache = new DomainCache(domainSnap); + + return new Facade.FacadeElements + { + ContentCache = new ContentCache(previewDefault, contentSnap, facadeCache, snapshotCache, new DomainHelper(domainCache)), + MediaCache = new MediaCache(previewDefault, mediaSnap), + MemberCache = new MemberCache(previewDefault, _serviceContext.MemberService, _serviceContext.DataTypeService, memberTypeCache), + DomainCache = domainCache, + FacadeCache = facadeCache, + SnapshotCache = snapshotCache + }; + } + + #endregion + + #region Preview + + public override string EnterPreview(IUser user, int contentId) + { + return "preview"; // anything + } + + public override void RefreshPreview(string previewToken, int contentId) + { + // nothing + } + + public override void ExitPreview(string previewToken) + { + // nothing + } + + #endregion + + #region Handle Repository Events For Database PreCache + + // note: if the service is not ready, ie _isReady is false, then we still handle repository events, + // because we can, we do not need a working facade to do it - the only reason why it could cause an + // issue is if the database table is not ready, but that should be prevented by migrations. + + // we need them to be "repository" events ie to trigger from within the repository transaction, + // because they need to be consistent with the content that is being refreshed/removed - and that + // should be guaranteed by a DB transaction + + private void OnContentRemovingEntity(ContentRepository sender, ContentRepository.UnitOfWorkEntityEventArgs args) + { + OnRemovedEntity(args.UnitOfWork.Database, args.Entity); + } + + private void OnMediaRemovingEntity(MediaRepository sender, MediaRepository.UnitOfWorkEntityEventArgs args) + { + OnRemovedEntity(args.UnitOfWork.Database, args.Entity); + } + + private void OnMemberRemovingEntity(MemberRepository sender, MemberRepository.UnitOfWorkEntityEventArgs args) + { + OnRemovedEntity(args.UnitOfWork.Database, args.Entity); + } + + private void OnRemovedEntity(UmbracoDatabase db, IContentBase item) + { + db.Execute("DELETE FROM cmsContentNu WHERE nodeId=@id", new { id = item.Id }); + } + + private static readonly string[] PropertiesImpactingAllVersions = { "SortOrder", "ParentId", "Level", "Path", "Trashed" }; + + private static bool HasChangesImpactingAllVersions(IContent icontent) + { + var content = (Core.Models.Content) icontent; + + // UpdateDate will be dirty + // Published may be dirty if saving a Published entity + // so cannot do this (would always be true): + //return content.IsEntityDirty(); + + // have to be more precise & specify properties + return PropertiesImpactingAllVersions.Any(content.IsPropertyDirty); + } + + private void OnContentRefreshedEntity(ContentRepository sender, ContentRepository.UnitOfWorkEntityEventArgs args) + { + var db = args.UnitOfWork.Database; + var content = args.Entity; + + OnRepositoryRefreshed(db, content, false); + + // if unpublishing, remove from table + if (((Core.Models.Content) content).PublishedState == PublishedState.Unpublishing) + { + db.Execute("DELETE FROM cmsContentNu WHERE nodeId=@id AND published=1", new { id = content.Id }); + return; + } + + // need to update the published data if we're saving the published version, + // or having an impact on that version - we update the published data 
even when masked + + IContent pc = null; + if (content.Published) + { + // saving the published version = update data + pc = content; + } + else + { + // saving the non-published version, but there is a published version + // check whether we have changes that impact the published version (move...) + if (content.HasPublishedVersion && HasChangesImpactingAllVersions(content)) + pc = sender.GetByVersion(content.PublishedVersionGuid); + } + + if (pc == null) + return; + + OnRepositoryRefreshed(db, pc, true); + } + + private void OnMediaRefreshedEntity(MediaRepository sender, MediaRepository.UnitOfWorkEntityEventArgs args) + { + var db = args.UnitOfWork.Database; + var media = args.Entity; + + // for whatever reason we delete some data when the media is trashed + // at least that's what the MediaService implementation did + if (media.Trashed) + db.Execute("DELETE FROM cmsContentXml WHERE nodeId=@id", new { id = media.Id }); + + OnRepositoryRefreshed(db, media, true); + } + + private void OnMemberRefreshedEntity(MemberRepository sender, MemberRepository.UnitOfWorkEntityEventArgs args) + { + var db = args.UnitOfWork.Database; + var member = args.Entity; + + OnRepositoryRefreshed(db, member, true); + } + + private void OnRepositoryRefreshed(UmbracoDatabase db, IContentBase content, bool published) + { + // use a custom SQL to update row version on each update + //db.InsertOrUpdate(dto); + + var dto = GetDto(content, published); + db.InsertOrUpdate(dto, + "SET data=@data, rv=rv+1 WHERE nodeId=@id AND published=@published", + new + { + data = dto.Data, + id = dto.NodeId, + published = dto.Published + }); + } + +#pragma warning disable 618 + private static void OnDeletedContent(object sender, Content.ContentDeleteEventArgs args) +#pragma warning restore 618 + { + var db = args.Database; + var parms = new { @nodeId = args.Id }; + db.Execute("DELETE FROM cmsContentNu WHERE nodeId=@nodeId", parms); + } + + private void OnContentTypeRefreshedEntity(IContentTypeService sender, ContentTypeChange.EventArgs args) + { + // handling a transaction event that does not play well with cache... + //RepositoryBase.SetCacheEnabledForCurrentRequest(false); // fixme !! + + const ContentTypeChangeTypes types // only for those that have been refreshed + = ContentTypeChangeTypes.RefreshMain | ContentTypeChangeTypes.RefreshOther; + var contentTypeIds = args.Changes.Where(x => x.ChangeTypes.HasTypesAny(types)).Select(x => x.Item.Id).ToArray(); + if (contentTypeIds.Any()) + RebuildContentDbCache(contentTypeIds: contentTypeIds); + } + + private void OnMediaTypeRefreshedEntity(IMediaTypeService sender, ContentTypeChange.EventArgs args) + { + // handling a transaction event that does not play well with cache... + //RepositoryBase.SetCacheEnabledForCurrentRequest(false); // fixme !! + + const ContentTypeChangeTypes types // only for those that have been refreshed + = ContentTypeChangeTypes.RefreshMain | ContentTypeChangeTypes.RefreshOther; + var mediaTypeIds = args.Changes.Where(x => x.ChangeTypes.HasTypesAny(types)).Select(x => x.Item.Id).ToArray(); + if (mediaTypeIds.Any()) + RebuildMediaDbCache(contentTypeIds: mediaTypeIds); + } + + private void OnMemberTypeRefreshedEntity(IMemberTypeService sender, ContentTypeChange.EventArgs args) + { + // handling a transaction event that does not play well with cache... + //RepositoryBase.SetCacheEnabledForCurrentRequest(false); // fixme !! 
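+
+ // as with content and media types above, only changes flagged RefreshMain or RefreshOther
+ // are relevant here; the affected member types get their cmsContentNu rows rebuilt via
+ // RebuildMemberDbCache below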
+ + const ContentTypeChangeTypes types // only for those that have been refreshed + = ContentTypeChangeTypes.RefreshMain | ContentTypeChangeTypes.RefreshOther; + var memberTypeIds = args.Changes.Where(x => x.ChangeTypes.HasTypesAny(types)).Select(x => x.Item.Id).ToArray(); + if (memberTypeIds.Any()) + RebuildMemberDbCache(contentTypeIds: memberTypeIds); + } + + private static ContentNuDto GetDto(IContentBase content, bool published) + { + // should inject these in ctor + // BUT for the time being we decide not to support ConvertDbToXml/String + //var propertyEditorResolver = PropertyEditorResolver.Current; + //var dataTypeService = ApplicationContext.Current.Services.DataTypeService; + + var data = new Dictionary(); + foreach (var prop in content.Properties) + { + var value = prop.Value; + //if (value != null) + //{ + // var e = propertyEditorResolver.GetByAlias(prop.PropertyType.PropertyEditorAlias); + + // // We are converting to string, even for database values which are integer or + // // DateTime, which is not optimum. Doing differently would require that we have a way to tell + // // whether the conversion to XML string changes something or not... which we don't, and we + // // don't want to implement it as PropertyValueEditor.ConvertDbToXml/String should die anyway. + + // // Don't think about improving the situation here: this is a corner case and the real + // // thing to do is to get rig of PropertyValueEditor.ConvertDbToXml/String. + + // // Use ConvertDbToString to keep it simple, although everywhere we use ConvertDbToXml and + // // nothing ensures that the two methods are consistent. + + // if (e != null) + // value = e.ValueEditor.ConvertDbToString(prop, prop.PropertyType, dataTypeService); + //} + data[prop.Alias] = value; + } + + var dto = new ContentNuDto + { + NodeId = content.Id, + Published = published, + + // note that numeric values (which are Int32) are serialized without their + // type (eg "value":1234) and JsonConvert by default deserializes them as Int64 + + Data = JsonConvert.SerializeObject(data) + }; + + return dto; + } + + #endregion + + #region Rebuild Database PreCache + + public void RebuildContentDbCache(int groupSize = 5000, IEnumerable contentTypeIds = null) + { + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.ContentTree); + RebuildContentDbCacheLocked(uow, groupSize, contentTypeIds); + uow.Complete(); + } + } + + // assumes content tree lock + private void RebuildContentDbCacheLocked(IDatabaseUnitOfWork uow, int groupSize, IEnumerable contentTypeIds) + { + var contentTypeIdsA = contentTypeIds?.ToArray(); + var contentObjectType = Guid.Parse(Constants.ObjectTypes.Document); + var db = uow.Database; + + // remove all - if anything fails the transaction will rollback + if (contentTypeIds == null || contentTypeIdsA.Length == 0) + { + // must support SQL-CE + db.Execute(@"DELETE FROM cmsContentNu +WHERE cmsContentNu.nodeId IN ( + SELECT id FROM umbracoNode WHERE umbracoNode.nodeObjectType=@objType +)", + new { objType = contentObjectType }); + } + else + { + // assume number of ctypes won't blow IN(...) 
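+ // the statement below restricts the delete to documents of the specified content types by
+ // joining umbracoNode to cmsContent and filtering on cmsContent.contentType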
+ // must support SQL-CE + db.Execute(@"DELETE FROM cmsContentNu +WHERE cmsContentNu.nodeId IN ( + SELECT id FROM umbracoNode + JOIN cmsContent ON cmsContent.nodeId=umbracoNode.id + WHERE umbracoNode.nodeObjectType=@objType + AND cmsContent.contentType IN (@ctypes) +)", + new { objType = contentObjectType, ctypes = contentTypeIdsA }); + } + + // insert back - if anything fails the transaction will rollback + var repository = uow.CreateRepository(); + var query = repository.Query; + if (contentTypeIds != null && contentTypeIdsA.Length > 0) + query = query.WhereIn(x => x.ContentTypeId, contentTypeIdsA); // assume number of ctypes won't blow IN(...) + + long pageIndex = 0; + long processed = 0; + long total; + do + { + // .GetPagedResultsByQuery implicitely adds (cmsDocument.newest = 1) + var descendants = repository.GetPagedResultsByQuery(query, pageIndex++, groupSize, out total, "Path", Direction.Ascending, true); + var items = new List(); + var guids = new List(); + foreach (var c in descendants) + { + items.Add(GetDto(c, c.Published)); + if (c.Published == false && c.HasPublishedVersion) + guids.Add(c.PublishedVersionGuid); + } + items.AddRange(guids.Select(x => GetDto(repository.GetByVersion(x), true))); + + // ReSharper disable once RedundantArgumentDefaultValue + db.BulkInsertRecords(db.SqlSyntax, items, null, false); // run within the current transaction and do NOT commit + processed += items.Count; + } while (processed < total); + } + + public void RebuildMediaDbCache(int groupSize = 5000, IEnumerable contentTypeIds = null) + { + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.MediaTree); + RebuildMediaDbCacheLocked(uow, groupSize, contentTypeIds); + uow.Complete(); + } + } + + // assumes media tree lock + public void RebuildMediaDbCacheLocked(IDatabaseUnitOfWork uow, int groupSize, IEnumerable contentTypeIds) + { + var contentTypeIdsA = contentTypeIds?.ToArray(); + var mediaObjectType = Guid.Parse(Constants.ObjectTypes.Media); + var db = uow.Database; + + // remove all - if anything fails the transaction will rollback + if (contentTypeIds == null || contentTypeIdsA.Length == 0) + { + // must support SQL-CE + db.Execute(@"DELETE FROM cmsContentNu +WHERE cmsContentNu.nodeId IN ( + SELECT id FROM umbracoNode WHERE umbracoNode.nodeObjectType=@objType +)", + new { objType = mediaObjectType }); + } + else + { + // assume number of ctypes won't blow IN(...) + // must support SQL-CE + db.Execute(@"DELETE FROM cmsContentNu +WHERE cmsContentNu.nodeId IN ( + SELECT id FROM umbracoNode + JOIN cmsContent ON cmsContent.nodeId=umbracoNode.id + WHERE umbracoNode.nodeObjectType=@objType + AND cmsContent.contentType IN (@ctypes) +)", + new { objType = mediaObjectType, ctypes = contentTypeIdsA }); + } + + // insert back - if anything fails the transaction will rollback + var repository = uow.CreateRepository(); + var query = repository.Query; + if (contentTypeIds != null && contentTypeIdsA.Length > 0) + query = query.WhereIn(x => x.ContentTypeId, contentTypeIdsA); // assume number of ctypes won't blow IN(...) 
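+
+ // page through the media ordered by Path, groupSize items per page, serialize each item to a
+ // ContentNuDto and bulk-insert the rows within the ambient transaction until all are processed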
+ + long pageIndex = 0; + long processed = 0; + long total; + do + { + var descendants = repository.GetPagedResultsByQuery(query, pageIndex++, groupSize, out total, "Path", Direction.Ascending, true); + var items = descendants.Select(m => GetDto(m, true)).ToArray(); + // ReSharper disable once RedundantArgumentDefaultValue + db.BulkInsertRecords(db.SqlSyntax, items, null, false); // run within the current transaction and do NOT commit + processed += items.Length; + } while (processed < total); + } + + public void RebuildMemberDbCache(int groupSize = 5000, IEnumerable contentTypeIds = null) + { + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.MemberTree); + RebuildMemberDbCacheLocked(uow, groupSize, contentTypeIds); + uow.Complete(); + } + } + + // assumes member tree lock + public void RebuildMemberDbCacheLocked(IDatabaseUnitOfWork uow, int groupSize, IEnumerable contentTypeIds) + { + var contentTypeIdsA = contentTypeIds?.ToArray(); + var memberObjectType = Guid.Parse(Constants.ObjectTypes.Member); + var db = uow.Database; + + // remove all - if anything fails the transaction will rollback + if (contentTypeIds == null || contentTypeIdsA.Length == 0) + { + // must support SQL-CE + db.Execute(@"DELETE FROM cmsContentNu +WHERE cmsContentNu.nodeId IN ( + SELECT id FROM umbracoNode WHERE umbracoNode.nodeObjectType=@objType +)", + new { objType = memberObjectType }); + } + else + { + // assume number of ctypes won't blow IN(...) + // must support SQL-CE + db.Execute(@"DELETE FROM cmsContentNu +WHERE cmsContentNu.nodeId IN ( + SELECT id FROM umbracoNode + JOIN cmsContent ON cmsContent.nodeId=umbracoNode.id + WHERE umbracoNode.nodeObjectType=@objType + AND cmsContent.contentType IN (@ctypes) +)", + new { objType = memberObjectType, ctypes = contentTypeIdsA }); + } + + // insert back - if anything fails the transaction will rollback + var repository = uow.CreateRepository(); + var query = repository.Query; + if (contentTypeIds != null && contentTypeIdsA.Length > 0) + query = query.WhereIn(x => x.ContentTypeId, contentTypeIdsA); // assume number of ctypes won't blow IN(...) 
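+
+ // same paging pattern as for content and media; members have no draft/published split, so
+ // every row is written with published=true (see GetDto(m, true) below)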
+ + long pageIndex = 0; + long processed = 0; + long total; + do + { + var descendants = repository.GetPagedResultsByQuery(query, pageIndex++, groupSize, out total, "Path", Direction.Ascending, true); + var items = descendants.Select(m => GetDto(m, true)).ToArray(); + // ReSharper disable once RedundantArgumentDefaultValue + db.BulkInsertRecords(db.SqlSyntax, items, null, false); // run within the current transaction and do NOT commit + processed += items.Length; + } while (processed < total); + } + + public bool VerifyContentDbCache() + { + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.ContentTree); + var ok = VerifyContentDbCacheLocked(uow); + uow.Complete(); + return ok; + } + } + + // assumes content tree lock + private bool VerifyContentDbCacheLocked(IDatabaseUnitOfWork uow) + { + // every published content item should have a corresponding row in cmsContentXml + // every content item should have a corresponding row in cmsPreviewXml + + var contentObjectType = Guid.Parse(Constants.ObjectTypes.Document); + var db = uow.Database; + + var count = db.ExecuteScalar(@"SELECT COUNT(*) +FROM umbracoNode +JOIN cmsDocument ON (umbracoNode.id=cmsDocument.nodeId AND (cmsDocument.newest=1 OR cmsDocument.published=1)) +LEFT JOIN cmsContentNu ON (umbracoNode.id=cmsContentNu.nodeId AND cmsContentNu.published=cmsDocument.published) +WHERE umbracoNode.nodeObjectType=@objType +AND cmsContentNu.nodeId IS NULL;" + , new { objType = contentObjectType }); + + return count == 0; + } + + public bool VerifyMediaDbCache() + { + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.MediaTree); + var ok = VerifyMediaDbCacheLocked(uow); + uow.Complete(); + return ok; + } + } + + // assumes media tree lock + public bool VerifyMediaDbCacheLocked(IDatabaseUnitOfWork uow) + { + // every non-trashed media item should have a corresponding row in cmsContentXml + + var mediaObjectType = Guid.Parse(Constants.ObjectTypes.Media); + var db = uow.Database; + + var count = db.ExecuteScalar(@"SELECT COUNT(*) +FROM umbracoNode +JOIN cmsDocument ON (umbracoNode.id=cmsDocument.nodeId AND cmsDocument.published=1) +LEFT JOIN cmsContentNu ON (umbracoNode.id=cmsContentNu.nodeId AND cmsContentNu.published=1) +WHERE umbracoNode.nodeObjectType=@objType +AND cmsContentNu.nodeId IS NULL +", new { objType = mediaObjectType }); + + return count == 0; + } + + public bool VerifyMemberDbCache() + { + using (var uow = _uowProvider.CreateUnitOfWork()) + { + uow.ReadLock(Constants.Locks.MemberTree); + var ok = VerifyMemberDbCacheLocked(uow); + uow.Complete(); + return ok; + } + } + + // assumes member tree lock + public bool VerifyMemberDbCacheLocked(IDatabaseUnitOfWork uow) + { + // every member item should have a corresponding row in cmsContentXml + + var memberObjectType = Guid.Parse(Constants.ObjectTypes.Member); + var db = uow.Database; + + var count = db.ExecuteScalar(@"SELECT COUNT(*) +FROM umbracoNode +LEFT JOIN cmsContentNu ON (umbracoNode.id=cmsContentNu.nodeId AND cmsContentNu.published=1) +WHERE umbracoNode.nodeObjectType=@objType +AND cmsContentNu.nodeId IS NULL +", new { objType = memberObjectType }); + + return count == 0; + } + + #endregion + + #region Instrument + + public string GetStatus() + { + var dbCacheIsOk = VerifyContentDbCache() + && VerifyMediaDbCache() + && VerifyMemberDbCache(); + + var cg = _contentStore.GenCount; + var mg = _mediaStore.GenCount; + var cs = _contentStore.SnapCount; + var ms = _mediaStore.SnapCount; + var ce = _contentStore.Count; + 
var me = _mediaStore.Count; + + return "I'm feeling good, really." + + " Database cache is " + (dbCacheIsOk ? "ok" : "NOT ok (rebuild?)") + "." + + " ContentStore has " + cg + " generation" + (cg > 1 ? "s" : "") + + ", " + cs + " snapshot" + (cs > 1 ? "s" : "") + + " and " + ce + " entr" + (ce > 1 ? "ies" : "y") + "." + + " MediaStore has " + mg + " generation" + (mg > 1 ? "s" : "") + + ", " + ms + " snapshot" + (ms > 1 ? "s" : "") + + " and " + me + " entr" + (me > 1 ? "ies" : "y") + "."; + } + + public void Collect() + { + var contentCollect = _contentStore.CollectAsync(); + var mediaCollect = _mediaStore.CollectAsync(); + System.Threading.Tasks.Task.WaitAll(contentCollect, mediaCollect); + } + + #endregion + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/MediaCache.cs b/src/Umbraco.Web/PublishedCache/NuCache/MediaCache.cs new file mode 100644 index 0000000000..2c4da662b1 --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/MediaCache.cs @@ -0,0 +1,180 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Xml.XPath; +using Umbraco.Core; +using Umbraco.Core.Models; +using Umbraco.Core.Models.PublishedContent; +using Umbraco.Core.Xml; +using Umbraco.Core.Xml.XPath; +using Umbraco.Web.PublishedCache.NuCache.Navigable; + +namespace Umbraco.Web.PublishedCache.NuCache +{ + class MediaCache : PublishedCacheBase, IPublishedMediaCache, INavigableData, IDisposable + { + private readonly ContentStore2.Snapshot _snapshot; + + #region Constructors + + public MediaCache(bool previewDefault, ContentStore2.Snapshot snapshot) + : base(previewDefault) + { + _snapshot = snapshot; + } + + #endregion + + #region Get, Has + + public override IPublishedContent GetById(bool preview, int contentId) + { + var n = _snapshot.Get(contentId); + return n == null ? null : n.Published; + } + + public override bool HasById(bool preview, int contentId) + { + var n = _snapshot.Get(contentId); + return n != null; + } + + public override IEnumerable GetAtRoot(bool preview) + { + if (FacadeService.CacheContentCacheRoots == false) + return GetAtRootNoCache(preview); + + var facade = Facade.Current; + var cache = (facade == null) + ? null + : (preview == false || FacadeService.FullCacheWhenPreviewing + ? facade.SnapshotCache + : facade.FacadeCache); + + if (cache == null) + return GetAtRootNoCache(preview); + + // note: ToArray is important here, we want to cache the result, not the function! + return (IEnumerable)cache.GetCacheItem( + CacheKeys.MediaCacheRoots(preview), + () => GetAtRootNoCache(preview).ToArray()); + } + + private IEnumerable GetAtRootNoCache(bool preview) + { + var c = _snapshot.GetAtRoot(); + + // there's no .Draft for medias, only non-null .Published + // but we may want published as previewing, still + return c.Select(n => preview + ? 
ContentCache.GetPublishedContentAsPreviewing(n.Published) + : n.Published); + } + + public override bool HasContent(bool preview) + { + return _snapshot.IsEmpty == false; + } + + #endregion + + #region XPath + + public override IPublishedContent GetSingleByXPath(bool preview, string xpath, XPathVariable[] vars) + { + var navigator = CreateNavigator(preview); + var iterator = navigator.Select(xpath, vars); + return GetSingleByXPath(iterator); + } + + public override IPublishedContent GetSingleByXPath(bool preview, XPathExpression xpath, XPathVariable[] vars) + { + var navigator = CreateNavigator(preview); + var iterator = navigator.Select(xpath, vars); + return GetSingleByXPath(iterator); + } + + private static IPublishedContent GetSingleByXPath(XPathNodeIterator iterator) + { + if (iterator.MoveNext() == false) return null; + + var xnav = iterator.Current as NavigableNavigator; + if (xnav == null) return null; + + var xcontent = xnav.UnderlyingObject as NavigableContent; + return xcontent == null ? null : xcontent.InnerContent; + } + + public override IEnumerable GetByXPath(bool preview, string xpath, XPathVariable[] vars) + { + var navigator = CreateNavigator(preview); + var iterator = navigator.Select(xpath, vars); + return GetByXPath(iterator); + } + + public override IEnumerable GetByXPath(bool preview, XPathExpression xpath, XPathVariable[] vars) + { + var navigator = CreateNavigator(preview); + var iterator = navigator.Select(xpath, vars); + return GetByXPath(iterator); + } + + private static IEnumerable GetByXPath(XPathNodeIterator iterator) + { + while (iterator.MoveNext()) + { + var xnav = iterator.Current as NavigableNavigator; + if (xnav == null) continue; + + var xcontent = xnav.UnderlyingObject as NavigableContent; + if (xcontent == null) continue; + + yield return xcontent.InnerContent; + } + } + + public override XPathNavigator CreateNavigator(bool preview) + { + var source = new Source(this, preview); + var navigator = new NavigableNavigator(source); + return navigator; + } + + public override XPathNavigator CreateNodeNavigator(int id, bool preview) + { + var source = new Source(this, preview); + var navigator = new NavigableNavigator(source); + return navigator.CloneWithNewRoot(id, 0); + } + + #endregion + + #region Content types + + public override PublishedContentType GetContentType(int id) + { + return _snapshot.GetContentType(id); + } + + public override PublishedContentType GetContentType(string alias) + { + return _snapshot.GetContentType(alias); + } + + public override IEnumerable GetByContentType(PublishedContentType contentType) + { + throw new NotImplementedException(); + } + + #endregion + + #region IDisposable + + public void Dispose() + { + _snapshot.Dispose(); + } + + #endregion + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/MemberCache.cs b/src/Umbraco.Web/PublishedCache/NuCache/MemberCache.cs new file mode 100644 index 0000000000..c6a8f3b0fa --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/MemberCache.cs @@ -0,0 +1,167 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Xml.XPath; +using Umbraco.Core; +using Umbraco.Core.Cache; +using Umbraco.Core.Models; +using Umbraco.Core.Models.PublishedContent; +using Umbraco.Core.Security; +using Umbraco.Core.Services; +using Umbraco.Core.Xml.XPath; +using Umbraco.Web.PublishedCache.NuCache.Navigable; + +namespace Umbraco.Web.PublishedCache.NuCache +{ + class MemberCache : IPublishedMemberCache, INavigableData + { + private readonly IMemberService 
_memberService; + private readonly IDataTypeService _dataTypeService; + private readonly PublishedContentTypeCache _contentTypeCache; + private readonly bool _previewDefault; + + public MemberCache(bool previewDefault, IMemberService memberService, IDataTypeService dataTypeService, PublishedContentTypeCache contentTypeCache) + { + _memberService = memberService; + _dataTypeService = dataTypeService; + _previewDefault = previewDefault; + _contentTypeCache = contentTypeCache; + } + + //private static T GetCacheItem(string cacheKey) + // where T : class + //{ + // var facade = Facade.Current; + // var cache = facade == null ? null : facade.FacadeCache; + // return cache == null + // ? null + // : (T) cache.GetCacheItem(cacheKey); + //} + + private static T GetCacheItem(string cacheKey, Func getCacheItem) + where T : class + { + var facade = Facade.Current; + var cache = facade == null ? null : facade.FacadeCache; + return cache == null + ? getCacheItem() + : cache.GetCacheItem(cacheKey, getCacheItem); + } + + private static void EnsureProvider() + { + var provider = Core.Security.MembershipProviderExtensions.GetMembersMembershipProvider(); + if (provider.IsUmbracoMembershipProvider() == false) + throw new NotSupportedException("Cannot access this method unless the Umbraco membership provider is active"); + } + + public IPublishedContent GetById(bool preview, int memberId) + { + return GetById(memberId); + } + + public IPublishedContent /*IPublishedMember*/ GetById(int memberId) + { + return GetCacheItem(CacheKeys.MemberCacheMember("ById", _previewDefault, memberId), () => + { + EnsureProvider(); + var member = _memberService.GetById(memberId); + return member == null + ? null + : PublishedMember.Create(member, GetContentType(member.ContentTypeId), _previewDefault); + }); + } + + private IPublishedContent /*IPublishedMember*/ GetById(IMember member, bool previewing) + { + return GetCacheItem(CacheKeys.MemberCacheMember("ById", _previewDefault, member.Id), () => + PublishedMember.Create(member, GetContentType(member.ContentTypeId), previewing)); + } + + public IPublishedContent /*IPublishedMember*/ GetByProviderKey(object key) + { + return GetCacheItem(CacheKeys.MemberCacheMember("ByProviderKey", _previewDefault, key), () => + { + EnsureProvider(); + var member = _memberService.GetByProviderKey(key); + return member == null ? null : GetById(member, _previewDefault); + }); + } + + public IPublishedContent /*IPublishedMember*/ GetByUsername(string username) + { + return GetCacheItem(CacheKeys.MemberCacheMember("ByUsername", _previewDefault, username), () => + { + EnsureProvider(); + var member = _memberService.GetByUsername(username); + return member == null ? null : GetById(member, _previewDefault); + }); + } + + public IPublishedContent /*IPublishedMember*/ GetByEmail(string email) + { + return GetCacheItem(CacheKeys.MemberCacheMember("ByEmail", _previewDefault, email), () => + { + EnsureProvider(); + var member = _memberService.GetByEmail(email); + return member == null ? null : GetById(member, _previewDefault); + }); + } + + public IPublishedContent /*IPublishedMember*/ GetByMember(IMember member) + { + return PublishedMember.Create(member, GetContentType(member.ContentTypeId), _previewDefault); + } + + public IEnumerable GetAtRoot(bool preview) + { + // because members are flat (not a tree) everything is at root + // because we're loading everything... let's just not cache? 
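+ // (note: this enumerates all members through IMemberService and materializes a PublishedMember
+ // per item, which may be costly for large member databases)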
+ var members = _memberService.GetAllMembers(); + return members.Select(m => PublishedMember.Create(m, GetContentType(m.ContentTypeId), preview)); + } + + public XPathNavigator CreateNavigator() + { + var source = new Source(this, false); + var navigator = new NavigableNavigator(source); + return navigator; + } + + public XPathNavigator CreateNavigator(bool preview) + { + return CreateNavigator(); + } + + public XPathNavigator CreateNodeNavigator(int id, bool preview) + { + var provider = Core.Security.MembershipProviderExtensions.GetMembersMembershipProvider(); + if (provider.IsUmbracoMembershipProvider() == false) + { + throw new NotSupportedException("Cannot access this method unless the Umbraco membership provider is active"); + } + + var result = _memberService.GetById(id); + if (result == null) return null; + + var exs = new EntityXmlSerializer(); + var s = exs.Serialize(_dataTypeService, result); + var n = s.GetXmlNode(); + return n.CreateNavigator(); + } + + #region Content types + + public PublishedContentType GetContentType(int id) + { + return _contentTypeCache.Get(PublishedItemType.Member, id); + } + + public PublishedContentType GetContentType(string alias) + { + return _contentTypeCache.Get(PublishedItemType.Member, alias); + } + + #endregion + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/Navigable/INavigableData.cs b/src/Umbraco.Web/PublishedCache/NuCache/Navigable/INavigableData.cs new file mode 100644 index 0000000000..6bbb1de70a --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/Navigable/INavigableData.cs @@ -0,0 +1,11 @@ +using System.Collections.Generic; +using Umbraco.Core.Models; + +namespace Umbraco.Web.PublishedCache.NuCache.Navigable +{ + interface INavigableData + { + IPublishedContent GetById(bool preview, int contentId); + IEnumerable GetAtRoot(bool preview); + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/Navigable/NavigableContent.cs b/src/Umbraco.Web/PublishedCache/NuCache/Navigable/NavigableContent.cs new file mode 100644 index 0000000000..3a205d69dc --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/Navigable/NavigableContent.cs @@ -0,0 +1,112 @@ +using System; +using System.Collections.Generic; +using Umbraco.Core.Models; +using Umbraco.Core.Xml.XPath; + +namespace Umbraco.Web.PublishedCache.NuCache.Navigable +{ + class NavigableContent : INavigableContent + { + private readonly IPublishedContent _icontent; + private readonly PublishedContent _content; + //private readonly object[] _builtInValues1; + private readonly string[] _builtInValues; + + public NavigableContent(IPublishedContent content) + { + _icontent = content; + _content = PublishedContent.UnwrapIPublishedContent(_icontent); + + // built-in properties (attributes) + //_builtInValues1 = new object[] + // { + // _content.Name, + // _content.ParentId, + // _content.CreateDate, + // _content.UpdateDate, + // true, // isDoc + // _content.SortOrder, + // _content.Level, + // _content.TemplateId, + // _content.WriterId, + // _content.CreatorId, + // _content.UrlName, + // _content.IsDraft + // }; + + var i = 0; + _builtInValues = new [] + { + XmlString(i++, _content.Name), + XmlString(i++, _content.ParentId), + XmlString(i++, _content.CreateDate), + XmlString(i++, _content.UpdateDate), + XmlString(i++, true), // isDoc + XmlString(i++, _content.SortOrder), + XmlString(i++, _content.Level), + XmlString(i++, _content.TemplateId), + XmlString(i++, _content.WriterId), + XmlString(i++, _content.CreatorId), + XmlString(i++, _content.UrlName), + XmlString(i, 
_content.IsDraft) + }; + } + + private string XmlString(int index, object value) + { + var field = Type.FieldTypes[index]; + return field.XmlStringConverter == null ? value.ToString() : field.XmlStringConverter(value); + } + + #region INavigableContent + + public IPublishedContent InnerContent + { + get { return _icontent; } + } + + public int Id + { + get { return _content.Id; } + } + + public int ParentId + { + get { return _content.ParentId; } + } + + public INavigableContentType Type + { + get { return NavigableContentType.GetContentType(_content.ContentType); } + } + + // returns all child ids, will be filtered by the source + public IList ChildIds + { + get { return _content.ChildIds; } + } + + public object Value(int index) + { + if (index < 0) + throw new ArgumentOutOfRangeException("index"); + + if (index < NavigableContentType.BuiltinProperties.Length) + { + // built-in field, ie attribute + //return XmlString(index, _builtInValues1[index]); + return _builtInValues[index]; + } + + index -= NavigableContentType.BuiltinProperties.Length; + var properties = _content.PropertiesArray; + if (index >= properties.Length) + throw new ArgumentOutOfRangeException("index"); + + // custom property, ie element + return properties[index].XPathValue; + } + + #endregion + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/Navigable/NavigableContentType.cs b/src/Umbraco.Web/PublishedCache/NuCache/Navigable/NavigableContentType.cs new file mode 100644 index 0000000000..4f98f417af --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/Navigable/NavigableContentType.cs @@ -0,0 +1,70 @@ +using System; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Xml; +using Umbraco.Core.Models.PublishedContent; +using Umbraco.Core.Xml.XPath; + +namespace Umbraco.Web.PublishedCache.NuCache.Navigable +{ + class NavigableContentType : INavigableContentType + { + public static readonly INavigableFieldType[] BuiltinProperties; + private readonly object _locko = new object(); + + // called by the conditional weak table -- must be public +// ReSharper disable EmptyConstructor + public NavigableContentType() +// ReSharper restore EmptyConstructor + { } + + // note - PublishedContentType are immutable ie they do not _change_ when the actual IContentTypeComposition + // changes, but they are replaced by a new instance, so our map here will clean itself automatically and + // we don't have to manage cache - ConditionalWeakTable does not prevent keys from beeing GCed + + static private readonly ConditionalWeakTable TypesMap + = new ConditionalWeakTable(); + + public static NavigableContentType GetContentType(PublishedContentType contentType) + { + return TypesMap.GetOrCreateValue(contentType).EnsureInitialized(contentType); + } + + static NavigableContentType() + { + BuiltinProperties = new INavigableFieldType[] + { + new NavigablePropertyType("nodeName"), + new NavigablePropertyType("parentId"), + new NavigablePropertyType("createDate", v => XmlConvert.ToString((DateTime)v, "yyyy-MM-ddTHH:mm:ss")), + new NavigablePropertyType("updateDate", v => XmlConvert.ToString((DateTime)v, "yyyy-MM-ddTHH:mm:ss")), + new NavigablePropertyType("isDoc", v => XmlConvert.ToString((bool)v)), + new NavigablePropertyType("sortOrder"), + new NavigablePropertyType("level"), + new NavigablePropertyType("templateId"), + new NavigablePropertyType("writerId"), + new NavigablePropertyType("creatorId"), + new NavigablePropertyType("urlName"), + new NavigablePropertyType("isDraft", v => XmlConvert.ToString((bool)v)) + 
}; + } + + private NavigableContentType EnsureInitialized(PublishedContentType contentType) + { + lock (_locko) + { + if (Name == null) + { + Name = contentType.Alias; + FieldTypes = BuiltinProperties + .Union(contentType.PropertyTypes.Select(propertyType => new NavigablePropertyType(propertyType.PropertyTypeAlias))) + .ToArray(); + } + } + return this; + } + + public string Name { get; private set; } + public INavigableFieldType[] FieldTypes { get; private set; } + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/Navigable/NavigablePropertyType.cs b/src/Umbraco.Web/PublishedCache/NuCache/Navigable/NavigablePropertyType.cs new file mode 100644 index 0000000000..803e7d86b4 --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/Navigable/NavigablePropertyType.cs @@ -0,0 +1,17 @@ +using System; +using Umbraco.Core.Xml.XPath; + +namespace Umbraco.Web.PublishedCache.NuCache.Navigable +{ + internal class NavigablePropertyType : INavigableFieldType + { + public NavigablePropertyType(string name, Func xmlStringConverter = null) + { + Name = name; + XmlStringConverter = xmlStringConverter; + } + + public string Name { get; private set; } + public Func XmlStringConverter { get; private set; } + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/Navigable/RootContent.cs b/src/Umbraco.Web/PublishedCache/NuCache/Navigable/RootContent.cs new file mode 100644 index 0000000000..b95eb7dee3 --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/Navigable/RootContent.cs @@ -0,0 +1,56 @@ +using System.Collections.Generic; +using System.Linq; +using Umbraco.Core.Xml.XPath; + +namespace Umbraco.Web.PublishedCache.NuCache.Navigable +{ + class RootContent : INavigableContent + { + private static readonly RootContentType ContentType = new RootContentType(); + private readonly int[] _childIds; + + public RootContent(IEnumerable childIds) + { + _childIds = childIds.ToArray(); + } + + public int Id + { + get { return -1; } + } + + public int ParentId + { + get { return -1; } + } + + public INavigableContentType Type + { + get { return ContentType; } + } + + public IList ChildIds + { + get { return _childIds; } + } + + public object Value(int index) + { + // only id has a value + return index == 0 ? "-1" : null; + } + + class RootContentType : INavigableContentType + { + public string Name + { + get { return "root"; } + } + + public INavigableFieldType[] FieldTypes + { + get { return NavigableContentType.BuiltinProperties; } + } + } + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/Navigable/Source.cs b/src/Umbraco.Web/PublishedCache/NuCache/Navigable/Source.cs new file mode 100644 index 0000000000..1272f53580 --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/Navigable/Source.cs @@ -0,0 +1,39 @@ +using System.Linq; +using Umbraco.Core.Xml.XPath; + +namespace Umbraco.Web.PublishedCache.NuCache.Navigable +{ + class Source : INavigableSource + { + private readonly INavigableData _data; + private readonly bool _preview; + private readonly RootContent _root; + + public Source(INavigableData data, bool preview) + { + _data = data; + _preview = preview; + + var contentAtRoot = data.GetAtRoot(preview); + _root = new RootContent(contentAtRoot.Select(x => x.Id)); + } + + public INavigableContent Get(int id) + { + // wrap in a navigable content + + var content = _data.GetById(_preview, id); + return content == null ? 
null : new NavigableContent(content); + } + + public int LastAttributeIndex + { + get { return NavigableContentType.BuiltinProperties.Length - 1; } + } + + public INavigableContent Root + { + get { return _root; } + } + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/Property.cs b/src/Umbraco.Web/PublishedCache/NuCache/Property.cs new file mode 100644 index 0000000000..e4668aa731 --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/Property.cs @@ -0,0 +1,183 @@ +using System; +using System.Xml.Serialization; +using Umbraco.Core.Cache; +using Umbraco.Core.Models; +using Umbraco.Core.Models.PublishedContent; +using Umbraco.Core.PropertyEditors; + +namespace Umbraco.Web.PublishedCache.NuCache +{ + [Serializable] + [XmlType(Namespace = "http://umbraco.org/webservices/")] + class Property : PublishedPropertyBase + { + private readonly object _dataValue; + private readonly Guid _contentUid; + private readonly bool _isPreviewing; + private readonly bool _isMember; + + readonly object _locko = new object(); + + private ValueSet _valueSet; + private string _valueSetCacheKey; + private string _recurseCacheKey; + + // initializes a published content property with no value + public Property(PublishedPropertyType propertyType, PublishedContent content) + : this(propertyType, content, null) + { } + + // initializes a published content property with a value + public Property(PublishedPropertyType propertyType, PublishedContent content, object valueSource) + : base(propertyType) + { + _dataValue = valueSource; + _contentUid = content.Key; + var inner = PublishedContent.UnwrapIPublishedContent(content); + _isPreviewing = inner.IsPreviewing; + _isMember = content.ContentType.ItemType == PublishedItemType.Member; + } + + // clone for previewing as draft a published content that is published and has no draft + public Property(Property origin) + : base(origin.PropertyType) + { + _dataValue = origin._dataValue; + _contentUid = origin._contentUid; + _isPreviewing = true; + _isMember = origin._isMember; + } + + // detached + //internal Property(PublishedPropertyType propertyType, Guid contentUid, object valueSource, bool isPreviewing, bool isMember) + // : base(propertyType) + //{ + // _dataValue = valueSource; + // _contentUid = contentUid; + // _isPreviewing = isPreviewing; + // _isMember = isMember; + //} + + public override bool HasValue + { + get { return _dataValue != null && ((_dataValue is string) == false || string.IsNullOrWhiteSpace((string)_dataValue) == false); } + } + + private class ValueSet + { + public bool SourceInitialized; + public object Source; + public bool ValueInitialized; + public object Value; + public bool XPathInitialized; + public object XPath; + } + + internal string RecurseCacheKey + { + get { return _recurseCacheKey ?? (_recurseCacheKey = CacheKeys.PropertyRecurse(_contentUid, PropertyTypeAlias, _isPreviewing)); } + } + + internal string ValueSetCacheKey + { + get { return _valueSetCacheKey ?? (_valueSetCacheKey = CacheKeys.PropertyValueSet(_contentUid, PropertyTypeAlias, _isPreviewing)); } + } + + private ValueSet GetValueSet(PropertyCacheLevel cacheLevel) + { + ValueSet valueSet; + Facade facade; + ICacheProvider cache; + switch (cacheLevel) + { + case PropertyCacheLevel.None: + // never cache anything + valueSet = new ValueSet(); + break; + case PropertyCacheLevel.Content: + // cache within the property object itself, ie within the content object + valueSet = _valueSet ?? 
(_valueSet = new ValueSet()); + break; + case PropertyCacheLevel.ContentCache: + // cache within the snapshot cache, unless previewing, then use the facade or + // snapshot cache (if we don't want to pollute the snapshot cache with short-lived + // data) depending on settings + // for members, always cache in the facade cache - never pollute snapshot cache + facade = Facade.Current; + cache = facade == null + ? null + : ((_isPreviewing == false || FacadeService.FullCacheWhenPreviewing) && (_isMember == false) + ? facade.SnapshotCache + : facade.FacadeCache); + valueSet = GetValueSet(cache); + break; + case PropertyCacheLevel.Request: + // cache within the facade cache + facade = Facade.Current; + cache = facade == null ? null : facade.FacadeCache; + valueSet = GetValueSet(cache); + break; + default: + throw new InvalidOperationException("Invalid cache level."); + } + return valueSet; + } + + private ValueSet GetValueSet(ICacheProvider cache) + { + if (cache == null) // no cache, don't cache + return new ValueSet(); + return (ValueSet) cache.GetCacheItem(ValueSetCacheKey, () => new ValueSet()); + } + + private object GetSourceValue() + { + var valueSet = GetValueSet(PropertyType.SourceCacheLevel); + if (valueSet.SourceInitialized == false) + { + valueSet.Source = PropertyType.ConvertDataToSource(_dataValue, _isPreviewing); + valueSet.SourceInitialized = true; + } + return valueSet.Source; + } + + public override object DataValue + { + get { return _dataValue; } + } + + public override object Value + { + get + { + lock (_locko) + { + var valueSet = GetValueSet(PropertyType.ObjectCacheLevel); + if (valueSet.ValueInitialized == false) + { + valueSet.Value = PropertyType.ConvertSourceToObject(GetSourceValue(), _isPreviewing); + valueSet.ValueInitialized = true; + } + return valueSet.Value; + } + } + } + + public override object XPathValue + { + get + { + lock (_locko) + { + var valueSet = GetValueSet(PropertyType.XPathCacheLevel); + if (valueSet.XPathInitialized == false) + { + valueSet.XPath = PropertyType.ConvertSourceToXPath(GetSourceValue(), _isPreviewing); + valueSet.XPathInitialized = true; + } + return valueSet.XPath; + } + } + } + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/PublishedContent.cs b/src/Umbraco.Web/PublishedCache/NuCache/PublishedContent.cs new file mode 100644 index 0000000000..2c37b675b3 --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/PublishedContent.cs @@ -0,0 +1,355 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Umbraco.Core; +using Umbraco.Core.Cache; +using Umbraco.Core.Models; +using Umbraco.Core.Models.PublishedContent; +using Umbraco.Web.Models; +using Umbraco.Web.PublishedCache.NuCache.DataSource; + +namespace Umbraco.Web.PublishedCache.NuCache +{ + internal class PublishedContent : PublishedContentWithKeyBase + { + private readonly ContentNode _contentNode; + // ReSharper disable once InconsistentNaming + internal readonly ContentData _contentData; // internal for ContentNode cloning + + private readonly IPublishedProperty[] _properties; + private readonly string _urlName; + private readonly bool _isPreviewing; + + #region Constructors + + public PublishedContent(ContentNode contentNode, ContentData contentData) + { + _contentNode = contentNode; + _contentData = contentData; + + _urlName = _contentData.Name.ToUrlSegment(); + _isPreviewing = _contentData.Published == false; + _properties = CreateProperties(this, contentData.Properties); + } + + private static string GetProfileNameById(int id) + { + var 
facade = Facade.Current; + var cache = facade == null ? null : facade.FacadeCache; + return cache == null + ? GetProfileNameByIdNoCache(id) + : (string) cache.GetCacheItem(CacheKeys.ProfileName(id), () => GetProfileNameByIdNoCache(id)); + } + + private static string GetProfileNameByIdNoCache(int id) + { +#if DEBUG + var context = ApplicationContext.Current; + var servicesContext = context == null ? null : context.Services; + var userService = servicesContext == null ? null : servicesContext.UserService; + if (userService == null) return "[null]"; // for tests +#else + // we don't want each published content to hold a reference to the service + // so where should they get the service from really? from the source... + var userService = ApplicationContext.Current.Services.UserService; +#endif + var user = userService.GetProfileById(id); + return user == null ? null : user.Name; + } + + private static IPublishedProperty[] CreateProperties(PublishedContent content, IDictionary values) + { + return content._contentNode.ContentType + .PropertyTypes + .Select(propertyType => + { + object value; + return values.TryGetValue(propertyType.PropertyTypeAlias, out value) + ? (IPublishedProperty) new Property(propertyType, content, value) + : (IPublishedProperty) new Property(propertyType, content); + }) + .ToArray(); + } + + // (see ContentNode.CloneParent) + public PublishedContent(ContentNode contentNode, PublishedContent origin) + { + _contentNode = contentNode; + _contentData = origin._contentData; + + _urlName = origin._urlName; + _isPreviewing = origin._isPreviewing; + + // here is the main benefit: we do not re-create properties so if anything + // is cached locally, we share the cache - which is fine - if anything depends + // on the tree structure, it should not be cached locally to begin with + _properties = origin._properties; + } + + // clone for previewing as draft a published content that is published and has no draft + private PublishedContent(PublishedContent origin) + { + _contentNode = origin._contentNode; + _contentData = origin._contentData; + + _urlName = origin._urlName; + _isPreviewing = true; + + // clone properties so _isPreviewing is true + _properties = origin._properties.Select(x => (IPublishedProperty) new Property((Property) x)).ToArray(); + } + + #endregion + + #region Get Content/Media for Parent/Children + + // this is for tests purposes + // args are: current facade (may be null), previewing, content id - returns: content + private static Func _getContentByIdFunc = + (facade, previewing, id) => facade.ContentCache.GetById(previewing, id); + private static Func _getMediaByIdFunc = + (facade, previewing, id) => facade.MediaCache.GetById(previewing, id); + + internal static Func GetContentByIdFunc + { + get { return _getContentByIdFunc; } + set + { + _getContentByIdFunc = value; + } + } + + internal static Func GetMediaByIdFunc + { + get { return _getMediaByIdFunc; } + set + { + _getMediaByIdFunc = value; + } + } + + private static IPublishedContent GetContentById(bool previewing, int id) + { + return _getContentByIdFunc(Facade.Current, previewing, id); + } + + private static IEnumerable GetContentByIds(bool previewing, IEnumerable ids) + { + var facade = Facade.Current; + + // beware! 
the loop below CANNOT be converted to query such as: + //return ids.Select(x => _getContentByIdFunc(facade, previewing, x)).Where(x => x != null); + // because it would capture the facade and cause all sorts of issues + // + // we WANT to get the actual current facade each time we run + + // ReSharper disable once LoopCanBeConvertedToQuery + foreach (var id in ids) + { + var content = _getContentByIdFunc(facade, previewing, id); + if (content != null) yield return content; + } + } + + private static IPublishedContent GetMediaById(bool previewing, int id) + { + return _getMediaByIdFunc(Facade.Current, previewing, id); + } + + private static IEnumerable GetMediaByIds(bool previewing, IEnumerable ids) + { + var facade = Facade.Current; + + // see note above for content + + // ReSharper disable once LoopCanBeConvertedToQuery + foreach (var id in ids) + { + var content = _getMediaByIdFunc(facade, previewing, id); + if (content != null) yield return content; + } + } + + #endregion + + #region IPublishedContent + + public override int Id { get { return _contentNode.Id; } } + public override Guid Key { get { return _contentNode.Uid; } } + public override int DocumentTypeId { get { return _contentNode.ContentType.Id; } } + public override string DocumentTypeAlias { get { return _contentNode.ContentType.Alias; } } + public override PublishedItemType ItemType { get { return _contentNode.ContentType.ItemType; } } + + public override string Name { get { return _contentData.Name; } } + public override int Level { get { return _contentNode.Level; } } + public override string Path { get { return _contentNode.Path; } } + public override int SortOrder { get { return _contentNode.SortOrder; } } + public override Guid Version { get { return _contentData.Version; } } + public override int TemplateId { get { return _contentData.TemplateId; } } + + public override string UrlName { get { return _urlName; } } + + public override DateTime CreateDate { get { return _contentNode.CreateDate; } } + public override DateTime UpdateDate { get { return _contentData.VersionDate; } } + + public override int CreatorId { get { return _contentNode.CreatorId; } } + public override string CreatorName { get { return GetProfileNameById(_contentNode.CreatorId); } } + public override int WriterId { get { return _contentData.WriterId; } } + public override string WriterName { get { return GetProfileNameById(_contentData.WriterId); } } + + public override bool IsDraft { get { return _contentData.Published == false; } } + + // beware what you use that one for - you don't want to cache its result + private ICacheProvider GetAppropriateFacadeCache() + { + var facade = Facade.Current; + var cache = facade == null + ? null + : ((_isPreviewing == false || FacadeService.FullCacheWhenPreviewing) && (ItemType != PublishedItemType.Member) + ? facade.SnapshotCache + : facade.FacadeCache); + return cache; + } + + public override IPublishedContent Parent + { + get + { + // have to use the "current" cache because a PublishedContent can be shared + // amongst many snapshots and other content depend on the snapshots + switch (_contentNode.ContentType.ItemType) + { + case PublishedItemType.Content: + return GetContentById(_isPreviewing, _contentNode.ParentContentId); + case PublishedItemType.Media: + return GetMediaById(_isPreviewing, _contentNode.ParentContentId); + default: + throw new Exception("oops"); + } + } + } + + private string _childrenCacheKey; + + private string ChildrenCacheKey + { + get { return _childrenCacheKey ?? 
(_childrenCacheKey = CacheKeys.PublishedContentChildren(Key, _isPreviewing)); } + } + + public override IEnumerable Children + { + get + { + var cache = GetAppropriateFacadeCache(); + if (cache == null || FacadeService.CachePublishedContentChildren == false) + return GetChildren(); + + // note: ToArray is important here, we want to cache the result, not the function! + return (IEnumerable) cache.GetCacheItem(ChildrenCacheKey, () => GetChildren().ToArray()); + } + } + + private IEnumerable GetChildren() + { + IEnumerable c; + switch (_contentNode.ContentType.ItemType) + { + case PublishedItemType.Content: + c = GetContentByIds(_isPreviewing, _contentNode.ChildContentIds); + break; + case PublishedItemType.Media: + c = GetMediaByIds(_isPreviewing, _contentNode.ChildContentIds); + break; + default: + throw new Exception("oops"); + } + + return c.OrderBy(x => x.SortOrder); + + // notes: + // _contentNode.ChildContentIds is an unordered int[] + // need needs to fetch & sort - do it only once, lazyily, though + // Q: perfs-wise, is it better than having the store managed an ordered list + } + + public override ICollection Properties { get { return _properties; } } + + public override IPublishedProperty GetProperty(string alias) + { + var index = _contentNode.ContentType.GetPropertyIndex(alias); + var property = index < 0 ? null : _properties[index]; + return property; + } + + public override IPublishedProperty GetProperty(string alias, bool recurse) + { + var property = GetProperty(alias); + if (recurse == false) return property; + + var cache = GetAppropriateFacadeCache(); + if (cache == null) + return base.GetProperty(alias, true); + + var key = ((Property) property).RecurseCacheKey; + return (Property) cache.GetCacheItem(key, () => base.GetProperty(alias, true)); + } + + public override PublishedContentType ContentType + { + get { return _contentNode.ContentType; } + } + + #endregion + + #region Internal + + // used by navigable content + internal IPublishedProperty[] PropertiesArray { get { return _properties; } } + + // used by navigable content + internal int ParentId { get { return _contentNode.ParentContentId; } } + + // used by navigable content + // includes all children, published or unpublished + // NavigableNavigator takes care of selecting those it wants + internal IList ChildIds { get { return _contentNode.ChildContentIds; } } + + // used by Property + // gets a value indicating whether the content or media exists in + // a previewing context or not, ie whether its Parent, Children, and + // properties should refer to published, or draft content + internal bool IsPreviewing { get { return _isPreviewing; } } + + private string _asPreviewingCacheKey; + + private string AsPreviewingCacheKey + { + get { return _asPreviewingCacheKey ?? (_asPreviewingCacheKey = CacheKeys.PublishedContentAsPreviewing(Key)); } + } + + // used by ContentCache + internal IPublishedContent AsPreviewingModel() + { + if (_isPreviewing) + return this; + + var cache = GetAppropriateFacadeCache(); + if (cache == null) return new PublishedContent(this).CreateModel(); + return (IPublishedContent) cache.GetCacheItem(AsPreviewingCacheKey, () => new PublishedContent(this).CreateModel()); + } + + // used by Navigable.Source,... 
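+        // unwrapping walks down any PublishedContentWrapped layers (eg models wrapping
+        // the inner content) until it reaches the inner NuCache PublishedContent, and
+        // throws if the innermost content is not a NuCache PublishedContent.
+        // a rough usage sketch (variable names are for illustration only):
+        //   var inner = PublishedContent.UnwrapIPublishedContent(someContent);
+        //   var isPreviewing = inner.IsPreviewing; // eg what Property's ctor needs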
+ internal static PublishedContent UnwrapIPublishedContent(IPublishedContent content) + { + PublishedContentWrapped wrapped; + while ((wrapped = content as PublishedContentWrapped) != null) + content = wrapped.Unwrap(); + var inner = content as PublishedContent; + if (inner == null) + throw new InvalidOperationException("Innermost content is not PublishedContent."); + return inner; + } + + #endregion + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/PublishedMember.cs b/src/Umbraco.Web/PublishedCache/NuCache/PublishedMember.cs new file mode 100644 index 0000000000..c1c635a8bf --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/PublishedMember.cs @@ -0,0 +1,151 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.ServiceModel.Security; +using Umbraco.Core; +using Umbraco.Core.Models; +using Umbraco.Core.Models.PublishedContent; +using Umbraco.Web.PublishedCache.NuCache.DataSource; + +namespace Umbraco.Web.PublishedCache.NuCache +{ + // note + // the whole PublishedMember thing should be refactored because as soon as a member + // is wrapped on in a model, the inner IMember and all associated properties are lost + + class PublishedMember : PublishedContent //, IPublishedMember + { + private readonly IMember _member; + + private PublishedMember(IMember member, ContentNode contentNode, ContentData contentData) + : base(contentNode, contentData) + { + _member = member; + } + + public static IPublishedContent Create(IMember member, PublishedContentType contentType, bool previewing) + { + var d = new ContentData + { + Name = member.Name, + Published = previewing, + TemplateId = -1, + Version = member.Version, + VersionDate = member.UpdateDate, + WriterId = member.CreatorId, // what else? + Properties = GetPropertyValues(contentType, member) + }; + var n = new ContentNode(member.Id, member.Key, + contentType, + member.Level, member.Path, member.SortOrder, + member.ParentId, + member.CreateDate, member.CreatorId); + return new PublishedMember(member, n, d).CreateModel(); + } + + private static Dictionary GetPropertyValues(PublishedContentType contentType, IMember member) + { + // see node in FacadeService + // we do not (want to) support ConvertDbToXml/String + + //var propertyEditorResolver = PropertyEditorResolver.Current; + + var properties = member + .Properties + //.Select(property => + //{ + // var e = propertyEditorResolver.GetByAlias(property.PropertyType.PropertyEditorAlias); + // var v = e == null + // ? 
property.Value + // : e.ValueEditor.ConvertDbToString(property, property.PropertyType, ApplicationContext.Current.Services.DataTypeService); + // return new KeyValuePair(property.Alias, v); + //}) + //.ToDictionary(x => x.Key, x => x.Value); + .ToDictionary(x => x.Alias, x => x.Value, StringComparer.OrdinalIgnoreCase); + + AddIf(contentType, properties, "Email", member.Email); + AddIf(contentType, properties, "Username", member.Username); + //AddIf(contentType, properties, "PasswordQuestion", member.PasswordQuestion); + //AddIf(contentType, properties, "Comments", member.Comments); + //AddIf(contentType, properties, "IsApproved", member.IsApproved); + //AddIf(contentType, properties, "IsLockedOut", member.IsLockedOut); + //AddIf(contentType, properties, "LastLockoutDate", member.LastLockoutDate); + //AddIf(contentType, properties, "CreateDate", member.CreateDate); + //AddIf(contentType, properties, "LastLoginDate", member.LastLoginDate); + //AddIf(contentType, properties, "LastPasswordChangeDate", member.LastPasswordChangeDate); + + return properties; + } + + private static void AddIf(PublishedContentType contentType, IDictionary properties, string alias, object value) + { + var propertyType = contentType.GetPropertyType(alias); + if (propertyType == null || propertyType.IsUmbraco == false) return; + properties[alias] = value; + } + + #region IPublishedMember + + public IMember Member + { + get { return _member; } + } + + public string Email + { + get { return _member.Email; } + } + + public string UserName + { + get { return _member.Username; } + } + + public string PasswordQuestion + { + get { return _member.PasswordQuestion; } + } + + public string Comments + { + get { return _member.Comments; } + } + + public bool IsApproved + { + get { return _member.IsApproved; } + } + + public bool IsLockedOut + { + get { return _member.IsLockedOut; } + } + + public DateTime LastLockoutDate + { + get { return _member.LastLockoutDate; } + } + + public DateTime CreationDate + { + get { return _member.CreateDate; } + } + + public DateTime LastLoginDate + { + get { return _member.LastLoginDate; } + } + + public DateTime LastActivityDate + { + get { return _member.LastLoginDate; } + } + + public DateTime LastPasswordChangedDate + { + get { return _member.LastPasswordChangeDate; } + } + + #endregion + } +} diff --git a/src/Umbraco.Web/PublishedCache/NuCache/SnapDictionary.cs b/src/Umbraco.Web/PublishedCache/NuCache/SnapDictionary.cs new file mode 100644 index 0000000000..e99b73ce6d --- /dev/null +++ b/src/Umbraco.Web/PublishedCache/NuCache/SnapDictionary.cs @@ -0,0 +1,631 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; + +namespace Umbraco.Web.PublishedCache.NuCache +{ + internal class SnapDictionary + where TValue : class + { + // read + // http://www.codeproject.com/Articles/548406/Dictionary-plus-Locking-versus-ConcurrentDictionar + // http://arbel.net/2013/02/03/best-practices-for-using-concurrentdictionary/ + // http://blogs.msdn.com/b/pfxteam/archive/2011/04/02/10149222.aspx + + // Set, Clear and GetSnapshot have to be protected by a lock + // This class is optimized for many readers, few writers + // Readers are lock-free + + private readonly ConcurrentDictionary _items; + private readonly ConcurrentQueue _generationObjects; + private GenerationObject _generationObject; + private readonly object _wlocko = new object(); + private readonly object _rlocko = new object(); + private 
long _liveGen, _floorGen; + private bool _nextGen, _collectAuto; + private Task _collectTask; + private volatile int _wlocked; + + // fixme - collection trigger (ok for now) + // minGenDelta to be adjusted + // we may want to throttle collects even if delta is reached + // we may want to force collect if delta is not reached but very old + // we may want to adjust delta depending on the number of changes + private const long CollectMinGenDelta = 4; + + #region Ctor + + public SnapDictionary() + { + _items = new ConcurrentDictionary(); + _generationObjects = new ConcurrentQueue(); + _generationObject = null; // no initial gen exists + _liveGen = _floorGen = 0; + _nextGen = false; // first time, must create a snapshot + _collectAuto = true; // collect automatically by default + } + + #endregion + + #region Locking + + public void WriteLocked(Action action) + { + var wtaken = false; + var wcount = false; + try + { + Monitor.Enter(_wlocko, ref wtaken); + + var rtaken = false; + try + { + Monitor.Enter(_rlocko, ref rtaken); + + // assume everything in finally runs atomically + // http://stackoverflow.com/questions/18501678/can-this-unexpected-behavior-of-prepareconstrainedregions-and-thread-abort-be-ex + // http://joeduffyblog.com/2005/03/18/atomicity-and-asynchronous-exception-failures/ + // http://joeduffyblog.com/2007/02/07/introducing-the-new-readerwriterlockslim-in-orcas/ + // http://chabster.blogspot.fr/2013/12/readerwriterlockslim-fails-on-dual.html + //RuntimeHelpers.PrepareConstrainedRegions(); + try + { } + finally + { + _wlocked++; + wcount = true; + if (_nextGen == false) + { + // because we are changing things, a new generation + // is created, which will trigger a new snapshot + _nextGen = true; + _liveGen += 1; + } + } + } + finally + { + if (rtaken) Monitor.Exit(_rlocko); + } + + action(); + } + finally + { + if (wcount) _wlocked--; + if (wtaken) Monitor.Exit(_wlocko); + } + } + + public T WriteLocked(Func func) + { + var wtaken = false; + var wcount = false; + try + { + Monitor.Enter(_wlocko, ref wtaken); + + var rtaken = false; + try + { + Monitor.Enter(_rlocko, ref rtaken); + + try + { } + finally + { + _wlocked++; + wcount = true; + if (_nextGen == false) + { + // because we are changing things, a new generation + // is created, which will trigger a new snapshot + _nextGen = true; + _liveGen += 1; + } + } + } + finally + { + if (rtaken) Monitor.Exit(_rlocko); + } + + return func(); + } + finally + { + if (wcount) _wlocked--; + if (wtaken) Monitor.Exit(_wlocko); + } + } + + private T ReadLocked(Func func) + { + var rtaken = false; + try + { + Monitor.Enter(_rlocko, ref rtaken); + + // we have rlock, so it cannot ++ + // it could -- though, so... 
volatile + var wlocked = _wlocked > 0; + return func(wlocked); + } + finally + { + if (rtaken) Monitor.Exit(_rlocko); + } + } + + #endregion + + #region Set, Clear, Get, Has + + public int Count + { + get { return _items.Count; } + } + + private LinkedNode GetHead(TKey key) + { + LinkedNode link; + _items.TryGetValue(key, out link); // else null + return link; + } + + public void Set(TKey key, TValue value) + { + WriteLocked(() => + { + // this is safe only because we're write-locked + var link = GetHead(key); + if (link != null) + { + // already in the dict + if (link.Gen != _liveGen) + { + // for an older gen - if value is different then insert a new + // link for the new gen, with the new value + if (link.Value != value) + _items.TryUpdate(key, new LinkedNode(value, _liveGen, link), link); + } + else + { + // for the live gen - we can fix the live gen - and remove it + // if value is null and there's no next gen + if (value == null && link.Next == null) + _items.TryRemove(key, out link); + else + link.Value = value; + } + } + else + { + _items.TryAdd(key, new LinkedNode(value, _liveGen)); + } + }); + } + + public void Clear(TKey key) + { + Set(key, null); + } + + public void Clear() + { + WriteLocked(() => + { + // this is safe only because we're write-locked + foreach (var kvp in _items.Where(x => x.Value != null)) + { + if (kvp.Value.Gen < _liveGen) + { + var link = new LinkedNode(null, _liveGen, kvp.Value); + _items.TryUpdate(kvp.Key, link, kvp.Value); + } + else + { + kvp.Value.Value = null; + } + } + }); + } + + public TValue Get(TKey key, long gen) + { + // look ma, no lock! + var link = GetHead(key); + while (link != null) + { + if (link.Gen <= gen) + return link.Value; // may be null + link = link.Next; + } + return null; + } + + public IEnumerable GetAll(long gen) + { + // enumerating on .Values locks the concurrent dictionary, + // so better get a shallow clone in an array and release + var links = _items.Values.ToArray(); + return links.Select(link => + { + while (link != null) + { + if (link.Gen <= gen) + return link.Value; // may be null + link = link.Next; + } + return null; + }).Where(x => x != null); + } + + public bool IsEmpty(long gen) + { + var has = _items.Any(x => + { + var link = x.Value; + while (link != null) + { + if (link.Gen <= gen && link.Value != null) + return true; + link = link.Next; + } + return false; + }); + return has == false; + } + + #endregion + + #region Snapshots + + public Snapshot CreateSnapshot() + { + return ReadLocked(wlocked => + { + // if no next generation is required, and we already have one, + // use it and create a new snapshot + if (_nextGen == false && _generationObject != null) + return new Snapshot(this, _generationObject.GetReference()); + + // else we need to try to create a new gen ref + // whether we are wlocked or not, noone can rlock while we do, + // so _liveGen and _nextGen are safe + if (wlocked) + { + // write-locked, cannot use latest gen (at least 1) so use previous + var snapGen = _nextGen ? _liveGen - 1 : _liveGen; + + // create a new gen ref unless we already have it + if (_generationObject == null) + _generationObjects.Enqueue(_generationObject = new GenerationObject(snapGen)); + else if (_generationObject.Gen != snapGen) + throw new Exception("panic"); + } + else + { + // not write-locked, can use latest gen, create a new gen ref + _generationObjects.Enqueue(_generationObject = new GenerationObject(_liveGen)); + _nextGen = false; // this is the ONLY thing that triggers a _liveGen++ + } + + // so... 
+ // the genRefRef has a weak ref to the genRef, and is queued + // the snapshot has a ref to the genRef, which has a ref to the genRefRef + // when the snapshot is disposed, it decreases genRefRef counter + // so after a while, one of these conditions is going to be true: + // - the genRefRef counter is zero because all snapshots have properly been disposed + // - the genRefRef weak ref is dead because all snapshots have been collected + // in both cases, we will dequeue and collect + + var snapshot = new Snapshot(this, _generationObject.GetReference()); + + // reading _floorGen is safe if _collectTask is null + if (_collectTask == null && _collectAuto && _liveGen - _floorGen > CollectMinGenDelta) + CollectAsyncLocked(); + + return snapshot; + }); + } + + public Task CollectAsync() + { + lock (_rlocko) + { + return CollectAsyncLocked(); + } + } + + private Task CollectAsyncLocked() + { + if (_collectTask != null) + return _collectTask; + + // ReSharper disable InconsistentlySynchronizedField + var task = _collectTask = Task.Run(() => Collect()); + _collectTask.ContinueWith(_ => + { + lock (_rlocko) + { + _collectTask = null; + } + }, TaskContinuationOptions.ExecuteSynchronously); + // ReSharper restore InconsistentlySynchronizedField + + return task; + } + + private void Collect() + { + // see notes in CreateSnapshot + GenerationObject generationObject; + while (_generationObjects.TryPeek(out generationObject) && (generationObject.Count == 0 || generationObject.WeakReference.IsAlive == false)) + { + _generationObjects.TryDequeue(out generationObject); // cannot fail since TryPeek has succeeded + _floorGen = generationObject.Gen; + } + + Collect(_items); + } + + private void Collect(ConcurrentDictionary dict) + { + // it is OK to enumerate a concurrent dictionary and it does not lock + // it - and here it's not an issue if we skip some items, they will be + // processed next time we collect + + long liveGen; + lock (_rlocko) // r is good + { + liveGen = _liveGen; + if (_nextGen == false) + liveGen += 1; + } + + //Console.WriteLine("Collect live=" + liveGen + " floor=" + _floorGen); + + foreach (var kvp in dict) + { + var link = kvp.Value; + + //Console.WriteLine("Collect id=" + kvp.Key + " gen=" + link.Gen + // + " nxt=" + (link.Next == null ? null : "next") + // + " val=" + link.Value); + + // reasons to collect the head: + // gen must be < liveGen (we never collect live gen) + // next == null && value == null (we have no data at all) + // next != null && value == null BUT gen > floor (noone wants us) + // not live means .Next and .Value are safe + if (link.Gen < liveGen && link.Value == null + && (link.Next == null || link.Gen <= _floorGen)) + { + // not live, null value, no next link = remove that one -- but only if + // the dict has not been updated, have to do it via ICollection<> (thanks + // Mr Toub) -- and if the dict has been updated there is nothing to collect + var idict = dict as ICollection>; + /*var removed =*/ idict.Remove(kvp); + //Console.WriteLine("remove (" + (removed ? "true" : "false") + ")"); + continue; + } + + // in any other case we're not collecting the head, we need to go to Next + // and if there is no Next, skip + if (link.Next == null) + continue; + + // else go to Next and loop while above floor, and kill everything below + while (link.Next != null && link.Next.Gen > _floorGen) + link = link.Next; + link.Next = null; + } + } + + public /*async*/ Task PendingCollect() + { + Task task; + lock (_rlocko) + { + task = _collectTask; + } + return task ?? 
Task.FromResult(0); + //if (task != null) + // await task; + } + + public long GenCount + { + get { return _generationObjects.Count; } + } + + public long SnapCount + { + get + { + return _generationObjects.Sum(x => x.Count); + } + } + + #endregion + + #region Unit testing + + private TestHelper _unitTesting; + + // note: nothing here is thread-safe + internal class TestHelper + { + private readonly SnapDictionary _dict; + + public TestHelper(SnapDictionary dict) + { + _dict = dict; + } + + public long LiveGen { get { return _dict._liveGen; } } + public long FloorGen { get { return _dict._floorGen; } } + public bool NextGen { get { return _dict._nextGen; } } + public bool CollectAuto { get { return _dict._collectAuto; } set { _dict._collectAuto = value; } } + + public ConcurrentQueue GenerationObjects { get { return _dict._generationObjects; } } + + public GenVal[] GetValues(TKey key) + { + LinkedNode link; + _dict._items.TryGetValue(key, out link); // else null + + if (link == null) + return new GenVal[0]; + + var genVals = new List(); + do + { + genVals.Add(new GenVal(link.Gen, link.Value)); + link = link.Next; + } while (link != null); + return genVals.ToArray(); + } + + public class GenVal + { + public GenVal(long gen, TValue value) + { + Gen = gen; + Value = value; + } + + public long Gen { get; private set; } + public TValue Value { get; private set; } + } + } + + internal TestHelper Test { get { return _unitTesting ?? (_unitTesting = new TestHelper(this)); } } + + #endregion + + #region Classes + + private class LinkedNode + { + public LinkedNode(TValue value, long gen, LinkedNode next = null) + { + Value = value; + Gen = gen; + Next = next; + } + + internal readonly long Gen; + + // reading & writing references is thread-safe on all .NET platforms + // mark as volatile to ensure we always read the correct value + internal volatile TValue Value; + internal volatile LinkedNode Next; + } + + public class Snapshot : IDisposable + { + private readonly SnapDictionary _store; + private readonly GenerationReference _generationReference; + private long _gen; // copied for perfs + + internal Snapshot(SnapDictionary store, GenerationReference generationReference) + { + _store = store; + _generationReference = generationReference; + _gen = generationReference.GenerationObject.Gen; + _generationReference.GenerationObject.Reference(); + } + + public TValue Get(TKey key) + { + if (_gen < 0) + throw new ObjectDisposedException("snapshot" /*+ " (" + _thisCount + ")"*/); + return _store.Get(key, _gen); + } + + public IEnumerable GetAll() + { + if (_gen < 0) + throw new ObjectDisposedException("snapshot" /*+ " (" + _thisCount + ")"*/); + return _store.GetAll(_gen); + } + + public bool IsEmpty + { + get + { + if (_gen < 0) + throw new ObjectDisposedException("snapshot" /*+ " (" + _thisCount + ")"*/); + return _store.IsEmpty(_gen); + } + } + + public long Gen + { + get + { + if (_gen < 0) + throw new ObjectDisposedException("snapshot" /*+ " (" + _thisCount + ")"*/); + return _gen; + } + } + + public void Dispose() + { + if (_gen < 0) return; + _gen = -1; + _generationReference.GenerationObject.Release(); + GC.SuppressFinalize(this); + } + } + + internal class GenerationObject + { + public GenerationObject(long gen) + { + Gen = gen; + WeakReference = new WeakReference(null); + } + + public GenerationReference GetReference() + { + // not thread-safe but always invoked from within a lock + var generationReference = (GenerationReference) WeakReference.Target; + if (generationReference == null) + 
+                    WeakReference.Target = generationReference = new GenerationReference(this);
+                return generationReference;
+            }
+
+            public readonly long Gen;
+            public readonly WeakReference WeakReference;
+            public int Count;
+
+            public void Reference()
+            {
+                Interlocked.Increment(ref Count);
+            }
+
+            public void Release()
+            {
+                Interlocked.Decrement(ref Count);
+            }
+        }
+
+        internal class GenerationReference
+        {
+            public GenerationReference(GenerationObject generationObject)
+            {
+                GenerationObject = generationObject;
+            }
+
+            public readonly GenerationObject GenerationObject;
+        }
+
+        #endregion
+    }
+}
diff --git a/src/Umbraco.Web/PublishedCache/NuCache/notes.txt b/src/Umbraco.Web/PublishedCache/NuCache/notes.txt
new file mode 100644
index 0000000000..ef145bf0fd
--- /dev/null
+++ b/src/Umbraco.Web/PublishedCache/NuCache/notes.txt
@@ -0,0 +1,120 @@
+NuCache Documentation
+======================
+
+HOW IT WORKS
+-------------
+
+NuCache uses a ContentStore to keep content - basically, a dictionary of int => content,
+plus some logic to keep it up to date. In order to provide immutable content to page
+rendering, a ContentStore can create ContentViews. A ContentView basically is another
+int => content dictionary, containing entries only for things that have changed in the
+ContentStore - so the ContentStore changes, but it updates the views so that they keep
+exposing the content as it was when each view was created.
+
+Views are chained, ie each new view is the parent of the previously existing views. A
+view knows its parent but not the other way round, so views simply disappear when they
+are garbage collected.
+
+When reading the cache, we read views up the chain until we find a value (which may be
+null) for the given id, and finally we read the store itself.
+
+
+The FacadeService manages a ContentStore for content, and another for media.
+When a Facade is created, the FacadeService gets ContentView objects from the stores.
+Views provide an immutable snapshot of the content and media.
+
+When the FacadeService is notified of changes, it notifies the stores.
+Then it resyncs the current Facade, so that it acquires new views, etc.
+
+Whenever a content, media or member is modified or removed, the cmsContentNu table
+is updated with a json dictionary of alias => property value, so that a content,
+media or member can be loaded with one database row - this is what is used to populate
+the in-memory cache.
+
+
+A ContentStore actually stores ContentNode instances, which contain what's common to
+both the published and draft versions of a content item, plus the actual published
+and/or draft content.
+
+
+LOCKS
+------
+
+Each ContentStore is protected by a reader/writer lock 'Locker' that is used both by
+the store and its views to ensure that the store remains consistent.
+
+Each ContentStore has a _freezeLock object used to protect the 'Frozen'
+state of the store. It's a disposable object that releases the lock when disposed,
+so usage would be: using (store.Frozen) { ... }.
+
+The FacadeService has a _storesLock object used to guarantee atomic access to the
+set of content and media stores.
+
+
+CACHE
+------
+
+For each set of views, the FacadeService creates a SnapshotCache. So a SnapshotCache
+is valid until anything changes in the content or media trees. In other words, things
+that go in the SnapshotCache stay there until a content or media item is modified.
+
+For each Facade, the FacadeService creates a FacadeCache. So a FacadeCache is valid
+for the duration of the Facade (usually, the request). In other words, things that go
+in the FacadeCache stay there (and are visible) for the duration of the request only.
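+
+As an illustration, this is roughly how code picks the cache it reads from and writes
+to - a simplified sketch of what Property.GetValueSet and PublishedContent's
+GetAppropriateFacadeCache do (isPreviewing and isMember stand in for the actual fields):
+
+    var facade = Facade.Current;
+    var cache = facade == null
+        ? null
+        : ((isPreviewing == false || FacadeService.FullCacheWhenPreviewing) && isMember == false
+            ? facade.SnapshotCache  // survives until a content or media change
+            : facade.FacadeCache);  // survives for the current request only
+    // cache may be null (no current facade), in which case nothing is cached
+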
+The FacadeService defines a static constant, FullCacheWhenPreviewing, that controls
+how the caches operate when previewing:
+- when true, the caches in preview mode work normally.
+- when false, everything that would go to the SnapshotCache goes to the FacadeCache.
+At the moment it is true in the code, which means that eg converted values for
+previewed content will go in the SnapshotCache. This makes for faster previews, but
+uses more memory in the long term... some benchmarking would be needed to figure out
+what is best.
+
+Members only live for the duration of the Facade. So, for members, the SnapshotCache
+is never used and everything goes to the FacadeCache.
+
+All cache keys are computed in the CacheKeys static class.
+
+
+TESTS
+-----
+
+For testing purposes, the following mechanisms exist:
+
+The Facade type has a static Current property that is used to obtain the 'current'
+facade in many places, going through the PublishedCachesServiceResolver to get the
+current service, and asking the current service for the current facade, which by
+default relies on UmbracoContext. For test purposes, it is possible to override the
+entire mechanism by defining Facade.GetCurrentFacadeFunc, which should return a facade.
+
+A PublishedContent keeps only id references to its parent and children, and needs a
+way to retrieve the actual objects from the cache - which depends on the current
+facade. It is possible to override the entire mechanism by defining
+PublishedContent.GetContentByIdFunc or PublishedContent.GetMediaByIdFunc.
+
+Setting these functions must be done before Resolution is frozen (see the sketch at
+the end of this document).
+
+
+STATUS
+------
+
+"Detached" contents & properties, which need to be refactored anyway, are not supported
+by NuCache - ContentCache throws NotImplemented for them.
+
+All the cached elements rely on guids, not ints, for their cache keys, so it could be
+possible to support detached contents & properties, even those that do not have an
+actual int id - but again, this should be refactored entirely anyway.
+
+We do not do any row-version checks (see XmlStore) when reloading from the database,
+although the version is maintained in the database. There are two FIXMEs in
+FacadeService. Should we do it?
+
+There is no on-disk cache at all, so everything is reloaded from the cmsContentNu table
+when the site restarts. This is pretty fast, but we should experiment with solutions to
+store things locally (and deal with the sync issues, see XmlStore...).
+
+We are doing our best with PublishedMember, but the whole thing should be refactored,
+because PublishedMember exposes properties that IPublishedContent does not, and these
+are lost as soon as the member is wrapped (content set, model...) - so we probably need
+some sort of IPublishedMember.
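+
+Eg a unit test could wire the hooks described in TESTS above roughly like this (a
+sketch only - the exact delegate types live on Facade and PublishedContent, and
+myTestFacade / GetTestContent / GetTestMedia are whatever the test provides):
+
+    // must run before Resolution is frozen
+    Facade.GetCurrentFacadeFunc = () => myTestFacade;
+    PublishedContent.GetContentByIdFunc = (facade, previewing, id) => GetTestContent(id);
+    PublishedContent.GetMediaByIdFunc = (facade, previewing, id) => GetTestMedia(id);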
+ +/ \ No newline at end of file diff --git a/src/Umbraco.Web/PublishedCache/XmlPublishedCache/XmlStore.cs b/src/Umbraco.Web/PublishedCache/XmlPublishedCache/XmlStore.cs index 6fa32d6a14..7f0f96a990 100644 --- a/src/Umbraco.Web/PublishedCache/XmlPublishedCache/XmlStore.cs +++ b/src/Umbraco.Web/PublishedCache/XmlPublishedCache/XmlStore.cs @@ -17,7 +17,6 @@ using Umbraco.Core.Models.Rdbms; using Umbraco.Core.ObjectResolution; using Umbraco.Core.Persistence; using Umbraco.Core.Persistence.DatabaseModelDefinitions; -using Umbraco.Core.Persistence.Querying; using Umbraco.Core.Persistence.Repositories; using Umbraco.Core.Persistence.UnitOfWork; using Umbraco.Core.Services; diff --git a/src/Umbraco.Web/Umbraco.Web.csproj b/src/Umbraco.Web/Umbraco.Web.csproj index e27b8561d1..2b79b3d7d0 100644 --- a/src/Umbraco.Web/Umbraco.Web.csproj +++ b/src/Umbraco.Web/Umbraco.Web.csproj @@ -213,6 +213,31 @@ + + + + + + + + + + + + + + + + + + + + + + + + + @@ -236,6 +261,8 @@ + + @@ -1527,6 +1554,7 @@ + ASPXCodeBehind diff --git a/src/Umbraco.Web/WebBootManager.cs b/src/Umbraco.Web/WebBootManager.cs index a23a1c9732..0abe9672ee 100644 --- a/src/Umbraco.Web/WebBootManager.cs +++ b/src/Umbraco.Web/WebBootManager.cs @@ -335,11 +335,17 @@ namespace Umbraco.Web container.RegisterSingleton(); // register the facade service - container.RegisterSingleton(factory => new FacadeService( + //container.RegisterSingleton(factory => new FacadeService( + // factory.GetInstance(), + // factory.GetInstance(), + // factory.GetInstance().RequestCache, + // factory.GetAllInstances())); + container.RegisterSingleton(factory => new PublishedCache.NuCache.FacadeService( + new PublishedCache.NuCache.FacadeService.Options { FacadeCacheIsApplicationRequestCache = true }, + factory.GetInstance().MainDom, factory.GetInstance(), factory.GetInstance(), - factory.GetInstance().RequestCache, - factory.GetAllInstances())); + factory.GetInstance())); //no need to declare as per request, currently we manage it's lifetime as the singleton container.Register(factory => UmbracoContext.Current); diff --git a/src/Umbraco.Web/WebServices/FacadeStatusController.cs b/src/Umbraco.Web/WebServices/FacadeStatusController.cs new file mode 100644 index 0000000000..7e0bf36930 --- /dev/null +++ b/src/Umbraco.Web/WebServices/FacadeStatusController.cs @@ -0,0 +1,23 @@ +using System; +using System.Web.Http; +using Umbraco.Web.PublishedCache; +using Umbraco.Web.WebApi; + +namespace Umbraco.Web.WebServices +{ + public class FacadeStatusController : UmbracoAuthorizedApiController + { + [HttpGet] + public string GetFacadeStatusUrl() + { + var service = FacadeServiceResolver.Current.Service; + if (service is Umbraco.Web.PublishedCache.XmlPublishedCache.FacadeService) + return "views/dashboard/developer/xmldataintegrityreport.html"; + //if (service is PublishedCache.PublishedNoCache.FacadeService) + // return "views/dashboard/developer/nocache.html"; + if (service is PublishedCache.NuCache.FacadeService) + return "views/dashboard/developer/nucache.html"; + throw new NotSupportedException("Not supported: " + service.GetType().FullName); + } + } +} diff --git a/src/Umbraco.Web/WebServices/NuCacheStatusController.cs b/src/Umbraco.Web/WebServices/NuCacheStatusController.cs new file mode 100644 index 0000000000..4dcdddb1cb --- /dev/null +++ b/src/Umbraco.Web/WebServices/NuCacheStatusController.cs @@ -0,0 +1,56 @@ +using System; +using System.Web.Http; +using Umbraco.Web.Cache; +using Umbraco.Web.PublishedCache; +using Umbraco.Web.PublishedCache.NuCache; +using 
Umbraco.Web.WebApi; + +namespace Umbraco.Web.WebServices +{ + public class NuCacheStatusController : UmbracoAuthorizedApiController + { + private static FacadeService FacadeService + { + get + { + var svc = FacadeServiceResolver.Current.Service as FacadeService; + if (svc == null) + throw new NotSupportedException("Not running NuCache."); + return svc; + } + } + + [HttpPost] + public string RebuildDbCache() + { + // fixme - should wrap in a service scope once we have them + var service = FacadeService; + service.RebuildContentDbCache(); + service.RebuildMediaDbCache(); + service.RebuildMemberDbCache(); + return service.GetStatus(); + } + + [HttpGet] + public string GetStatus() + { + var service = FacadeService; + return service.GetStatus(); + } + + [HttpGet] + public string Collect() + { + var service = FacadeService; + GC.Collect(); + service.Collect(); + return service.GetStatus(); + } + + [HttpPost] + public void ReloadCache() + { + DistributedCache.Instance.RefreshAllFacade(); + } + } +} diff --git a/src/Umbraco.Web/project.json b/src/Umbraco.Web/project.json index 95949d48cf..88c0b263cc 100644 --- a/src/Umbraco.Web/project.json +++ b/src/Umbraco.Web/project.json @@ -35,7 +35,8 @@ "semver": "1.*", "SharpZipLib": "0.86.0", "UrlRewritingNet.UrlRewriter": "2.0.7", - "xmlrpcnet": "2.5.0" + "xmlrpcnet": "2.5.0", + "CSharpTest.Net.Collections": "14.906.1403.1082" }, "frameworks": { "net461": {}