diff --git a/src/Umbraco.Tests/Umbraco.Tests.csproj b/src/Umbraco.Tests/Umbraco.Tests.csproj
index d90c0e6345..dbb877ee64 100644
--- a/src/Umbraco.Tests/Umbraco.Tests.csproj
+++ b/src/Umbraco.Tests/Umbraco.Tests.csproj
@@ -163,6 +163,7 @@
+ <Compile Include="UmbracoExamine\UmbracoContentIndexerTests.cs" />
diff --git a/src/Umbraco.Tests/UmbracoExamine/IndexInitializer.cs b/src/Umbraco.Tests/UmbracoExamine/IndexInitializer.cs
index 00e94ced63..f57b1af213 100644
--- a/src/Umbraco.Tests/UmbracoExamine/IndexInitializer.cs
+++ b/src/Umbraco.Tests/UmbracoExamine/IndexInitializer.cs
@@ -41,7 +41,8 @@ namespace Umbraco.Tests.UmbracoExamine
IDataTypeService dataTypeService = null,
IMemberService memberService = null,
IUserService userService = null,
- IContentTypeService contentTypeService = null)
+ IContentTypeService contentTypeService = null,
+ bool supportUnpublishedContent = false)
{
if (dataService == null)
{
@@ -185,7 +186,10 @@ namespace Umbraco.Tests.UmbracoExamine
userService,
contentTypeService,
analyzer,
- false);
+ false)
+ {
+ SupportUnpublishedContent = supportUnpublishedContent
+ };
//i.IndexSecondsInterval = 1;
diff --git a/src/Umbraco.Tests/UmbracoExamine/IndexTest.cs b/src/Umbraco.Tests/UmbracoExamine/IndexTest.cs
index 5d6812f94a..5f3f5525c1 100644
--- a/src/Umbraco.Tests/UmbracoExamine/IndexTest.cs
+++ b/src/Umbraco.Tests/UmbracoExamine/IndexTest.cs
@@ -22,211 +22,222 @@ namespace Umbraco.Tests.UmbracoExamine
[DatabaseTestBehavior(DatabaseBehavior.NewDbFileAndSchemaPerTest)]
[TestFixture, RequiresSTA]
public class IndexTest : ExamineBaseTest
- {
+ {
+ /// <summary>
+ /// Check that the node signalled as protected in the content service is not present in the index.
+ /// </summary>
+ [Test]
+ public void Index_Protected_Content_Not_Indexed()
+ {
- /////
- /// <summary>
- /// Check that the node signalled as protected in the content service is not present in the index.
- /// </summary>
- [Test]
- public void Index_Protected_Content_Not_Indexed()
- {
+ using (var luceneDir = new RAMDirectory())
+ using (var indexer = IndexInitializer.GetUmbracoIndexer(luceneDir))
+ using (var searcher = IndexInitializer.GetUmbracoSearcher(luceneDir))
+ {
+ indexer.RebuildIndex();
- var protectedQuery = new BooleanQuery();
- protectedQuery.Add(
- new BooleanClause(
- new TermQuery(new Term(LuceneIndexer.IndexTypeFieldName, IndexTypes.Content)),
- BooleanClause.Occur.MUST));
+ var protectedQuery = new BooleanQuery();
+ protectedQuery.Add(
+ new BooleanClause(
+ new TermQuery(new Term(LuceneIndexer.IndexTypeFieldName, IndexTypes.Content)),
+ BooleanClause.Occur.MUST));
- protectedQuery.Add(
- new BooleanClause(
- new TermQuery(new Term(LuceneIndexer.IndexNodeIdFieldName, TestContentService.ProtectedNode.ToString())),
- BooleanClause.Occur.MUST));
+ protectedQuery.Add(
+ new BooleanClause(
+ new TermQuery(new Term(LuceneIndexer.IndexNodeIdFieldName, TestContentService.ProtectedNode.ToString())),
+ BooleanClause.Occur.MUST));
- var collector = new AllHitsCollector(false, true);
- var s = _searcher.GetSearcher();
- s.Search(protectedQuery, collector);
+ var collector = new AllHitsCollector(false, true);
+ var s = searcher.GetSearcher();
+ s.Search(protectedQuery, collector);
- Assert.AreEqual(0, collector.Count, "Protected node should not be indexed");
+ Assert.AreEqual(0, collector.Count, "Protected node should not be indexed");
+ }
- }
+ }
- [Test]
- public void Index_Move_Media_From_Non_Indexable_To_Indexable_ParentID()
- {
- //change parent id to 1116
- var existingCriteria = _indexer.IndexerData;
- _indexer.IndexerData = new IndexCriteria(existingCriteria.StandardFields, existingCriteria.UserFields, existingCriteria.IncludeNodeTypes, existingCriteria.ExcludeNodeTypes,
- 1116);
-
- //rebuild so it excludes children unless they are under 1116
- _indexer.RebuildIndex();
+ [Test]
+ public void Index_Move_Media_From_Non_Indexable_To_Indexable_ParentID()
+ {
+ using (var luceneDir = new RAMDirectory())
+ using (var indexer = IndexInitializer.GetUmbracoIndexer(luceneDir))
+ using (var searcher = IndexInitializer.GetUmbracoSearcher(luceneDir))
+ {
+ indexer.RebuildIndex();
- //ensure that node 2112 doesn't exist
- var results = _searcher.Search(_searcher.CreateSearchCriteria().Id(2112).Compile());
- Assert.AreEqual(0, results.Count());
+ var mediaService = new TestMediaService();
- //get a node from the data repo (this one exists underneath 2222)
- var node = _mediaService.GetLatestMediaByXpath("//*[string-length(@id)>0 and number(@id)>0]")
- .Root
- .Elements()
- .Where(x => (int)x.Attribute("id") == 2112)
- .First();
+ //change parent id to 1116
+ var existingCriteria = indexer.IndexerData;
+ indexer.IndexerData = new IndexCriteria(existingCriteria.StandardFields, existingCriteria.UserFields, existingCriteria.IncludeNodeTypes, existingCriteria.ExcludeNodeTypes,
+ 1116);
- var currPath = (string)node.Attribute("path"); //should be : -1,1111,2222,2112
- Assert.AreEqual("-1,1111,2222,2112", currPath);
+ //rebuild so it excludes children unless they are under 1116
+ indexer.RebuildIndex();
- //now mimic moving 2112 to 1116
- //node.SetAttributeValue("path", currPath.Replace("2222", "1116"));
- node.SetAttributeValue("path", "-1,1116,2112");
- node.SetAttributeValue("parentID", "1116");
+ //ensure that node 2112 doesn't exist
+ var results = searcher.Search(searcher.CreateSearchCriteria().Id(2112).Compile());
+ Assert.AreEqual(0, results.Count());
- //now reindex the node, this should first delete it and then WILL add it because of the parent id constraint
- _indexer.ReIndexNode(node, IndexTypes.Media);
+ //get a node from the data repo (this one exists underneath 2222)
+ var node = mediaService.GetLatestMediaByXpath("//*[string-length(@id)>0 and number(@id)>0]")
+ .Root
+ .Elements()
+ .First(x => (int)x.Attribute("id") == 2112);
- //RESET the parent id
- existingCriteria = ((IndexCriteria)_indexer.IndexerData);
- _indexer.IndexerData = new IndexCriteria(existingCriteria.StandardFields, existingCriteria.UserFields, existingCriteria.IncludeNodeTypes, existingCriteria.ExcludeNodeTypes,
- null);
+ var currPath = (string)node.Attribute("path"); //should be : -1,1111,2222,2112
+ Assert.AreEqual("-1,1111,2222,2112", currPath);
- //now ensure it's deleted
- var newResults = _searcher.Search(_searcher.CreateSearchCriteria().Id(2112).Compile());
- Assert.AreEqual(1, newResults.Count());
- }
+ //now mimic moving 2112 to 1116
+ //node.SetAttributeValue("path", currPath.Replace("2222", "1116"));
+ node.SetAttributeValue("path", "-1,1116,2112");
+ node.SetAttributeValue("parentID", "1116");
- [Test]
+ //now reindex the node, this should first delete it and then WILL add it because of the parent id constraint
+ indexer.ReIndexNode(node, IndexTypes.Media);
+
+ //RESET the parent id
+ existingCriteria = ((IndexCriteria)indexer.IndexerData);
+ indexer.IndexerData = new IndexCriteria(existingCriteria.StandardFields, existingCriteria.UserFields, existingCriteria.IncludeNodeTypes, existingCriteria.ExcludeNodeTypes,
+ null);
+
+ //now ensure it's indexed since it was moved under the indexable parent 1116
+ var newResults = searcher.Search(searcher.CreateSearchCriteria().Id(2112).Compile());
+ Assert.AreEqual(1, newResults.Count());
+ }
+
+
+ }
+
+ [Test]
[Ignore]
- public void Index_Move_Media_To_Non_Indexable_ParentID()
- {
- //get a node from the data repo (this one exists underneath 2222)
- var node = _mediaService.GetLatestMediaByXpath("//*[string-length(@id)>0 and number(@id)>0]")
- .Root
- .Elements()
- .Where(x => (int)x.Attribute("id") == 2112)
- .First();
+ public void Index_Move_Media_To_Non_Indexable_ParentID()
+ {
+ using (var luceneDir = new RAMDirectory())
+ using (var indexer = IndexInitializer.GetUmbracoIndexer(luceneDir))
+ using (var searcher = IndexInitializer.GetUmbracoSearcher(luceneDir))
+ {
+ indexer.RebuildIndex();
- var currPath = (string)node.Attribute("path"); //should be : -1,1111,2222,2112
- Assert.AreEqual("-1,1111,2222,2112", currPath);
+ var mediaService = new TestMediaService();
- //ensure it's indexed
- _indexer.ReIndexNode(node, IndexTypes.Media);
+ //get a node from the data repo (this one exists underneath 2222)
+ var node = mediaService.GetLatestMediaByXpath("//*[string-length(@id)>0 and number(@id)>0]")
+ .Root
+ .Elements()
+ .First(x => (int)x.Attribute("id") == 2112);
- //change the parent node id to be the one it used to exist under
- var existingCriteria = _indexer.IndexerData;
- _indexer.IndexerData = new IndexCriteria(existingCriteria.StandardFields, existingCriteria.UserFields, existingCriteria.IncludeNodeTypes, existingCriteria.ExcludeNodeTypes,
- 2222);
+ var currPath = (string)node.Attribute("path"); //should be : -1,1111,2222,2112
+ Assert.AreEqual("-1,1111,2222,2112", currPath);
- //now mimic moving the node underneath 1116 instead of 2222
- node.SetAttributeValue("path", currPath.Replace("2222", "1116"));
- node.SetAttributeValue("parentID", "1116");
+ //ensure it's indexed
+ indexer.ReIndexNode(node, IndexTypes.Media);
- //now reindex the node, this should first delete it and then NOT add it because of the parent id constraint
- _indexer.ReIndexNode(node, IndexTypes.Media);
+ //change the parent node id to be the one it used to exist under
+ var existingCriteria = indexer.IndexerData;
+ indexer.IndexerData = new IndexCriteria(existingCriteria.StandardFields, existingCriteria.UserFields, existingCriteria.IncludeNodeTypes, existingCriteria.ExcludeNodeTypes,
+ 2222);
- //RESET the parent id
- existingCriteria = ((IndexCriteria)_indexer.IndexerData);
- _indexer.IndexerData = new IndexCriteria(existingCriteria.StandardFields, existingCriteria.UserFields, existingCriteria.IncludeNodeTypes, existingCriteria.ExcludeNodeTypes,
- null);
+ //now mimic moving the node underneath 1116 instead of 2222
+ node.SetAttributeValue("path", currPath.Replace("2222", "1116"));
+ node.SetAttributeValue("parentID", "1116");
- //now ensure it's deleted
- var results = _searcher.Search(_searcher.CreateSearchCriteria().Id(2112).Compile());
- Assert.AreEqual(0, results.Count());
+ //now reindex the node, this should first delete it and then NOT add it because of the parent id constraint
+ indexer.ReIndexNode(node, IndexTypes.Media);
- }
+ //RESET the parent id
+ existingCriteria = ((IndexCriteria)indexer.IndexerData);
+ indexer.IndexerData = new IndexCriteria(existingCriteria.StandardFields, existingCriteria.UserFields, existingCriteria.IncludeNodeTypes, existingCriteria.ExcludeNodeTypes,
+ null);
+
+ //now ensure it's deleted
+ var results = searcher.Search(searcher.CreateSearchCriteria().Id(2112).Compile());
+ Assert.AreEqual(0, results.Count());
+ }
+ }
- /// <summary>
- /// This will ensure that all 'Content' (not media) is cleared from the index using the Lucene API directly.
- /// We then call the Examine method to re-index Content and do some comparisons to ensure that it worked correctly.
- /// </summary>
- [Test]
- public void Index_Reindex_Content()
- {
- var s = (IndexSearcher)_searcher.GetSearcher();
+ /// <summary>
+ /// This will ensure that all 'Content' (not media) is cleared from the index using the Lucene API directly.
+ /// We then call the Examine method to re-index Content and do some comparisons to ensure that it worked correctly.
+ /// </summary>
+ [Test]
+ public void Index_Reindex_Content()
+ {
+ using (var luceneDir = new RAMDirectory())
+ using (var indexer = IndexInitializer.GetUmbracoIndexer(luceneDir, supportUnpublishedContent:true))
+ using (var searcher = IndexInitializer.GetUmbracoSearcher(luceneDir))
+ {
+ indexer.RebuildIndex();
- //first delete all 'Content' (not media). This is done by directly manipulating the index with the Lucene API, not examine!
-
- var contentTerm = new Term(LuceneIndexer.IndexTypeFieldName, IndexTypes.Content);
- var writer = _indexer.GetIndexWriter();
- writer.DeleteDocuments(contentTerm);
- writer.Commit();
-
+ var s = (IndexSearcher)searcher.GetSearcher();
- //make sure the content is gone. This is done with lucene APIs, not examine!
- var collector = new AllHitsCollector(false, true);
- var query = new TermQuery(contentTerm);
- s = (IndexSearcher)_searcher.GetSearcher(); //make sure the searcher is up do date.
- s.Search(query, collector);
- Assert.AreEqual(0, collector.Count);
+ //first delete all 'Content' (not media). This is done by directly manipulating the index with the Lucene API, not examine!
- //call our indexing methods
- _indexer.IndexAll(IndexTypes.Content);
+ var contentTerm = new Term(LuceneIndexer.IndexTypeFieldName, IndexTypes.Content);
+ var writer = indexer.GetIndexWriter();
+ writer.DeleteDocuments(contentTerm);
+ writer.Commit();
+
+ //make sure the content is gone. This is done with lucene APIs, not examine!
+ var collector = new AllHitsCollector(false, true);
+ var query = new TermQuery(contentTerm);
+ s = (IndexSearcher)searcher.GetSearcher(); //make sure the searcher is up to date.
+ s.Search(query, collector);
+ Assert.AreEqual(0, collector.Count);
- collector = new AllHitsCollector(false, true);
- s = (IndexSearcher)_searcher.GetSearcher(); //make sure the searcher is up do date.
- s.Search(query, collector);
- //var ids = new List<string>();
- //for (var i = 0; i < collector.Count;i++)
- //{
- // ids.Add(s.Doc(collector.GetDocId(i)).GetValues("__NodeId")[0]);
- //}
- Assert.AreEqual(20, collector.Count);
- }
+ //call our indexing methods
+ indexer.IndexAll(IndexTypes.Content);
- /// <summary>
- /// This will delete an item from the index and ensure that all children of the node are deleted too!
- /// </summary>
- [Test]
+ collector = new AllHitsCollector(false, true);
+ s = (IndexSearcher)searcher.GetSearcher(); //make sure the searcher is up to date.
+ s.Search(query, collector);
+ //var ids = new List<string>();
+ //for (var i = 0; i < collector.Count;i++)
+ //{
+ // ids.Add(s.Doc(collector.GetDocId(i)).GetValues("__NodeId")[0]);
+ //}
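+ // one more document than before: the indexer above is created with supportUnpublishedContent:true, so unpublished content is indexed as well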
+ Assert.AreEqual(21, collector.Count);
+ }
+ }
+
+ /// <summary>
+ /// This will delete an item from the index and ensure that all children of the node are deleted too!
+ /// </summary>
+ [Test]
[Ignore]
- public void Index_Delete_Index_Item_Ensure_Heirarchy_Removed()
- {
+ public void Index_Delete_Index_Item_Ensure_Heirarchy_Removed()
+ {
- //now delete a node that has children
+ using (var luceneDir = new RAMDirectory())
+ using (var indexer = IndexInitializer.GetUmbracoIndexer(luceneDir))
+ using (var searcher = IndexInitializer.GetUmbracoSearcher(luceneDir))
+ {
+ indexer.RebuildIndex();
- _indexer.DeleteFromIndex(1140.ToString());
- //this node had children: 1141 & 1142, let's ensure they are also removed
+ //now delete a node that has children
- var results = _searcher.Search(_searcher.CreateSearchCriteria().Id(1141).Compile());
- Assert.AreEqual(0, results.Count());
+ indexer.DeleteFromIndex(1140.ToString());
+ //this node had children: 1141 & 1142, let's ensure they are also removed
- results = _searcher.Search(_searcher.CreateSearchCriteria().Id(1142).Compile());
- Assert.AreEqual(0, results.Count());
+ var results = searcher.Search(searcher.CreateSearchCriteria().Id(1141).Compile());
+ Assert.AreEqual(0, results.Count());
- }
-
- #region Private methods and properties
-
- private readonly TestContentService _contentService = new TestContentService();
- private readonly TestMediaService _mediaService = new TestMediaService();
-
- private static UmbracoExamineSearcher _searcher;
- private static UmbracoContentIndexer _indexer;
-
- #endregion
-
- #region Initialize and Cleanup
-
- private Lucene.Net.Store.Directory _luceneDir;
-
- public override void TearDown()
- {
- base.TearDown();
- _luceneDir.Dispose();
+ results = searcher.Search(searcher.CreateSearchCriteria().Id(1142).Compile());
+ Assert.AreEqual(0, results.Count());
+ }
+ }
+
+ #region Initialize and Cleanup
+
+ public override void TearDown()
+ {
+ base.TearDown();
+
UmbracoExamineSearcher.DisableInitializationCheck = null;
BaseUmbracoIndexer.DisableInitializationCheck = null;
}
-
- public override void Initialize()
- {
- base.Initialize();
- _luceneDir = new RAMDirectory();
- _indexer = IndexInitializer.GetUmbracoIndexer(_luceneDir);
- _indexer.RebuildIndex();
- _searcher = IndexInitializer.GetUmbracoSearcher(_luceneDir);
- }
-
-
- #endregion
- }
+ #endregion
+ }
}
\ No newline at end of file
diff --git a/src/Umbraco.Tests/UmbracoExamine/SearchTests.cs b/src/Umbraco.Tests/UmbracoExamine/SearchTests.cs
index 9b8b3d50d7..7eb92ad49d 100644
--- a/src/Umbraco.Tests/UmbracoExamine/SearchTests.cs
+++ b/src/Umbraco.Tests/UmbracoExamine/SearchTests.cs
@@ -20,17 +20,14 @@ namespace Umbraco.Tests.UmbracoExamine
[Test]
public void Test_Sort_Order_Sorting()
{
- //var newIndexFolder = new DirectoryInfo(Path.Combine("App_Data\\SearchTests", Guid.NewGuid().ToString()));
- //System.IO.Directory.CreateDirectory(newIndexFolder.FullName);
-
using (var luceneDir = new RAMDirectory())
- //using (var luceneDir = new SimpleFSDirectory(newIndexFolder))
{
var indexer = IndexInitializer.GetUmbracoIndexer(luceneDir, null,
new TestDataService()
{
ContentService = new TestContentService(TestFiles.umbraco_sort)
- });
+ },
+ supportUnpublishedContent:true);
indexer.RebuildIndex();
var searcher = IndexInitializer.GetUmbracoSearcher(luceneDir);
@@ -69,25 +66,7 @@ namespace Umbraco.Tests.UmbracoExamine
currentSort = sort;
}
return true;
- }
-
- //[Test]
- //public void Test_Index_Type_With_German_Analyzer()
- //{
- // using (var luceneDir = new RAMDirectory())
- // {
- // var indexer = IndexInitializer.GetUmbracoIndexer(luceneDir,
- // new GermanAnalyzer());
- // indexer.RebuildIndex();
- // var searcher = IndexInitializer.GetUmbracoSearcher(luceneDir);
- // }
- //}
-
- //private readonly TestContentService _contentService = new TestContentService();
- //private readonly TestMediaService _mediaService = new TestMediaService();
- //private static UmbracoExamineSearcher _searcher;
- //private static UmbracoContentIndexer _indexer;
- //private Lucene.Net.Store.Directory _luceneDir;
+ }
}
}
diff --git a/src/Umbraco.Tests/UmbracoExamine/UmbracoContentIndexerTests.cs b/src/Umbraco.Tests/UmbracoExamine/UmbracoContentIndexerTests.cs
new file mode 100644
index 0000000000..3278130022
--- /dev/null
+++ b/src/Umbraco.Tests/UmbracoExamine/UmbracoContentIndexerTests.cs
@@ -0,0 +1,107 @@
+using System.Collections.Generic;
+using System.Linq;
+using System.Xml.Linq;
+using Moq;
+using NUnit.Framework;
+using Umbraco.Core;
+using Umbraco.Core.Models;
+using UmbracoExamine;
+
+namespace Umbraco.Tests.UmbracoExamine
+{
+ [TestFixture]
+ public class UmbracoContentIndexerTests : ExamineBaseTest
+ {
+ [Test]
+ public void Get_Serialized_Content_No_Published_Content()
+ {
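+ // three-level content tree; the items marked "// no" are unpublished or sit beneath an unpublished
+ // ancestor, so they are expected to be filtered out when unpublished content is not supported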
+ var contentSet = new List<IContent>
+ {
+ Mock.Of<IContent>(c => c.Id == 1 && c.Path == "-1,1" && c.Published && c.Level == 1),
+ Mock.Of<IContent>(c => c.Id == 2 && c.Path == "-1,2" && c.Published && c.Level == 1),
+ Mock.Of<IContent>(c => c.Id == 3 && c.Path == "-1,3" && c.Published == false && c.Level == 1), // no
+ Mock.Of<IContent>(c => c.Id == 4 && c.Path == "-1,4" && c.Published == false && c.Level == 1), // no
+
+ Mock.Of<IContent>(c => c.Id == 5 && c.Path == "-1,1,5" && c.Published && c.Level == 2),
+ Mock.Of<IContent>(c => c.Id == 6 && c.Path == "-1,2,6" && c.Published == false && c.Level == 2), // no
+ Mock.Of<IContent>(c => c.Id == 7 && c.Path == "-1,3,7" && c.Published && c.Level == 2), // no
+ Mock.Of<IContent>(c => c.Id == 8 && c.Path == "-1,4,8" && c.Published && c.Level == 2), // no
+ Mock.Of<IContent>(c => c.Id == 9 && c.Path == "-1,4,9" && c.Published && c.Level == 2), // no
+
+ Mock.Of<IContent>(c => c.Id == 10 && c.Path == "-1,1,5,10" && c.Published && c.Level == 3),
+ Mock.Of<IContent>(c => c.Id == 15 && c.Path == "-1,1,5,15" && c.Published && c.Level == 3),
+ Mock.Of<IContent>(c => c.Id == 11 && c.Path == "-1,2,6,11" && c.Published && c.Level == 3), // no
+ Mock.Of<IContent>(c => c.Id == 16 && c.Path == "-1,2,6,16" && c.Published && c.Level == 3), // no
+ Mock.Of<IContent>(c => c.Id == 12 && c.Path == "-1,3,7,12" && c.Published && c.Level == 3), // no
+ Mock.Of<IContent>(c => c.Id == 17 && c.Path == "-1,3,7,17" && c.Published && c.Level == 3), // no
+ Mock.Of<IContent>(c => c.Id == 13 && c.Path == "-1,4,8,13" && c.Published && c.Level == 3), // no
+ Mock.Of<IContent>(c => c.Id == 18 && c.Path == "-1,4,8,18" && c.Published && c.Level == 3), // no
+ Mock.Of<IContent>(c => c.Id == 14 && c.Path == "-1,4,9,14" && c.Published && c.Level == 3), // no
+ Mock.Of<IContent>(c => c.Id == 19 && c.Path == "-1,4,9,19" && c.Published && c.Level == 3), // no
+ };
+
+ //ensure the rest of the required values are populated
+ foreach (var content in contentSet)
+ {
+ var mock = Mock.Get(content);
+ mock.Setup(x => x.ContentType).Returns(Mock.Of<IContentType>(type => type.Icon == "hello"));
+ }
+
+ contentSet.Sort((a, b) => Comparer<int>.Default.Compare(a.Level, b.Level));
+
+ var published = new HashSet<string>();
+
+ var result = UmbracoContentIndexer.GetSerializedContent(false, content => new XElement("test"), contentSet, published)
+ .WhereNotNull()
+ .ToArray();
+
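+ // only nodes 1, 2, 5, 10 and 15 survive: published nodes whose entire ancestor path is also published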
+ Assert.AreEqual(5, result.Length);
+ }
+
+ [Test]
+ public void Get_Serialized_Content_With_Published_Content()
+ {
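+ // same tree as the test above, but this time the content is serialized with unpublished content supported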
+ var contentSet = new List<IContent>
+ {
+ Mock.Of<IContent>(c => c.Id == 1 && c.Path == "-1,1" && c.Published && c.Level == 1),
+ Mock.Of<IContent>(c => c.Id == 2 && c.Path == "-1,2" && c.Published && c.Level == 1),
+ Mock.Of<IContent>(c => c.Id == 3 && c.Path == "-1,3" && c.Published == false && c.Level == 1),
+ Mock.Of<IContent>(c => c.Id == 4 && c.Path == "-1,4" && c.Published == false && c.Level == 1),
+
+ Mock.Of<IContent>(c => c.Id == 5 && c.Path == "-1,1,5" && c.Published && c.Level == 2),
+ Mock.Of<IContent>(c => c.Id == 6 && c.Path == "-1,2,6" && c.Published == false && c.Level == 2),
+ Mock.Of<IContent>(c => c.Id == 7 && c.Path == "-1,3,7" && c.Published && c.Level == 2),
+ Mock.Of<IContent>(c => c.Id == 8 && c.Path == "-1,4,8" && c.Published && c.Level == 2),
+ Mock.Of<IContent>(c => c.Id == 9 && c.Path == "-1,4,9" && c.Published && c.Level == 2),
+
+ Mock.Of<IContent>(c => c.Id == 10 && c.Path == "-1,1,5,10" && c.Published && c.Level == 3),
+ Mock.Of<IContent>(c => c.Id == 15 && c.Path == "-1,1,5,15" && c.Published && c.Level == 3),
+ Mock.Of<IContent>(c => c.Id == 11 && c.Path == "-1,2,6,11" && c.Published && c.Level == 3),
+ Mock.Of<IContent>(c => c.Id == 16 && c.Path == "-1,2,6,16" && c.Published && c.Level == 3),
+ Mock.Of<IContent>(c => c.Id == 12 && c.Path == "-1,3,7,12" && c.Published && c.Level == 3),
+ Mock.Of<IContent>(c => c.Id == 17 && c.Path == "-1,3,7,17" && c.Published && c.Level == 3),
+ Mock.Of<IContent>(c => c.Id == 13 && c.Path == "-1,4,8,13" && c.Published && c.Level == 3),
+ Mock.Of<IContent>(c => c.Id == 18 && c.Path == "-1,4,8,18" && c.Published && c.Level == 3),
+ Mock.Of<IContent>(c => c.Id == 14 && c.Path == "-1,4,9,14" && c.Published && c.Level == 3),
+ Mock.Of<IContent>(c => c.Id == 19 && c.Path == "-1,4,9,19" && c.Published && c.Level == 3),
+ };
+
+ //ensure the rest of the required values are populated
+ foreach (var content in contentSet)
+ {
+ var mock = Mock.Get(content);
+ mock.Setup(x => x.ContentType).Returns(Mock.Of<IContentType>(type => type.Icon == "hello"));
+ }
+
+ contentSet.Sort((a, b) => Comparer<int>.Default.Compare(a.Level, b.Level));
+
+ var published = new HashSet<string>();
+
+ var result = UmbracoContentIndexer.GetSerializedContent(true, content => new XElement("test"), contentSet, published)
+ .WhereNotNull()
+ .ToArray();
+
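+ // with unpublished content supported nothing is filtered, so all 19 items are serialized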
+ Assert.AreEqual(19, result.Length);
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/UmbracoExamine/BaseUmbracoIndexer.cs b/src/UmbracoExamine/BaseUmbracoIndexer.cs
index ebb718898b..9ff68b8685 100644
--- a/src/UmbracoExamine/BaseUmbracoIndexer.cs
+++ b/src/UmbracoExamine/BaseUmbracoIndexer.cs
@@ -95,7 +95,7 @@ namespace UmbracoExamine
/// Determines if the manager will call the indexing methods when content is saved or deleted as
/// opposed to cache being updated.
/// </summary>
- public bool SupportUnpublishedContent { get; protected set; }
+ public bool SupportUnpublishedContent { get; protected internal set; }
/// <summary>
/// The data service used for retreiving and submitting data to the cms
diff --git a/src/UmbracoExamine/UmbracoContentIndexer.cs b/src/UmbracoExamine/UmbracoContentIndexer.cs
index efc3e4a214..ff3e4617e0 100644
--- a/src/UmbracoExamine/UmbracoContentIndexer.cs
+++ b/src/UmbracoExamine/UmbracoContentIndexer.cs
@@ -359,12 +359,7 @@ namespace UmbracoExamine
}
#endregion
- #region Protected
-
- /// <summary>
- /// This is a static query, it's parameters don't change so store statically
- /// </summary>
- private IQuery<IContent> _publishedQuery;
+ #region Protected
protected override void PerformIndexAll(string type)
{
@@ -395,11 +390,6 @@ namespace UmbracoExamine
}
else
{
- if (_publishedQuery == null)
- {
- _publishedQuery = Query<IContent>.Builder.Where(x => x.Published == true);
- }
-
//get all paged records but order by level ascending, we need to do this because we need to track which nodes are not published so that we can determine
// which descendent nodes are implicitly not published
descendants = _contentService.GetPagedDescendants(contentParentId, pageIndex, pageSize, out total, "level", Direction.Ascending, true, (string)null);
@@ -415,7 +405,12 @@ namespace UmbracoExamine
{
content = descendants.ToArray();
}
- AddNodesToIndex(GetSerializedContent(content, notPublished).WhereNotNull(), type);
+
+ AddNodesToIndex(GetSerializedContent(
+ SupportUnpublishedContent,
+ c => _serializer.Serialize(_contentService, _dataTypeService, _userService, c),
+ content, notPublished).WhereNotNull(), type);
+
pageIndex++;
} while (content.Length == pageSize);
@@ -473,17 +468,22 @@ namespace UmbracoExamine
}
}
- private IEnumerable<XElement> GetSerializedContent(IEnumerable<IContent> content, ISet<string> notPublished)
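+ // exposed as internal static, taking the serializer as a delegate, so the publish-state filtering can be unit tested in isolation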
+ internal static IEnumerable<XElement> GetSerializedContent(
+ bool supportUnpublishedContent,
+ Func<IContent, XElement> serializer,
+ IEnumerable<IContent> content,
+ ISet<string> notPublished)
{
foreach (var c in content)
{
- if (SupportUnpublishedContent == false)
+ if (supportUnpublishedContent == false)
{
//if we don't support published content and this is not published then track it and return null
if (c.Published == false)
{
notPublished.Add(c.Path);
yield return null;
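+ // without this continue the unpublished item fell through and was serialized and indexed anyway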
+ continue;
}
//if we don't support published content, check if this content item exists underneath any already tracked
@@ -491,14 +491,11 @@ namespace UmbracoExamine
if (notPublished.Any(path => c.Path.StartsWith(string.Format("{0},", path))))
{
yield return null;
+ continue;
}
- }
+ }
- var xml = _serializer.Serialize(
- _contentService,
- _dataTypeService,
- _userService,
- c);
+ var xml = serializer(c);
//add a custom 'icon' attribute
xml.Add(new XAttribute("icon", c.ContentType.Icon));