using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
using NPoco;
using Umbraco.Cms.Core;
using Umbraco.Cms.Core.Cache;
using Umbraco.Cms.Core.Events;
using Umbraco.Cms.Core.Models;
using Umbraco.Cms.Core.Models.Editors;
using Umbraco.Cms.Core.Persistence;
using Umbraco.Cms.Core.Persistence.Querying;
using Umbraco.Cms.Core.Persistence.Repositories;
using Umbraco.Cms.Core.PropertyEditors;
using Umbraco.Cms.Core.Serialization;
using Umbraco.Cms.Core.Services;
using Umbraco.Cms.Infrastructure.Persistence.Dtos;
using Umbraco.Cms.Infrastructure.Persistence.Factories;
using Umbraco.Cms.Infrastructure.Persistence.Querying;
using Umbraco.Core.Scoping;
using Umbraco.Extensions;
using static Umbraco.Cms.Core.Persistence.SqlExtensionsStatics;
namespace Umbraco.Cms.Infrastructure.Persistence.Repositories.Implement
{
internal sealed class ContentRepositoryBase
{
/// <summary>
/// This is used for unit tests ONLY
/// </summary>
public static bool ThrowOnWarning = false;
}
public abstract class ContentRepositoryBase<TId, TEntity, TRepository> : EntityRepositoryBase<TId, TEntity>, IContentRepository<TId, TEntity>
where TEntity : class, IContentBase
where TRepository : class, IRepository
{
private readonly Lazy<PropertyEditorCollection> _propertyEditors;
private readonly DataValueReferenceFactoryCollection _dataValueReferenceFactories;
/// <summary>
/// Initializes a new instance of the <see cref="ContentRepositoryBase{TId, TEntity, TRepository}"/> class.
/// </summary>
/// <param name="propertyEditors">
/// Lazy property value collection - must be lazy because we have a circular dependency since some property editors require services, yet these services require property editors
/// </param>
protected ContentRepositoryBase(
IScopeAccessor scopeAccessor,
AppCaches cache,
ILogger<EntityRepositoryBase<TId, TEntity>> logger,
ILanguageRepository languageRepository,
IRelationRepository relationRepository,
IRelationTypeRepository relationTypeRepository,
Lazy<PropertyEditorCollection> propertyEditors,
DataValueReferenceFactoryCollection dataValueReferenceFactories,
IDataTypeService dataTypeService)
: base(scopeAccessor, cache, logger)
{
DataTypeService = dataTypeService;
LanguageRepository = languageRepository;
RelationRepository = relationRepository;
RelationTypeRepository = relationTypeRepository;
_propertyEditors = propertyEditors;
_dataValueReferenceFactories = dataValueReferenceFactories;
}
protected abstract TRepository This { get; }
protected ILanguageRepository LanguageRepository { get; }
protected IDataTypeService DataTypeService { get; }
protected IRelationRepository RelationRepository { get; }
protected IRelationTypeRepository RelationTypeRepository { get; }
protected PropertyEditorCollection PropertyEditors => _propertyEditors.Value;
#region Versions
// gets a specific version
public abstract TEntity GetVersion(int versionId);
// gets all versions, current first
public abstract IEnumerable<TEntity> GetAllVersions(int nodeId);
// gets all versions, current first
public virtual IEnumerable<TEntity> GetAllVersionsSlim(int nodeId, int skip, int take)
=> GetAllVersions(nodeId).Skip(skip).Take(take);
// gets all version ids, current first
public virtual IEnumerable<int> GetVersionIds(int nodeId, int maxRows)
{
var template = SqlContext.Templates.Get(Cms.Core.Constants.SqlTemplates.VersionableRepository.GetVersionIds, tsql =>
tsql.Select<ContentVersionDto>(x => x.Id)
.From<ContentVersionDto>()
.Where<ContentVersionDto>(x => x.NodeId == SqlTemplate.Arg<int>("nodeId"))
.OrderByDescending<ContentVersionDto>(x => x.Current) // current '1' comes before others '0'
.AndByDescending<ContentVersionDto>(x => x.VersionDate) // most recent first
);
return Database.Fetch<int>(SqlSyntax.SelectTop(template.Sql(nodeId), maxRows));
}
// deletes a specific version
public virtual void DeleteVersion(int versionId)
{
// TODO: test object node type?
// get the version we want to delete
var template = SqlContext.Templates.Get(Cms.Core.Constants.SqlTemplates.VersionableRepository.GetVersion, tsql =>
tsql.Select<ContentVersionDto>().From<ContentVersionDto>().Where<ContentVersionDto>(x => x.Id == SqlTemplate.Arg<int>("versionId"))
);
var versionDto = Database.Fetch<ContentVersionDto>(template.Sql(new { versionId })).FirstOrDefault();
// nothing to delete
if (versionDto == null)
return;
// don't delete the current version
if (versionDto.Current)
throw new InvalidOperationException("Cannot delete the current version.");
PerformDeleteVersion(versionDto.NodeId, versionId);
}
// deletes all versions of an entity, older than a date.
public virtual void DeleteVersions(int nodeId, DateTime versionDate)
{
// TODO: test object node type?
// get the versions we want to delete, excluding the current one
var template = SqlContext.Templates.Get(Cms.Core.Constants.SqlTemplates.VersionableRepository.GetVersions, tsql =>
tsql.Select<ContentVersionDto>().From<ContentVersionDto>().Where<ContentVersionDto>(x => x.NodeId == SqlTemplate.Arg<int>("nodeId") && !x.Current && x.VersionDate < SqlTemplate.Arg<DateTime>("versionDate"))
);
var versionDtos = Database.Fetch<ContentVersionDto>(template.Sql(new { nodeId, versionDate }));
foreach (var versionDto in versionDtos)
PerformDeleteVersion(versionDto.NodeId, versionDto.Id);
}
// actually deletes a version
protected abstract void PerformDeleteVersion(int id, int versionId);
#endregion
#region Count
/// <summary>
/// Count descendants of an item.
/// </summary>
public int CountDescendants(int parentId, string contentTypeAlias = null)
{
var pathMatch = parentId == -1
? "-1,"
: "," + parentId + ",";
var sql = SqlContext.Sql()
.SelectCount()
.From<NodeDto>();
if (contentTypeAlias.IsNullOrWhiteSpace())
{
sql
.Where<NodeDto>(x => x.NodeObjectType == NodeObjectTypeId)
.Where<NodeDto>(x => x.Path.Contains(pathMatch));
}
else
{
sql
.InnerJoin<ContentDto>()
.On<NodeDto, ContentDto>(left => left.NodeId, right => right.NodeId)
.InnerJoin<ContentTypeDto>()
.On<ContentTypeDto, ContentDto>(left => left.NodeId, right => right.ContentTypeId)
.Where<NodeDto>(x => x.NodeObjectType == NodeObjectTypeId)
.Where<NodeDto>(x => x.Path.Contains(pathMatch))
.Where<ContentTypeDto>(x => x.Alias == contentTypeAlias);
}
return Database.ExecuteScalar<int>(sql);
}
/// <summary>
/// Count children of an item.
/// </summary>
public int CountChildren(int parentId, string contentTypeAlias = null)
{
var sql = SqlContext.Sql()
.SelectCount()
.From<NodeDto>();
if (contentTypeAlias.IsNullOrWhiteSpace())
{
sql
.Where<NodeDto>(x => x.NodeObjectType == NodeObjectTypeId)
.Where<NodeDto>(x => x.ParentId == parentId);
}
else
{
sql
.InnerJoin<ContentDto>()
.On<NodeDto, ContentDto>(left => left.NodeId, right => right.NodeId)
.InnerJoin<ContentTypeDto>()
.On<ContentTypeDto, ContentDto>(left => left.NodeId, right => right.ContentTypeId)
.Where<NodeDto>(x => x.NodeObjectType == NodeObjectTypeId)
.Where<NodeDto>(x => x.ParentId == parentId)
.Where<ContentTypeDto>(x => x.Alias == contentTypeAlias);
}
return Database.ExecuteScalar<int>(sql);
}
/// <summary>
/// Count items.
/// </summary>
public int Count(string contentTypeAlias = null)
{
var sql = SqlContext.Sql()
.SelectCount()
.From<NodeDto>();
if (contentTypeAlias.IsNullOrWhiteSpace())
{
sql
.Where<NodeDto>(x => x.NodeObjectType == NodeObjectTypeId);
}
else
{
sql
.InnerJoin<ContentDto>()
.On<NodeDto, ContentDto>(left => left.NodeId, right => right.NodeId)
.InnerJoin<ContentTypeDto>()
.On<ContentTypeDto, ContentDto>(left => left.NodeId, right => right.ContentTypeId)
.Where<NodeDto>(x => x.NodeObjectType == NodeObjectTypeId)
.Where<ContentTypeDto>(x => x.Alias == contentTypeAlias);
}
return Database.ExecuteScalar<int>(sql);
}
#endregion
#region Tags
/// <summary>
/// Updates tags for an item.
/// </summary>
protected void SetEntityTags(IContentBase entity, ITagRepository tagRepo, IJsonSerializer serializer)
{
foreach (var property in entity.Properties)
{
var tagConfiguration = property.GetTagConfiguration(PropertyEditors, DataTypeService);
if (tagConfiguration == null) continue; // not a tags property
if (property.PropertyType.VariesByCulture())
{
var tags = new List<ITag>();
foreach (var pvalue in property.Values)
{
var tagsValue = property.GetTagsValue(PropertyEditors, DataTypeService, serializer, pvalue.Culture);
var languageId = LanguageRepository.GetIdByIsoCode(pvalue.Culture);
var cultureTags = tagsValue.Select(x => new Tag { Group = tagConfiguration.Group, Text = x, LanguageId = languageId });
tags.AddRange(cultureTags);
}
tagRepo.Assign(entity.Id, property.PropertyTypeId, tags);
}
else
{
var tagsValue = property.GetTagsValue(PropertyEditors, DataTypeService, serializer); // strings
var tags = tagsValue.Select(x => new Tag { Group = tagConfiguration.Group, Text = x });
tagRepo.Assign(entity.Id, property.PropertyTypeId, tags);
}
}
}
// TODO: should we do it when un-publishing? or?
/// <summary>
/// Clears tags for an item.
/// </summary>
protected void ClearEntityTags(IContentBase entity, ITagRepository tagRepo)
{
tagRepo.RemoveAll(entity.Id);
}
#endregion
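// Prepares a paged query: appends the optional filter, applies the requested ordering, and additionally
// orders by the node id so that paging over duplicate ordering values remains stable.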
private Sql<ISqlContext> PreparePageSql(Sql<ISqlContext> sql, Sql<ISqlContext> filterSql, Ordering ordering)
{
// non-filtering, non-ordering = nothing to do
if (filterSql == null && ordering.IsEmpty) return sql;
// preserve original
var psql = new Sql<ISqlContext>(sql.SqlContext, sql.SQL, sql.Arguments);
// apply filter
if (filterSql != null)
psql.Append(filterSql);
// non-sorting, we're done
if (ordering.IsEmpty)
return psql;
// else apply ordering
ApplyOrdering(ref psql, ordering);
// no matter what we always MUST order the result also by umbracoNode.id to ensure that all records being ordered by are unique.
// if we do not do this then we end up with issues where we are ordering by a field that has duplicate values (i.e. the 'text' column
// is empty for many nodes) - see: http://issues.umbraco.org/issue/U4-8831
var (dbfield, _) = SqlContext.VisitDto<NodeDto>(x => x.NodeId);
if (ordering.IsCustomField || !ordering.OrderBy.InvariantEquals("id"))
{
psql.OrderBy(GetAliasedField(dbfield, sql));
}
// create prepared sql
// ensure it's single-line as NPoco PagingHelper has issues with multi-lines
psql = Sql(psql.SQL.ToSingleLine(), psql.Arguments);
// replace the magic culture parameter (see DocumentRepository.GetBaseQuery())
if (!ordering.Culture.IsNullOrWhiteSpace())
{
for (var i = 0; i < psql.Arguments.Length; i++)
{
if (psql.Arguments[i] is string s && s == "[[[ISOCODE]]]")
{
psql.Arguments[i] = ordering.Culture;
}
}
}
return psql;
}
private void ApplyOrdering(ref Sql<ISqlContext> sql, Ordering ordering)
{
if (sql == null) throw new ArgumentNullException(nameof(sql));
if (ordering == null) throw new ArgumentNullException(nameof(ordering));
var orderBy = ordering.IsCustomField
? ApplyCustomOrdering(ref sql, ordering)
: ApplySystemOrdering(ref sql, ordering);
// beware! NPoco paging code parses the query to isolate the ORDER BY fragment,
// using a regex that wants "([\w\.\[\]\(\)\s""`,]+)" - meaning that anything
// else in orderBy is going to break NPoco / not be detected
// beware! NPoco paging code (in PagingHelper) collapses everything [foo].[bar]
// to [bar] only, so we MUST use aliases, cannot use [table].[field]
// beware! pre-2012 SqlServer is using a convoluted syntax for paging, which
// includes "SELECT ROW_NUMBER() OVER (ORDER BY ...) poco_rn FROM SELECT (...",
// so anything added here MUST also be part of the inner SELECT statement, ie
// the original statement, AND must be using the proper alias, as the inner SELECT
// will hide the original table.field names entirely
if (ordering.Direction == Direction.Ascending)
sql.OrderBy(orderBy);
else
sql.OrderByDescending(orderBy);
}
protected virtual string ApplySystemOrdering(ref Sql<ISqlContext> sql, Ordering ordering)
{
// id is invariant
if (ordering.OrderBy.InvariantEquals("id"))
return GetAliasedField(SqlSyntax.GetFieldName<NodeDto>(x => x.NodeId), sql);
// sort order is invariant
if (ordering.OrderBy.InvariantEquals("sortOrder"))
return GetAliasedField(SqlSyntax.GetFieldName<NodeDto>(x => x.SortOrder), sql);
// path is invariant
if (ordering.OrderBy.InvariantEquals("path"))
return GetAliasedField(SqlSyntax.GetFieldName<NodeDto>(x => x.Path), sql);
// note: 'owner' is the user who created the item as a whole,
// we don't have an 'owner' per culture (should we?)
if (ordering.OrderBy.InvariantEquals("owner"))
{
var joins = Sql()
.InnerJoin<UserDto>("ownerUser").On<NodeDto, UserDto>((node, user) => node.UserId == user.Id, aliasRight: "ownerUser");
// see notes in ApplyOrdering: the field MUST be selected + aliased
sql = Sql(InsertBefore(sql, "FROM", ", " + SqlSyntax.GetFieldName<UserDto>(x => x.UserName, "ownerUser") + " AS ordering "), sql.Arguments);
sql = InsertJoins(sql, joins);
return "ordering";
}
// note: each version culture variation has a date too,
// maybe we would want to use it instead?
if (ordering.OrderBy.InvariantEquals("versionDate") || ordering.OrderBy.InvariantEquals("updateDate"))
return GetAliasedField(SqlSyntax.GetFieldName(x => x.VersionDate), sql);
// create date is invariant (we don't keep each culture's creation date)
if (ordering.OrderBy.InvariantEquals("createDate"))
return GetAliasedField(SqlSyntax.GetFieldName(x => x.CreateDate), sql);
// name is variant
if (ordering.OrderBy.InvariantEquals("name"))
{
// no culture = can only work on the invariant name
// see notes in ApplyOrdering: the field MUST be aliased
if (ordering.Culture.IsNullOrWhiteSpace())
return GetAliasedField(SqlSyntax.GetFieldName<NodeDto>(x => x.Text), sql);
// "variantName" alias is defined in DocumentRepository.GetBaseQuery
// TODO: what if it is NOT a document but a ... media or whatever?
// previously, we inserted the join+select *here* so we were sure to have it,
// but now that's not the case anymore!
return "variantName";
}
// content type alias is invariant
if (ordering.OrderBy.InvariantEquals("contentTypeAlias"))
{
var joins = Sql()
.InnerJoin<ContentTypeDto>("ctype").On<ContentDto, ContentTypeDto>((content, contentType) => content.ContentTypeId == contentType.NodeId, aliasRight: "ctype");
// see notes in ApplyOrdering: the field MUST be selected + aliased
sql = Sql(InsertBefore(sql, "FROM", ", " + SqlSyntax.GetFieldName<ContentTypeDto>(x => x.Alias, "ctype") + " AS ordering "), sql.Arguments);
sql = InsertJoins(sql, joins);
return "ordering";
}
// previously, we'd accept anything and just sanitize it - not anymore
throw new NotSupportedException($"Ordering by {ordering.OrderBy} not supported.");
}
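// Orders by a user-defined property value: builds a sub-query over the current version's property data,
// outer-joins it into the main statement as "customPropData" and sorts on the aliased "ordering" column.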
private string ApplyCustomOrdering(ref Sql<ISqlContext> sql, Ordering ordering)
{
// sorting by a custom field, so set-up sub-query for ORDER BY clause to pull through value
// from 'current' content version for the given order by field
var sortedInt = string.Format(SqlContext.SqlSyntax.ConvertIntegerToOrderableString, "intValue");
var sortedDecimal = string.Format(SqlContext.SqlSyntax.ConvertDecimalToOrderableString, "decimalValue");
var sortedDate = string.Format(SqlContext.SqlSyntax.ConvertDateToOrderableString, "dateValue");
var sortedString = "COALESCE(varcharValue,'')"; // assuming COALESCE is ok for all syntaxes
// needs to be an outer join since there's no guarantee that any of the nodes have values for this property
var innerSql = Sql().Select($@"CASE
WHEN intValue IS NOT NULL THEN {sortedInt}
WHEN decimalValue IS NOT NULL THEN {sortedDecimal}
WHEN dateValue IS NOT NULL THEN {sortedDate}
ELSE {sortedString}
END AS customPropVal,
cver.nodeId AS customPropNodeId")
.From("cver")
.InnerJoin("opdata")
.On((version, pdata) => version.Id == pdata.VersionId, "cver", "opdata")
.InnerJoin("optype").On((pdata, ptype) => pdata.PropertyTypeId == ptype.Id, "opdata", "optype")
.LeftJoin().On((pdata, lang) => pdata.LanguageId == lang.Id, "opdata")
.Where(x => x.Current, "cver") // always query on current (edit) values
.Where(x => x.Alias == ordering.OrderBy, "optype")
.Where((opdata, lang) => opdata.LanguageId == null || lang.IsoCode == ordering.Culture, "opdata");
// merge arguments
var argsList = sql.Arguments.ToList();
var innerSqlString = ParameterHelper.ProcessParams(innerSql.SQL, innerSql.Arguments, argsList);
// create the outer join complete sql fragment
var outerJoinTempTable = $@"LEFT OUTER JOIN ({innerSqlString}) AS customPropData
ON customPropData.customPropNodeId = {Cms.Core.Constants.DatabaseSchema.Tables.Node}.id "; // trailing space is important!
// insert this just above the first WHERE
var newSql = InsertBefore(sql.SQL, "WHERE", outerJoinTempTable);
// see notes in ApplyOrdering: the field MUST be selected + aliased
newSql = InsertBefore(newSql, "FROM", ", customPropData.customPropVal AS ordering "); // trailing space is important!
// create the new sql
sql = Sql(newSql, argsList.ToArray());
// and order by the custom field
// this original code means that an ascending sort would first expose all NULL values, ie items without a value
return "ordering";
// note: adding an extra sorting criteria on
// "(CASE WHEN customPropData.customPropVal IS NULL THEN 1 ELSE 0 END")
// would ensure that items without a value always come last, both in ASC and DESC-ending sorts
}
public abstract IEnumerable<TEntity> GetPage(IQuery<TEntity> query,
long pageIndex, int pageSize, out long totalRecords,
IQuery<TEntity> filter,
Ordering ordering);
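/// <summary>
/// Verifies that each node's path and level are consistent with its parent, reports any issues found
/// and, when options.FixIssues is enabled, attempts to rebuild the invalid paths.
/// </summary>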
public ContentDataIntegrityReport CheckDataIntegrity(ContentDataIntegrityReportOptions options)
{
var report = new Dictionary<int, ContentDataIntegrityReportEntry>();
var sql = SqlContext.Sql()
.Select<NodeDto>()
.From<NodeDto>()
.Where<NodeDto>(x => x.NodeObjectType == NodeObjectTypeId)
.OrderBy<NodeDto>(x => x.Level, x => x.ParentId, x => x.SortOrder);
var nodesToRebuild = new Dictionary<int, List<NodeDto>>();
var validNodes = new Dictionary<int, NodeDto>();
var rootIds = new[] { Cms.Core.Constants.System.Root, Cms.Core.Constants.System.RecycleBinContent, Cms.Core.Constants.System.RecycleBinMedia };
var currentParentIds = new HashSet<int>(rootIds);
var prevParentIds = currentParentIds;
var lastLevel = -1;
// use a forward cursor (query)
foreach (var node in Database.Query<NodeDto>(sql))
{
if (node.Level != lastLevel)
{
// changing levels
prevParentIds = currentParentIds;
currentParentIds = null;
lastLevel = node.Level;
}
if (currentParentIds == null)
{
// we're reset
currentParentIds = new HashSet<int>();
}
currentParentIds.Add(node.NodeId);
// paths parts without the roots
var pathParts = node.Path.Split(',').Where(x => !rootIds.Contains(int.Parse(x))).ToArray();
if (!prevParentIds.Contains(node.ParentId))
{
// invalid, this will be because the level is wrong (which prob means path is wrong too)
report.Add(node.NodeId, new ContentDataIntegrityReportEntry(ContentDataIntegrityReport.IssueType.InvalidPathAndLevelByParentId));
AppendNodeToFix(nodesToRebuild, node);
}
else if (pathParts.Length == 0)
{
// invalid path
report.Add(node.NodeId, new ContentDataIntegrityReportEntry(ContentDataIntegrityReport.IssueType.InvalidPathEmpty));
AppendNodeToFix(nodesToRebuild, node);
}
else if (pathParts.Length != node.Level)
{
// invalid, either path or level is wrong
report.Add(node.NodeId, new ContentDataIntegrityReportEntry(ContentDataIntegrityReport.IssueType.InvalidPathLevelMismatch));
AppendNodeToFix(nodesToRebuild, node);
}
else if (pathParts[pathParts.Length - 1] != node.NodeId.ToString())
{
// invalid path
report.Add(node.NodeId, new ContentDataIntegrityReportEntry(ContentDataIntegrityReport.IssueType.InvalidPathById));
AppendNodeToFix(nodesToRebuild, node);
}
else if (!rootIds.Contains(node.ParentId) && pathParts[pathParts.Length - 2] != node.ParentId.ToString())
{
// invalid path
report.Add(node.NodeId, new ContentDataIntegrityReportEntry(ContentDataIntegrityReport.IssueType.InvalidPathByParentId));
AppendNodeToFix(nodesToRebuild, node);
}
else
{
// it's valid!
// don't track unless we are configured to fix
if (options.FixIssues)
validNodes.Add(node.NodeId, node);
}
}
var updated = new List<NodeDto>();
if (options.FixIssues)
{
// iterate all valid nodes to see if these are parents for invalid nodes
foreach (var (nodeId, node) in validNodes)
{
if (!nodesToRebuild.TryGetValue(nodeId, out var invalidNodes)) continue;
// now we can try to rebuild the invalid paths.
foreach (var invalidNode in invalidNodes)
{
invalidNode.Level = (short)(node.Level + 1);
invalidNode.Path = node.Path + "," + invalidNode.NodeId;
updated.Add(invalidNode);
}
}
foreach (var node in updated)
{
Database.Update(node);
if (report.TryGetValue(node.NodeId, out var entry))
entry.Fixed = true;
}
}
return new ContentDataIntegrityReport(report);
}
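// Tracks an invalid node against its parent id so its path/level can be rebuilt once the parent is known to be valid.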
private static void AppendNodeToFix(IDictionary<int, List<NodeDto>> nodesToRebuild, NodeDto node)
{
if (nodesToRebuild.TryGetValue(node.ParentId, out var childIds))
childIds.Add(node);
else
nodesToRebuild[node.ParentId] = new List<NodeDto> { node };
}
// here, filter can be null and ordering cannot
protected IEnumerable<TEntity> GetPage<TDto>(IQuery<TEntity> query,
long pageIndex, int pageSize, out long totalRecords,
Func<List<TDto>, IEnumerable<TEntity>> mapDtos,
Sql<ISqlContext> filter,
Ordering ordering)
{
if (ordering == null) throw new ArgumentNullException(nameof(ordering));
// start with base query, and apply the supplied IQuery
if (query == null) query = Query();
var sql = new SqlTranslator<TEntity>(GetBaseQuery(QueryType.Many), query).Translate();
// sort and filter
sql = PreparePageSql(sql, filter, ordering);
// get a page of DTOs and the total count
var pagedResult = Database.Page<TDto>(pageIndex + 1, pageSize, sql);
totalRecords = Convert.ToInt32(pagedResult.TotalItems);
// map the DTOs and return
return mapDtos(pagedResult.Items);
}
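/// <summary>
/// Loads the property data for all supplied versions (edited and published) in batches and builds a
/// property collection per version id.
/// </summary>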
protected IDictionary<int, PropertyCollection> GetPropertyCollections<T>(List<TempContent<T>> temps)
where T : class, IContentBase
{
var versions = new List<int>();
foreach (var temp in temps)
{
versions.Add(temp.VersionId);
if (temp.PublishedVersionId > 0)
versions.Add(temp.PublishedVersionId);
}
if (versions.Count == 0) return new Dictionary<int, PropertyCollection>();
// get all PropertyDataDto for all definitions / versions
var allPropertyDataDtos = Database.FetchByGroups<PropertyDataDto, int>(versions, 2000, batch =>
SqlContext.Sql()
.Select<PropertyDataDto>()
.From<PropertyDataDto>()
.WhereIn<PropertyDataDto>(x => x.VersionId, batch))
.ToList();
// get PropertyDataDto distinct PropertyTypeDto
var allPropertyTypeIds = allPropertyDataDtos.Select(x => x.PropertyTypeId).Distinct().ToList();
var allPropertyTypeDtos = Database.FetchByGroups<PropertyTypeDto, int>(allPropertyTypeIds, 2000, batch =>
SqlContext.Sql()
.Select<PropertyTypeDto>(r => r.Select(x => x.DataTypeDto))
.From<PropertyTypeDto>()
.InnerJoin<DataTypeDto>().On<PropertyTypeDto, DataTypeDto>((left, right) => left.DataTypeId == right.NodeId)
.WhereIn<PropertyTypeDto>(x => x.Id, batch));
// index the types for perfs, and assign to PropertyDataDto
var indexedPropertyTypeDtos = allPropertyTypeDtos.ToDictionary(x => x.Id, x => x);
foreach (var a in allPropertyDataDtos)
a.PropertyTypeDto = indexedPropertyTypeDtos[a.PropertyTypeId];
// now we have
// - the definitions
// - all property data dtos
// - tag editors (Actually ... no we don't since i removed that code, but we don't need them anyways it seems)
// and we need to build the proper property collections
return GetPropertyCollections(temps, allPropertyDataDtos);
}
private IDictionary<int, PropertyCollection> GetPropertyCollections<T>(List<TempContent<T>> temps, IEnumerable<PropertyDataDto> allPropertyDataDtos)
where T : class, IContentBase
{
var result = new Dictionary<int, PropertyCollection>();
var compositionPropertiesIndex = new Dictionary<int, IPropertyType[]>();
// index PropertyDataDto per versionId for perfs
// merge edited and published dtos
var indexedPropertyDataDtos = new Dictionary<int, List<PropertyDataDto>>();
foreach (var dto in allPropertyDataDtos)
{
var versionId = dto.VersionId;
if (indexedPropertyDataDtos.TryGetValue(versionId, out var list) == false)
indexedPropertyDataDtos[versionId] = list = new List<PropertyDataDto>();
list.Add(dto);
}
foreach (var temp in temps)
{
// compositionProperties is the property types for the entire composition
// use an index for perfs
if (compositionPropertiesIndex.TryGetValue(temp.ContentType.Id, out var compositionProperties) == false)
compositionPropertiesIndex[temp.ContentType.Id] = compositionProperties = temp.ContentType.CompositionPropertyTypes.ToArray();
// map the list of PropertyDataDto to a list of Property
var propertyDataDtos = new List<PropertyDataDto>();
if (indexedPropertyDataDtos.TryGetValue(temp.VersionId, out var propertyDataDtos1))
{
propertyDataDtos.AddRange(propertyDataDtos1);
if (temp.VersionId == temp.PublishedVersionId) // dirty corner case
propertyDataDtos.AddRange(propertyDataDtos1.Select(x => x.Clone(-1)));
}
if (temp.VersionId != temp.PublishedVersionId && indexedPropertyDataDtos.TryGetValue(temp.PublishedVersionId, out var propertyDataDtos2))
propertyDataDtos.AddRange(propertyDataDtos2);
var properties = PropertyFactory.BuildEntities(compositionProperties, propertyDataDtos, temp.PublishedVersionId, LanguageRepository).ToList();
if (result.ContainsKey(temp.VersionId))
{
if (ContentRepositoryBase.ThrowOnWarning)
throw new InvalidOperationException($"The query returned multiple property sets for content {temp.Id}, {temp.ContentType.Name}");
Logger.LogWarning("The query returned multiple property sets for content {ContentId}, {ContentTypeName}", temp.Id, temp.ContentType.Name);
}
result[temp.VersionId] = new PropertyCollection(properties);
}
return result;
}
protected string InsertBefore(Sql s, string atToken, string insert)
=> InsertBefore(s.SQL, atToken, insert);
protected string InsertBefore(string s, string atToken, string insert)
{
var pos = s.InvariantIndexOf(atToken);
if (pos < 0) throw new Exception($"Could not find token \"{atToken}\".");
return s.Insert(pos, insert);
}
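// Inserts the supplied join fragment just before the WHERE clause of the original statement, merging its arguments.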
protected Sql<ISqlContext> InsertJoins(Sql<ISqlContext> sql, Sql<ISqlContext> joins)
{
var joinsSql = joins.SQL;
var args = sql.Arguments;
// merge args if any
if (joins.Arguments.Length > 0)
{
var argsList = args.ToList();
joinsSql = ParameterHelper.ProcessParams(joinsSql, joins.Arguments, argsList);
args = argsList.ToArray();
}
return Sql(InsertBefore(sql.SQL, "WHERE", joinsSql), args);
}
private string GetAliasedField(string field, Sql sql)
{
// get alias, if aliased
//
// regex looks for pattern "([\w+].[\w+]) AS ([\w+])" ie "(field) AS (alias)"
// and, if found & a group's field matches the field name, returns the alias
//
// so... if query contains "[umbracoNode].[nodeId] AS [umbracoNode__nodeId]"
// then GetAliased for "[umbracoNode].[nodeId]" returns "[umbracoNode__nodeId]"
var matches = SqlContext.SqlSyntax.AliasRegex.Matches(sql.SQL);
var match = matches.Cast<Match>().FirstOrDefault(m => m.Groups[1].Value.InvariantEquals(field));
return match == null ? field : match.Groups[2].Value;
}
protected string GetQuotedFieldName(string tableName, string fieldName)
{
return SqlContext.SqlSyntax.GetQuotedTableName(tableName) + "." + SqlContext.SqlSyntax.GetQuotedColumnName(fieldName);
}
#region UnitOfWork Events
/*
* TODO: The reason these events are in the repository is for legacy, the events should exist at the service
* level now since we can fire these events within the transaction...
* The reason these events 'need' to fire in the transaction is to ensure data consistency with Nucache (currently
* the only thing that uses them). For example, if the transaction succeeds and NuCache listened to ContentService.Saved
* and then NuCache failed at persisting data after the trans completed, then NuCache would be out of sync. This way
* the entire trans is rolled back if NuCache fails. This is part of the discussion about removing the static events,
* possibly there's 3 levels of eventing, "ing", "scoped" (in trans) and "ed" (after trans).
* These particular events can be moved to the service level. However, see the notes below, it seems the only event we
* really need is the ScopedEntityRefresh. The only tricky part with moving that to the service level is that the
* handlers of that event will need to deal with the data a little differently because it seems that the
* "Published" flag on the content item matters and this event is raised before that flag is switched. Weird.
* We have the ability with IContent to see if something "WasPublished", etc.. so i think we could still use that.
*/
public class ScopedEntityEventArgs : EventArgs
{
public ScopedEntityEventArgs(IScope scope, TEntity entity)
{
Scope = scope;
Entity = entity;
}
public IScope Scope { get; }
public TEntity Entity { get; }
}
public class ScopedVersionEventArgs : EventArgs
{
/// <summary>
/// Initializes a new instance of the <see cref="ScopedVersionEventArgs"/> class.
/// </summary>
public ScopedVersionEventArgs(IScope scope, int entityId, int versionId)
{
Scope = scope;
EntityId = entityId;
VersionId = versionId;
}
/// <summary>
/// Gets the current <see cref="IScope"/>
/// </summary>
public IScope Scope { get; }
/// <summary>
/// Gets the entity id
/// </summary>
public int EntityId { get; }
/// <summary>
/// Gets the version id
/// </summary>
public int VersionId { get; }
}
/// <summary>
/// Occurs when an entity is created or updated from within the scope (transaction)
/// </summary>
public static event TypedEventHandler<TRepository, ScopedEntityEventArgs> ScopedEntityRefresh;
/// <summary>
/// Occurs when an entity is being deleted from within the scope (transaction)
/// </summary>
/// <remarks>
/// TODO: This doesn't seem to be necessary at all, the service "Deleting" events for this would work just fine
/// since they are raised before the item is actually deleted just like this event.
/// </remarks>
public static event TypedEventHandler<TRepository, ScopedEntityEventArgs> ScopeEntityRemove;
/// <summary>
/// Occurs when a version for an entity is being deleted from within the scope (transaction)
/// </summary>
/// <remarks>
/// TODO: This doesn't seem to be necessary at all, the service "DeletingVersions" events for this would work just fine
/// since they are raised before the item is actually deleted just like this event.
/// </remarks>
public static event TypedEventHandler<TRepository, ScopedVersionEventArgs> ScopeVersionRemove;
// used by tests to clear events
internal static void ClearScopeEvents()
{
ScopedEntityRefresh = null;
ScopeEntityRemove = null;
ScopeVersionRemove = null;
}
/// <summary>
/// Raises the <see cref="ScopedEntityRefresh"/> event
/// </summary>
protected void OnUowRefreshedEntity(ScopedEntityEventArgs args)
=> ScopedEntityRefresh.RaiseEvent(args, This);
/// <summary>
/// Raises the <see cref="ScopeEntityRemove"/> event
/// </summary>
protected void OnUowRemovingEntity(ScopedEntityEventArgs args)
=> ScopeEntityRemove.RaiseEvent(args, This);
/// <summary>
/// Raises the <see cref="ScopeVersionRemove"/> event
/// </summary>
protected void OnUowRemovingVersion(ScopedVersionEventArgs args)
=> ScopeVersionRemove.RaiseEvent(args, This);
#endregion
#region Classes
protected class TempContent
{
public TempContent(int id, int versionId, int publishedVersionId, IContentTypeComposition contentType)
{
Id = id;
VersionId = versionId;
PublishedVersionId = publishedVersionId;
ContentType = contentType;
}
/// <summary>
/// Gets or sets the identifier of the content.
/// </summary>
public int Id { get; set; }
/// <summary>
/// Gets or sets the version identifier of the content.
/// </summary>
public int VersionId { get; set; }
/// <summary>
/// Gets or sets the published version identifier of the content.
/// </summary>
public int PublishedVersionId { get; set; }
/// <summary>
/// Gets or sets the content type.
/// </summary>
public IContentTypeComposition ContentType { get; set; }
/// <summary>
/// Gets or sets the identifier of the template 1 of the content.
/// </summary>
public int? Template1Id { get; set; }
/// <summary>
/// Gets or sets the identifier of the template 2 of the content.
/// </summary>
public int? Template2Id { get; set; }
}
protected class TempContent<T> : TempContent
where T : class, IContentBase
{
public TempContent(int id, int versionId, int publishedVersionId, IContentTypeComposition contentType, T content = null)
: base(id, versionId, publishedVersionId, contentType)
{
Content = content;
}
/// <summary>
/// Gets or sets the associated actual content.
/// </summary>
public T Content { get; set; }
}
/// <summary>
/// For Paging, repositories must support returning different query for the query type specified
/// </summary>
protected abstract Sql<ISqlContext> GetBaseQuery(QueryType queryType);
#endregion
#region Utilities
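/// <summary>
/// Gets a node name that is unique among the siblings of the specified parent, based on the existing sibling names.
/// </summary>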
protected virtual string EnsureUniqueNodeName(int parentId, string nodeName, int id = 0)
{
var template = SqlContext.Templates.Get(Cms.Core.Constants.SqlTemplates.VersionableRepository.EnsureUniqueNodeName, tsql => tsql
.Select<NodeDto>(x => Alias(x.NodeId, "id"), x => Alias(x.Text, "name"))
.From<NodeDto>()
.Where<NodeDto>(x => x.NodeObjectType == SqlTemplate.Arg<Guid>("nodeObjectType") && x.ParentId == SqlTemplate.Arg<int>("parentId"))
);
var sql = template.Sql(NodeObjectTypeId, parentId);
var names = Database.Fetch<SimilarNodeName>(sql);
return SimilarNodeName.GetUniqueName(names, id, nodeName);
}
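// Gets the sort order for a new child of the given parent: the current maximum sort order + 1,
// or the supplied 'first' value when the parent has no children yet.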
protected virtual int GetNewChildSortOrder(int parentId, int first)
{
var template = SqlContext.Templates.Get(Cms.Core.Constants.SqlTemplates.VersionableRepository.GetSortOrder, tsql => tsql
.Select("MAX(sortOrder)")
.From<NodeDto>()
.Where<NodeDto>(x => x.NodeObjectType == SqlTemplate.Arg<Guid>("nodeObjectType") && x.ParentId == SqlTemplate.Arg<int>("parentId"))
);
var sql = template.Sql(NodeObjectTypeId, parentId);
var sortOrder = Database.ExecuteScalar<int?>(sql);
return (sortOrder + 1) ?? first;
}
protected virtual NodeDto GetParentNodeDto(int parentId)
{
var template = SqlContext.Templates.Get(Cms.Core.Constants.SqlTemplates.VersionableRepository.GetParentNode, tsql => tsql
.Select<NodeDto>()
.From<NodeDto>()
.Where<NodeDto>(x => x.NodeId == SqlTemplate.Arg<int>("parentId"))
);
var sql = template.Sql(parentId);
var nodeDto = Database.First<NodeDto>(sql);
return nodeDto;
}
protected virtual int GetReservedId(Guid uniqueId)
{
var template = SqlContext.Templates.Get(Cms.Core.Constants.SqlTemplates.VersionableRepository.GetReservedId, tsql => tsql
.Select<NodeDto>(x => x.NodeId)
.From<NodeDto>()
.Where<NodeDto>(x => x.UniqueId == SqlTemplate.Arg<Guid>("uniqueId") && x.NodeObjectType == Cms.Core.Constants.ObjectTypes.IdReservation)
);
var sql = template.Sql(new { uniqueId });
var id = Database.ExecuteScalar<int?>(sql);
return id ?? 0;
}
#endregion
#region Recycle bin
public abstract int RecycleBinId { get; }
public virtual IEnumerable<TEntity> GetRecycleBin()
{
return Get(Query().Where(entity => entity.Trashed));
}
#endregion
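/// <summary>
/// Rebuilds the automatic relations for the entity: removes the existing auto-relations and re-creates them
/// from the references tracked by the property editors and data value reference factories.
/// </summary>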
protected void PersistRelations(TEntity entity)
{
// Get all references from our core built in DataEditors/Property Editors
// Along with seeing if developers want to collect additional references from the DataValueReferenceFactories collection
var trackedRelations = new List<UmbracoEntityReference>();
trackedRelations.AddRange(_dataValueReferenceFactories.GetAllReferences(entity.Properties, PropertyEditors));
//First delete all auto-relations for this entity
RelationRepository.DeleteByParent(entity.Id, Cms.Core.Constants.Conventions.RelationTypes.AutomaticRelationTypes);
if (trackedRelations.Count == 0) return;
trackedRelations = trackedRelations.Distinct().ToList();
var udiToGuids = trackedRelations.Select(x => x.Udi as GuidUdi)
.ToDictionary(x => (Udi)x, x => x.Guid);
//lookup in the DB all INT ids for the GUIDs and chuck into a dictionary
var keyToIds = Database.Fetch<NodeIdKey>(Sql().Select<NodeDto>(x => x.NodeId, x => x.UniqueId).From<NodeDto>().WhereIn<NodeDto>(x => x.UniqueId, udiToGuids.Values))
.ToDictionary(x => x.UniqueId, x => x.NodeId);
var allRelationTypes = RelationTypeRepository.GetMany(Array.Empty<int>())
.ToDictionary(x => x.Alias, x => x);
var toSave = trackedRelations.Select(rel =>
{
if (!allRelationTypes.TryGetValue(rel.RelationTypeAlias, out var relationType))
throw new InvalidOperationException($"The relation type {rel.RelationTypeAlias} does not exist");
if (!udiToGuids.TryGetValue(rel.Udi, out var guid))
return null; // This shouldn't happen!
if (!keyToIds.TryGetValue(guid, out var id))
return null; // This shouldn't happen!
return new ReadOnlyRelation(entity.Id, id, relationType.Id);
}).WhereNotNull();
// Save bulk relations
RelationRepository.SaveBulk(toSave);
}
/// <summary>
/// Inserts property values for the content entity
/// </summary>
/// <remarks>
/// Used when creating a new entity
/// </remarks>
protected void InsertPropertyValues(TEntity entity, int publishedVersionId, out bool edited, out HashSet<string> editedCultures)
{
// persist the property data
var propertyDataDtos = PropertyFactory.BuildDtos(entity.ContentType.Variations, entity.VersionId, publishedVersionId, entity.Properties, LanguageRepository, out edited, out editedCultures);
foreach (var propertyDataDto in propertyDataDtos)
{
Database.Insert(propertyDataDto);
}
// TODO: we can speed this up: Use BulkInsert and then do one SELECT to re-retrieve the property data inserted with assigned IDs.
// This is a perfect thing to benchmark with Benchmark.NET to compare perf between Nuget releases.
}
/// <summary>
/// Used to atomically replace the property values for the entity version specified
/// </summary>
protected void ReplacePropertyValues(TEntity entity, int versionId, int publishedVersionId, out bool edited, out HashSet<string> editedCultures)
{
// Replace the property data.
// Lookup the data to update with a UPDLOCK (using ForUpdate()) this is because we need to be atomic
// and handle DB concurrency. Doing a clear and then re-insert is prone to concurrency issues.
var propDataSql = SqlContext.Sql().Select("*").From<PropertyDataDto>().Where<PropertyDataDto>(x => x.VersionId == versionId).ForUpdate();
var existingPropData = Database.Fetch<PropertyDataDto>(propDataSql);
var propertyTypeToPropertyData = new Dictionary<(int propertyTypeId, int versionId, int? languageId, string segment), PropertyDataDto>();
var existingPropDataIds = new List<int>();
foreach (var p in existingPropData)
{
existingPropDataIds.Add(p.Id);
propertyTypeToPropertyData[(p.PropertyTypeId, p.VersionId, p.LanguageId, p.Segment)] = p;
}
var propertyDataDtos = PropertyFactory.BuildDtos(entity.ContentType.Variations, entity.VersionId, publishedVersionId, entity.Properties, LanguageRepository, out edited, out editedCultures);
foreach (var propertyDataDto in propertyDataDtos)
{
// Check if this already exists and update, else insert a new one
if (propertyTypeToPropertyData.TryGetValue((propertyDataDto.PropertyTypeId, propertyDataDto.VersionId, propertyDataDto.LanguageId, propertyDataDto.Segment), out var propData))
{
propertyDataDto.Id = propData.Id;
Database.Update(propertyDataDto);
}
else
{
// TODO: we can speed this up: Use BulkInsert and then do one SELECT to re-retrieve the property data inserted with assigned IDs.
// This is a perfect thing to benchmark with Benchmark.NET to compare perf between Nuget releases.
Database.Insert(propertyDataDto);
}
// track which ones have been processed
existingPropDataIds.Remove(propertyDataDto.Id);
}
// For any remaining that haven't been processed they need to be deleted
if (existingPropDataIds.Count > 0)
{
Database.Execute(SqlContext.Sql().Delete<PropertyDataDto>().WhereIn<PropertyDataDto>(x => x.Id, existingPropDataIds));
}
}
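// Maps the (id, uniqueId) pair fetched in PersistRelations when resolving GUID references to integer node ids.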
private class NodeIdKey
{
[Column("id")]
public int NodeId { get; set; }
[Column("uniqueId")]
public Guid UniqueId { get; set; }
}
}
}