Merge pull request #2001 from aml-org/W-15847817/minor-performance-improvements

W-15847817. Minor performance improvements
jisoldi authored May 29, 2024
2 parents 031eeb9 + 53679b7 commit 5324d04
Showing 2 changed files with 25 additions and 25 deletions.
@@ -673,12 +673,14 @@ private[resolution] class MinShapeAlgorithm()(implicit val resolver: ShapeNormal
case DataType.Integer => true
case _ => false
}

private def isNumeric(dataType: String): Boolean = dataType match {
case DataType.Float | DataType.Double | DataType.Number | DataType.Integer | DataType.Long => true
case _ => false
}

+ private def areSameShape(child: Shape, parent: Shape): Boolean = child.id == parent.id && child.id != null

def computeMinShape(child: Shape, parent: Shape): Shape = {
+ if (areSameShape(child, parent)) return child
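The guard added here lets computeMinShape return the child untouched whenever child and parent are the same shape (same non-null id), skipping the merge entirely. A minimal standalone sketch of that early exit, using a hypothetical Node type and a placeholder merge rather than the real AMF Shape model:

// Hypothetical stand-in for an AMF Shape: only the id matters for the guard.
final case class Node(id: String, fields: Map[String, String])

object MinMergeSketch {
  private def areSame(child: Node, parent: Node): Boolean =
    child.id != null && child.id == parent.id

  def computeMin(child: Node, parent: Node): Node = {
    // Same identity: nothing to merge, return the child as-is.
    if (areSame(child, parent)) return child
    // Placeholder for the real min-shape computation.
    child.copy(fields = parent.fields ++ child.fields)
  }

  def main(args: Array[String]): Unit = {
    val n = Node("file://api.raml#/shapes/A", Map("type" -> "string"))
    assert(computeMin(n, n) eq n) // guard hit: no copying, no merge work
  }
}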

@@ -916,25 +918,24 @@ private[resolution] class MinShapeAlgorithm()(implicit val resolver: ShapeNormal
val superProperties = superNode.properties
val baseProperties = baseNode.properties

// Calculate which properties are overwritten by the baseNode
type IsOverridden = Boolean
type PropertyPath = String

- val commonProps: mutable.HashMap[PropertyPath, IsOverridden] = mutable.HashMap()
+ val propsToOverride: mutable.HashMap[PropertyPath, IsOverridden] = mutable.HashMap()

- superProperties.foreach(p => commonProps.put(p.path.value(), false))
+ superProperties.foreach(p => propsToOverride.put(p.path.value(), false))
baseProperties.foreach { p =>
- if (commonProps.contains(p.path.value())) {
-   commonProps.put(p.path.value(), true)
- } else {
-   commonProps.put(p.path.value(), false)
- }
+ val isOverridden = propsToOverride.contains(p.path.value())
+ propsToOverride.put(p.path.value(), isOverridden)
}

- val minProps = commonProps.map {
+ val minProps = propsToOverride.map {
case (path, true) =>
- val child = baseProperties.find(_.path.is(path)).get
- val parent = superProperties.find(_.path.is(path)).get
- if (parent.id != child.id) {
+ // It returns true because the prop is present in both nodes
+ val childProp = baseProperties.find(_.path.is(path)).get
+ val parentProp = superProperties.find(_.path.is(path)).get
+ if (parentProp.id != childProp.id) {

/** We need to ensure the child property "belongs" to the current node in order to create a new inheritance.
* This is because the child property might have been inherited from another parent and we end up mutating
@@ -948,12 +949,11 @@ private[resolution] class MinShapeAlgorithm()(implicit val resolver: ShapeNormal
*
* The range of Child.a is [Node1, Node2] while Parent1.a is just Node1
*/
- val childCopy = child.copyShape()
- childCopy.withId(child.id)
+ val childPropCopy = childProp.copyShape().withId(childProp.id)

- createNewInheritanceAndQueue(childCopy, parent)
+ createNewInheritanceAndQueue(childPropCopy, parentProp)
} else {
- child
+ childProp
}

case (path, false) =>
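The rewritten property merge does one pass per node: every parent property path is seeded with false, and a child property flips its flag to true only when the parent already declared that path, which is exactly what the old if/else spelled out in two branches. An overridden property is then copied before a new inheritance is created, so a property shared with another node is never mutated in place. A self-contained sketch of the same pattern, with a hypothetical Prop type and a placeholder merge standing in for createNewInheritanceAndQueue:

import scala.collection.mutable

// Hypothetical stand-in for an AMF PropertyShape: only path and id matter here.
final case class Prop(path: String, id: String, range: String)

object PropertyOverrideSketch {
  def mergeProps(baseProps: Seq[Prop], superProps: Seq[Prop]): Seq[Prop] = {
    // true = declared by both nodes, so the child overrides the parent.
    val propsToOverride = mutable.HashMap[String, Boolean]()
    superProps.foreach(p => propsToOverride.put(p.path, false))
    baseProps.foreach(p => propsToOverride.put(p.path, propsToOverride.contains(p.path)))

    propsToOverride.toSeq.map {
      case (path, true) =>
        val childProp  = baseProps.find(_.path == path).get
        val parentProp = superProps.find(_.path == path).get
        // Work on a copy of the child property (as copyShape does above) so nothing shared is mutated.
        childProp.copy(range = s"${childProp.range} & ${parentProp.range}") // placeholder merge
      case (path, false) =>
        // Declared by only one of the two nodes: taken as-is.
        (baseProps ++ superProps).find(_.path == path).get
    }
  }

  def main(args: Array[String]): Unit = {
    val parent = Seq(Prop("a", "parent#a", "string"), Prop("b", "parent#b", "integer"))
    val child  = Seq(Prop("a", "child#a", "string"))
    mergeProps(child, parent).foreach(println) // "a" is overridden by the child, "b" is inherited as-is
  }
}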
@@ -9,12 +9,13 @@ import amf.shapes.client.scala.model.domain._
import amf.shapes.internal.domain.metamodel._
import amf.shapes.internal.validation.definitions.ShapeResolutionSideValidations.InvalidTypeInheritanceWarningSpecification

+ import scala.collection.immutable.Queue
import scala.collection.mutable

case class ShapeNormalizationInheritanceResolver(context: NormalizationContext) {

private val algorithm: MinShapeAlgorithm = new MinShapeAlgorithm()(this)
- private var queue: Seq[Shape] = Seq.empty
+ private var queue: Queue[Shape] = Queue()

def log(msg: String): Unit = context.logger.log(msg)
def getCached(shape: Shape): Option[Shape] = context.resolvedInheritanceIndex.get(shape.id)
@@ -62,13 +63,9 @@ case class ShapeNormalizationInheritanceResolver(context: NormalizationContext)
private def addToCache(shape: Shape, id: String) = context.resolvedInheritanceIndex += (shape, id)
private def addToCache(shape: Shape) = context.resolvedInheritanceIndex += shape

- def removeFromQueue(shape: Shape): Unit = {
-   log(s"removing from queue: ${shape.debugInfo()}")
-   queue = queue.filterNot(_ == shape)
- }
def queue(shape: Shape): Unit = {
log(s"queueing: ${shape.debugInfo()}")
- queue = queue :+ shape
+ queue = queue enqueue shape
}

def normalize(shape: Shape, skipQueue: Boolean = false): Shape = {
@@ -82,9 +79,9 @@ case class ShapeNormalizationInheritanceResolver(context: NormalizationContext)
addToCache(result)

while (queue.nonEmpty && !skipQueue) {
- val next = queue.head
+ val (next, newQueue) = queue.dequeue
log(s"queue is not empty ----- ")
- queue = queue.tail
+ queue = newQueue
normalize(next, skipQueue = true) // do not nest queued normalizations
}
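The hunks above switch the pending-shape buffer from Seq (appended with :+, drained with head and tail, pruned with filterNot) to scala.collection.immutable.Queue, whose enqueue and dequeue are amortized constant time, and drop removeFromQueue entirely instead of paying a linear scan. A minimal sketch of the resulting drain loop in isolation, with a hypothetical process function standing in for normalize:

import scala.collection.immutable.Queue

object QueueDrainSketch {
  def drain[A](initial: Queue[A])(process: A => Unit): Unit = {
    var queue = initial
    while (queue.nonEmpty) {
      // dequeue yields the front element plus the remaining queue in one call,
      // replacing the previous queue.head / queue.tail pair used on the Seq.
      val (next, rest) = queue.dequeue
      queue = rest
      process(next)
    }
  }

  def main(args: Array[String]): Unit = {
    var pending = Queue("shapeA")
    // enqueue is amortized O(1); :+ on the previous Seq (a List by default) rebuilds the whole list.
    pending = pending.enqueue("shapeB").enqueue("shapeC")
    drain(pending)(println) // prints shapeA, shapeB, shapeC in FIFO order
  }
}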

@@ -141,14 +138,17 @@ case class ShapeNormalizationInheritanceResolver(context: NormalizationContext)
}
}

- private def inheritFromSuperTypes(shape: Shape, superTypes: Seq[Shape]) = {
+ private def inheritFromSuperTypes(shape: Shape, superTypes: Seq[Shape]): Shape = {
shape.fields.removeField(ShapeModel.Inherits)
superTypes.fold(shape) { (accShape, superType) =>
- // go up the inheritance chain before applying type. We want to apply inheritance with the accumulated super type
+ // Go up the inheritance chain before applying type. We want to apply inheritance with the accumulated super type
log(s"inherit from super type: ${superType.debugInfo()}")

+ // Runs normalize for super shape
+ context.logger.addPadding()
val normalizedSuperType = normalize(superType, skipQueue = true)
+ context.logger.removePadding()

if (detectedRecursion) accShape
else {
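The last hunk gives inheritFromSuperTypes an explicit Shape return type and brackets the nested normalize of each super type with logger padding; the accumulation itself stays a fold that applies inheritance against the shape built up so far. A self-contained sketch of that folding structure, with hypothetical SimpleShape and applyInheritance stand-ins for the AMF types and algorithm:

object InheritFoldSketch {
  // Hypothetical stand-in: a "shape" is just a name plus a set of constraint names.
  final case class SimpleShape(name: String, constraints: Set[String])

  // Placeholder for the min-shape computation: merge the parent's constraints into the accumulator.
  private def applyInheritance(acc: SimpleShape, parent: SimpleShape): SimpleShape =
    acc.copy(constraints = acc.constraints ++ parent.constraints)

  def inheritFromSuperTypes(shape: SimpleShape, superTypes: Seq[SimpleShape]): SimpleShape =
    // Each step applies inheritance against the shape accumulated so far,
    // mirroring superTypes.fold(shape)(...) in the resolver.
    superTypes.foldLeft(shape)(applyInheritance)

  def main(args: Array[String]): Unit = {
    val child   = SimpleShape("Child", Set("minLength"))
    val parents = Seq(SimpleShape("Parent1", Set("pattern")), SimpleShape("Parent2", Set("maxLength")))
    println(inheritFromSuperTypes(child, parents).constraints) // minLength, pattern and maxLength accumulated
  }
}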

