diff --git a/bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/MemoryGDPRResourceProvider.java b/bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/MemoryGDPRResourceProvider.java
index e35d447..7087051 100644
--- a/bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/MemoryGDPRResourceProvider.java
+++ b/bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/MemoryGDPRResourceProvider.java
@@ -14,7 +14,7 @@ public MemoryGDPRResourceProvider(GDPRModelBuilder modelBuilder) {
}
@Override
- public LegalAssessmentFacts getModel() {
+ public LegalAssessmentFacts getGDPRModel() {
return modelBuilder.getGdprModel();
}
diff --git a/bundles/mdpa.gdpr.analysis/META-INF/MANIFEST.MF b/bundles/mdpa.gdpr.analysis/META-INF/MANIFEST.MF
index 6f30d15..50cd97c 100644
--- a/bundles/mdpa.gdpr.analysis/META-INF/MANIFEST.MF
+++ b/bundles/mdpa.gdpr.analysis/META-INF/MANIFEST.MF
@@ -7,15 +7,17 @@ Export-Package: mdpa.gdpr.analysis,
mdpa.gdpr.analysis.core,
mdpa.gdpr.analysis.core.resource,
mdpa.gdpr.analysis.dfd
+Import-Package: org.antlr.runtime;version="3.2.0",
+ org.eclipse.xtext.parser.antlr
Require-Bundle: mdpa.gdpr.dfdconverter;bundle-version="1.0.0",
mdpa.gdpr.dfdconverter.tracemodel;bundle-version="0.1.0",
- org.dataflowanalysis.analysis;bundle-version="2.0.0",
+ org.dataflowanalysis.analysis,
mdpa.gdpr.metamodel.contextproperties;bundle-version="0.1.0",
- org.dataflowanalysis.analysis.dfd;bundle-version="2.0.0",
+ org.dataflowanalysis.analysis.dfd,
org.apache.log4j;bundle-version="1.2.24",
- org.eclipse.core.runtime;bundle-version="3.26.100",
- tools.mdsd.library.standalone.initialization;bundle-version="0.3.0",
- tools.mdsd.library.standalone.initialization.log4j;bundle-version="0.3.0",
- org.eclipse.emf.ecore.xmi;bundle-version="2.18.0"
+ org.eclipse.core.runtime,
+ tools.mdsd.library.standalone.initialization,
+ tools.mdsd.library.standalone.initialization.log4j,
+ org.eclipse.emf.ecore.xmi
Automatic-Module-Name: mdpa.gdpr.analysis
Bundle-RequiredExecutionEnvironment: JavaSE-17
diff --git a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/DFDUtils.java b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/DFDUtils.java
new file mode 100644
index 0000000..e51b88f
--- /dev/null
+++ b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/DFDUtils.java
@@ -0,0 +1,68 @@
+package mdpa.gdpr.analysis;
+
+import org.dataflowanalysis.dfd.datadictionary.AbstractAssignment;
+import org.dataflowanalysis.dfd.datadictionary.Assignment;
+import org.dataflowanalysis.dfd.datadictionary.Behavior;
+import org.dataflowanalysis.dfd.datadictionary.Pin;
+import org.dataflowanalysis.dfd.datadictionary.datadictionaryFactory;
+import org.dataflowanalysis.dfd.dataflowdiagram.External;
+import org.dataflowanalysis.dfd.dataflowdiagram.Node;
+import org.dataflowanalysis.dfd.dataflowdiagram.Store;
+import org.dataflowanalysis.dfd.dataflowdiagram.dataflowdiagramFactory;
+
+import java.util.Objects;
+
+public class DFDUtils {
+ public DFDUtils() {
+ }
+
+ /**
+ * Deep copies the given Node
+ *
+ * @param oldNode Given node that should be copied
+ * @return Copy of the given node
+ */
+ public Node copyNode(Node oldNode) {
+ Node node;
+ if (oldNode instanceof External) {
+ node = dataflowdiagramFactory.eINSTANCE.createExternal();
+ } else if (oldNode instanceof Store) {
+ node = dataflowdiagramFactory.eINSTANCE.createStore();
+ } else {
+ node = dataflowdiagramFactory.eINSTANCE.createProcess();
+ }
+ node.setId(oldNode.getId());
+ node.setEntityName(oldNode.getEntityName());
+ node.getProperties().addAll(oldNode.getProperties());
+ Behavior behavior = datadictionaryFactory.eINSTANCE.createBehavior();
+ behavior.getInPin().addAll(oldNode.getBehavior().getInPin().stream().map(pin -> {
+ Pin copy = datadictionaryFactory.eINSTANCE.createPin();
+ copy.setId(pin.getId());
+ copy.setEntityName(pin.getEntityName());
+ return copy;
+ }).toList());
+ behavior.getOutPin().addAll(oldNode.getBehavior().getOutPin().stream().map(pin -> {
+ Pin copy = datadictionaryFactory.eINSTANCE.createPin();
+ copy.setId(pin.getId());
+ copy.setEntityName(pin.getEntityName());
+ return copy;
+ }).toList());
+ behavior.getAssignment().forEach(it -> {
+ AbstractAssignment copy;
+ if (it instanceof Assignment oldAssignment) {
+ Assignment newAssignment = datadictionaryFactory.eINSTANCE.createAssignment();
+ for (Pin inputPin : oldAssignment.getInputPins()) {
+ newAssignment.getInputPins().add(behavior.getInPin().stream().filter(pin -> Objects.equals(inputPin.getId(), pin.getId())).findFirst().orElseThrow());
+ }
+ copy = newAssignment;
+ } else {
+ copy = datadictionaryFactory.eINSTANCE.createForwardingAssignment();
+ }
+ copy.setId(it.getId());
+ copy.setEntityName(it.getEntityName());
+ copy.setOutputPin(behavior.getInPin().stream().filter(pin -> Objects.equals(it.getOutputPin().getId(), pin.getId())).findFirst().orElseThrow());
+ });
+ node.setBehavior(behavior);
+ return node;
+ }
+}
\ No newline at end of file
diff --git a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/GDPRLegalAssessmentAnalysis.java b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/GDPRLegalAssessmentAnalysis.java
index 88b493b..06d325a 100644
--- a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/GDPRLegalAssessmentAnalysis.java
+++ b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/GDPRLegalAssessmentAnalysis.java
@@ -12,6 +12,14 @@
import tools.mdsd.library.standalone.initialization.StandaloneInitializationException;
import tools.mdsd.library.standalone.initialization.StandaloneInitializerBuilder;
+/**
+ * Extension of the {@link DataFlowConfidentialityAnalysis} for usage with the GDPR metamodel that is able to resolve
+ * uncertain context dependent attributes.
+ *
+ * Inputs to the analysis are a metamodel instance of the GDPR model and the context properties model
+ *
+ * Note: Do not create an instance of this class manually, use the {@link GDPRLegalAssessmentAnalysisBuilder} instead
+ */
public class GDPRLegalAssessmentAnalysis extends DataFlowConfidentialityAnalysis {
public static final String PLUGIN_PATH = "mdpa.gdpr.analysis";
@@ -21,6 +29,16 @@ public class GDPRLegalAssessmentAnalysis extends DataFlowConfidentialityAnalysis
private final Optional> modelProjectActivator;
private final String modelProjectName;
+ /**
+ * Create a new {@link GDPRLegalAssessmentAnalysis} with the given resource provider and optionally a modelling project
+ * with a plugin activator
+ *
+ * Note: Do not create an instance of this class manually, use the {@link GDPRLegalAssessmentAnalysisBuilder} instead
+ * @param resourceProvider {@link GDPRResourceProvider} providing a metamodel instance of the GDPR and Context Property
+ * model
+ * @param modelProjectActivator Optional model project activator
+ * @param modelProjectName Optional model project name
+ */
public GDPRLegalAssessmentAnalysis(GDPRResourceProvider resourceProvider, Optional> modelProjectActivator,
String modelProjectName) {
this.resourceProvider = resourceProvider;
diff --git a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/GDPRLegalAssessmentAnalysisBuilder.java b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/GDPRLegalAssessmentAnalysisBuilder.java
index a1020ff..4ed3106 100644
--- a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/GDPRLegalAssessmentAnalysisBuilder.java
+++ b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/GDPRLegalAssessmentAnalysisBuilder.java
@@ -8,6 +8,13 @@
import org.dataflowanalysis.analysis.utils.ResourceUtils;
import org.eclipse.core.runtime.Plugin;
+/**
+ * Extension of the {@link DataFlowAnalysisBuilder} responsible for creating a valid {@link GDPRLegalAssessmentAnalysis}
+ * from the following:
+ * - A valid path to a .gdpr metamodel instance
+ * - A valid path to a .contextproperties metamodel
+ * instance
+ */
public class GDPRLegalAssessmentAnalysisBuilder extends DataFlowAnalysisBuilder {
private final Logger logger = Logger.getLogger(GDPRLegalAssessmentAnalysisBuilder.class);
diff --git a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/UncertaintyUtils.java b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/UncertaintyUtils.java
index d1bee40..193ff1b 100644
--- a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/UncertaintyUtils.java
+++ b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/UncertaintyUtils.java
@@ -2,285 +2,181 @@
import java.util.ArrayList;
import java.util.List;
+
import mdpa.gdpr.analysis.core.ContextDependentAttributeScenario;
import mdpa.gdpr.analysis.core.ContextDependentAttributeSource;
import mdpa.gdpr.analysis.dfd.DFDGDPRVertex;
import mdpa.gdpr.metamodel.GDPR.Data;
import mdpa.gdpr.metamodel.GDPR.NaturalPerson;
import mdpa.gdpr.metamodel.GDPR.PersonalData;
-import mdpa.gdpr.metamodel.contextproperties.ContextDefinition;
-import mdpa.gdpr.metamodel.contextproperties.GDPRContextElement;
-import mdpa.gdpr.metamodel.contextproperties.PropertyValue;
-import org.apache.log4j.Level;
+import mdpa.gdpr.metamodel.contextproperties.Expression;
+import mdpa.gdpr.metamodel.contextproperties.LAFScopeElement;
+import mdpa.gdpr.metamodel.contextproperties.Scope;
+
import org.apache.log4j.Logger;
+import org.dataflowanalysis.analysis.utils.LoggerManager;
import org.dataflowanalysis.dfd.datadictionary.Assignment;
-import org.dataflowanalysis.dfd.datadictionary.Behavior;
import org.dataflowanalysis.dfd.datadictionary.DataDictionary;
import org.dataflowanalysis.dfd.datadictionary.Label;
import org.dataflowanalysis.dfd.datadictionary.LabelType;
import org.dataflowanalysis.dfd.datadictionary.Pin;
import org.dataflowanalysis.dfd.datadictionary.datadictionaryFactory;
-import org.eclipse.emf.ecore.util.EcoreUtil;
+import org.dataflowanalysis.dfd.dataflowdiagram.Node;
public class UncertaintyUtils {
- private static final Logger logger = Logger.getLogger(UncertaintyUtils.class);
+ private static final Logger logger = LoggerManager.getLogger(UncertaintyUtils.class);
- public static Behavior createBehavior(DFDGDPRVertex impactedElement, DataDictionary dd, ContextDependentAttributeSource source,
- ContextDependentAttributeScenario scenario, Data targetedData) {
- logger.setLevel(Level.INFO);
+ /**
+ * Modify the behavior for an impacted element using the given CDA Source and Scenario on the given impacted data.
+ * @param element Original DFD {@link Node} that is impacted
+ * @param impactedElement Impacted {@link DFDGDPRVertex}
+ * @param dataDictionary Data dictionary required to determine the labels that should be applied
+ * @param source {@link ContextDependentAttributeSource} that is impacting the element
+ * @param scenario {@link ContextDependentAttributeScenario} that is impacting the element
+ * @param targetedPerson {@link NaturalPerson} element from the GDPR model that is targeted by the CDA
+ */
+ public static void impactBehavior(Node element, DFDGDPRVertex impactedElement, DataDictionary dataDictionary, ContextDependentAttributeSource source,
+ ContextDependentAttributeScenario scenario, NaturalPerson targetedPerson) {
+ logger.debug("Modifying behavior for impacted element " + element.getEntityName());
- if (impactedElement.getReferencedElement()
- .getBehavior()
- .getOutPin()
+ List assignments = new ArrayList<>();
+ List targetedData = impactedElement.getOutgoingData()
.stream()
- .noneMatch(it -> it.getEntityName()
- .equals(targetedData.getEntityName()))) {
- logger.info("Scenario" + scenario.getName() + " does not impact " + impactedElement.getName());
- return impactedElement.getReferencedElement()
- .getBehavior();
- }
+ .filter(PersonalData.class::isInstance)
+ .map(PersonalData.class::cast)
+ .distinct()
+ .filter(it -> it.getDataReferences()
+ .contains(targetedPerson))
+ .toList();
- logger.debug("Impacting element " + impactedElement.getReferencedElement()
- .getEntityName());
- Behavior behaviour = datadictionaryFactory.eINSTANCE.createBehavior();
- dd.getBehavior()
- .add(behaviour);
+ for (PersonalData targetData : targetedData) {
+ UncertaintyUtils.impactBehavior(element, impactedElement, dataDictionary, source, scenario, targetData);
+ }
+ }
- if (impactedElement.getReferencedElement()
- .getBehavior()
- .getOutPin()
- .isEmpty()) {
- return behaviour;
+ /**
+ * Modify the behavior for an impacted element using the given CDA Source and Scenario on the given impacted data.
+ * @param element Original DFD {@link Node} that is impacted
+ * @param impactedElement Impacted {@link DFDGDPRVertex}
+ * @param dataDictionary Data dictionary required to determine the labels that should be applied
+ * @param source {@link ContextDependentAttributeSource} that is impacting the element
+ * @param scenario {@link ContextDependentAttributeScenario} that is impacting the element
+ * @param targetedData {@link Data} element from the GDPR model that is targeted by the CDA
+ */
+ public static void impactBehavior(Node element, DFDGDPRVertex impactedElement, DataDictionary dataDictionary, ContextDependentAttributeSource source,
+ ContextDependentAttributeScenario scenario, Data targetedData) {
+ if (element.getBehavior().getOutPin().stream()
+ .noneMatch(it -> it.getEntityName().equals(targetedData.getEntityName()))) {
+ logger.info("Scenario" + scenario.getName() + " does not impact " + impactedElement.getName());
+ return;
}
- behaviour.getInPin()
- .addAll(impactedElement.getReferencedElement()
- .getBehavior()
- .getInPin()
- .stream()
- .map(it -> EcoreUtil.copy(it))
- .toList());
- behaviour.getOutPin()
- .addAll(impactedElement.getReferencedElement()
- .getBehavior()
- .getOutPin()
- .stream()
- .map(it -> EcoreUtil.copy(it))
- .toList());
+ logger.debug("Modifying behavior for impacted element " + element.getEntityName());
- LabelType type = dd.getLabelTypes()
- .stream()
- .filter(it -> it.getEntityName()
- .equals(source.getPropertyType()
- .getEntityName()))
- .findAny()
- .orElseThrow();
- List values = new ArrayList<>();
- for (PropertyValue propertyValue : scenario.getPropertyValues()) {
- Label value = type.getLabel()
- .stream()
- .filter(it -> it.getEntityName()
- .equals(propertyValue.getEntityName()))
- .findAny()
- .orElseThrow();
- values.add(value);
+ if (element.getBehavior().getOutPin().isEmpty()) {
+ logger.debug("Behavior for the element will not be modified, as it does not output any data!");
+ return;
}
- List inputPins = behaviour.getInPin()
- .stream()
- .map(it -> EcoreUtil.copy(it))
- .toList();
- Pin outputPin = behaviour.getOutPin()
- .stream()
- .filter(it -> it.getEntityName()
+ Pin outputPin = element.getBehavior().getOutPin().stream().filter(it -> it.getEntityName()
.equals(targetedData.getEntityName()))
- .map(it -> EcoreUtil.copy(it))
.findAny()
.orElseThrow();
+ element.getBehavior().getAssignment().add(UncertaintyUtils.createImpactAssignment(element, outputPin, scenario, source, dataDictionary));
- List assignments = new ArrayList<>();
- Assignment attributeAssignment = datadictionaryFactory.eINSTANCE.createAssignment();
- attributeAssignment.setTerm(datadictionaryFactory.eINSTANCE.createTRUE());
- attributeAssignment.getInputPins()
- .addAll(inputPins);
- attributeAssignment.setOutputPin(outputPin);
- attributeAssignment.getOutputLabels()
- .addAll(values);
- assignments.add(attributeAssignment);
-
- // Set Natural Person Data attributes
for (Data data : impactedElement.getOutgoingData()) {
if (!(data instanceof PersonalData personalData)) {
continue;
}
- Assignment assignment = datadictionaryFactory.eINSTANCE.createAssignment();
- assignment.getInputPins()
- .addAll(inputPins);
- Pin dataOutputPin = impactedElement.getReferencedElement()
- .getBehavior()
- .getOutPin()
- .stream()
+ Pin dataOutputPin = element.getBehavior().getOutPin().stream()
.filter(pin -> pin.getEntityName()
.equals(personalData.getEntityName()))
.findAny()
- .orElseThrow();
- assignment.setOutputPin(dataOutputPin);
- assignment.setTerm(datadictionaryFactory.eINSTANCE.createTRUE());
- personalData.getDataReferences()
- .forEach(person -> {
- LabelType labelType = dd.getLabelTypes()
- .stream()
- .filter(it -> it.getEntityName()
- .equals("NaturalPerson"))
- .findAny()
- .orElseThrow();
- Label label = labelType.getLabel()
- .stream()
- .filter(it -> it.getEntityName()
- .equals(person.getEntityName()))
- .findAny()
- .orElseThrow();
- assignment.getOutputLabels()
- .add(label);
- });
- assignment.setEntityName("Send " + personalData.getEntityName());
- assignments.add(assignment);
+ .orElseThrow(() -> new IllegalArgumentException("Element does not have output pin named after outgoing personal data!"));
+ element.getBehavior().getAssignment().add(UncertaintyUtils.createPersonAssignment(element, dataOutputPin, personalData, dataDictionary));
}
- behaviour.getAssignment()
- .addAll(assignments);
- return behaviour;
}
- public static Behavior createBehavior(DFDGDPRVertex impactedElement, DataDictionary dd, ContextDependentAttributeSource source,
- ContextDependentAttributeScenario scenario, NaturalPerson targetedPerson) {
- logger.setLevel(Level.WARN);
- logger.debug("Impacting element " + impactedElement.getReferencedElement()
- .getEntityName());
- Behavior behaviour = datadictionaryFactory.eINSTANCE.createBehavior();
- dd.getBehavior()
- .add(behaviour);
-
- if (impactedElement.getReferencedElement()
- .getBehavior()
- .getOutPin()
- .isEmpty()) {
- return behaviour;
- }
+ /**
+ * Creates an assignment that applies the labels of a given source and scenario to the given element at the given pin
+ * @param element Element that the source and scenario is applied to
+ * @param outputPin Output pin of the element that is impacted
+ * @param scenario {@link ContextDependentAttributeScenario} that is applied
+ * @param source {@link ContextDependentAttributeSource} that is applied
+ * @param dataDictionary Data dictionary used to resolve the labels from the CDA source and scenario
+ * @return Returns an assignment that set the labels that the CDA source and scenario should set
+ */
+ private static Assignment createImpactAssignment(Node element, Pin outputPin, ContextDependentAttributeScenario scenario, ContextDependentAttributeSource source, DataDictionary dataDictionary) {
+ List values = UncertaintyUtils.getAppliedLabel(source, scenario, dataDictionary);
+ Assignment attributeAssignment = datadictionaryFactory.eINSTANCE.createAssignment();
+ attributeAssignment.setTerm(datadictionaryFactory.eINSTANCE.createTRUE());
+ attributeAssignment.getInputPins().addAll(element.getBehavior().getInPin());
+ attributeAssignment.setOutputPin(outputPin);
+ attributeAssignment.getOutputLabels().addAll(values);
+ return attributeAssignment;
+ }
- behaviour.getInPin()
- .addAll(impactedElement.getReferencedElement()
- .getBehavior()
- .getInPin()
- .stream()
- .map(it -> EcoreUtil.copy(it))
- .toList());
- behaviour.getOutPin()
- .addAll(impactedElement.getReferencedElement()
- .getBehavior()
- .getOutPin()
- .stream()
- .map(it -> EcoreUtil.copy(it))
- .toList());
+ /**
+ * Creates an assignment for a data pin that set the labels of the personal data and its corresponding natural person
+ * @param element Element that the assignment should be applied to
+ * @param dataOutputPin Output pin that the assignment should be applied to
+ * @param personalData Personal data that references natural persons
+ * @param dataDictionary Data dictionary used to resolve labels
+ * @return Returns an assignment that sets label for the natural persons for a given piece of personal data
+ */
+ private static Assignment createPersonAssignment(Node element, Pin dataOutputPin, PersonalData personalData, DataDictionary dataDictionary) {
+ Assignment assignment = datadictionaryFactory.eINSTANCE.createAssignment();
+ assignment.getInputPins().addAll(element.getBehavior().getInPin());
+ assignment.setOutputPin(dataOutputPin);
+ assignment.setTerm(datadictionaryFactory.eINSTANCE.createTRUE());
+ personalData.getDataReferences()
+ .forEach(person -> {
+ assignment.getOutputLabels().add(UncertaintyUtils.getLabelForNaturalPerson(dataDictionary, person));
+ });
+ assignment.setEntityName("Send " + personalData.getEntityName());
+ return assignment;
+ }
- List assignments = new ArrayList<>();
- List targetedData = impactedElement.getOutgoingData()
+ /**
+ * Determines the label for a given natural person
+ * @param dataDictionary Data dictionary used to resolve the labels
+ * @param person Natural person that the label should correspond to
+ * @return Returns the label that corresponds to the given natural person
+ */
+ private static Label getLabelForNaturalPerson(DataDictionary dataDictionary, NaturalPerson person) {
+ LabelType labelType = dataDictionary.getLabelTypes()
.stream()
- .filter(PersonalData.class::isInstance)
- .map(PersonalData.class::cast)
- .distinct()
- .filter(it -> it.getDataReferences()
- .contains(targetedPerson))
- .toList();
-
- for (PersonalData targetData : targetedData) {
- LabelType type = dd.getLabelTypes()
- .stream()
- .filter(it -> it.getEntityName()
- .equals(source.getPropertyType()
- .getEntityName()))
- .findAny()
- .orElseThrow();
- List values = new ArrayList<>();
- for (PropertyValue propertyValue : scenario.getPropertyValues()) {
- Label value = type.getLabel()
- .stream()
- .filter(it -> it.getEntityName()
- .equals(propertyValue.getEntityName()))
- .findAny()
- .orElseThrow();
- values.add(value);
- }
-
- List inputPins = behaviour.getInPin();
- Pin outputPin = behaviour.getOutPin()
- .stream()
- .filter(it -> it.getEntityName()
- .equals(targetData.getEntityName()))
- .findAny()
- .orElseThrow();
- Assignment attributeAssignment = datadictionaryFactory.eINSTANCE.createAssignment();
- attributeAssignment.setTerm(datadictionaryFactory.eINSTANCE.createTRUE());
- attributeAssignment.getInputPins()
- .addAll(inputPins);
- attributeAssignment.setOutputPin(outputPin);
- attributeAssignment.getOutputLabels()
- .addAll(values);
- assignments.add(attributeAssignment);
-
- // Set Natural Person Data attributes
- for (Data data : impactedElement.getOutgoingData()) {
- if (!(data instanceof PersonalData personalData)) {
- continue;
- }
- Assignment assignment = datadictionaryFactory.eINSTANCE.createAssignment();
- assignment.getInputPins()
- .addAll(inputPins);
- Pin dataOutputPin = impactedElement.getReferencedElement()
- .getBehavior()
- .getOutPin()
- .stream()
- .filter(pin -> pin.getEntityName()
- .equals(personalData.getEntityName()))
- .findAny()
- .orElseThrow();
- assignment.setOutputPin(dataOutputPin);
- assignment.setTerm(datadictionaryFactory.eINSTANCE.createTRUE());
- personalData.getDataReferences()
- .forEach(person -> {
- LabelType labelType = dd.getLabelTypes()
- .stream()
- .filter(it -> it.getEntityName()
- .equals("NaturalPerson"))
- .findAny()
- .orElseThrow();
- Label label = labelType.getLabel()
- .stream()
- .filter(it -> it.getEntityName()
- .equals(person.getEntityName()))
- .findAny()
- .orElseThrow();
- assignment.getOutputLabels()
- .add(label);
- });
- assignment.setEntityName("Send " + personalData.getEntityName());
- assignments.add(assignment);
- }
- behaviour.getAssignment()
- .addAll(assignments);
- }
- return behaviour;
+ .filter(it -> it.getEntityName().equals("NaturalPerson"))
+ .findAny()
+ .orElseThrow();
+ return labelType.getLabel()
+ .stream()
+ .filter(it -> it.getEntityName().equals(person.getEntityName()))
+ .findAny()
+ .orElseThrow();
}
- public static boolean matchesContextDefinition(DFDGDPRVertex vertex, ContextDefinition contextDefinition) {
- return contextDefinition.getGdprElements()
+ /**
+ * Determines whether the given scope is matched by the given vertex element
+ * @param vertex Vertex element that the scope is matched against
+ * @param scope Scope that is checked
+     * @return Returns true, if the scope is applicable at the given vertex. Otherwise, the method returns false.
+ */
+ public static boolean scopeApplicable(DFDGDPRVertex vertex, Scope scope) {
+ return scope.getLafScopeElements()
.stream()
- .allMatch(it -> UncertaintyUtils.matchesContextElement(vertex, it));
+ .allMatch(it -> UncertaintyUtils.scopeElementApplicable(vertex, it));
}
- public static boolean matchesContextElement(DFDGDPRVertex vertex, GDPRContextElement contextElement) {
- boolean matches = vertex.getRelatedElements()
- .contains(contextElement.getGdprElement());
- if (contextElement.isNegated()) {
+ /**
+ * Determines whether a scope element is matched by the given vertex
+ * @param vertex Given vertex element that the scope element is matched against
+ * @param scopeElement Scope element that is checked
+ * @return Returns true, if the given scope element matches the vertex. Otherwise, the method returns false.
+ */
+ public static boolean scopeElementApplicable(DFDGDPRVertex vertex, LAFScopeElement scopeElement) {
+ boolean matches = vertex.getRelatedElements().contains(scopeElement.getLafElement());
+ if (scopeElement.isNegated()) {
return !matches;
} else {
return matches;
@@ -288,10 +184,13 @@ public static boolean matchesContextElement(DFDGDPRVertex vertex, GDPRContextEle
}
/**
- * Determines when a CDA should be reapplied at a vertex This happens in the following cases> - Initial match of the CDA
- * - Context change
- * @param vertex
- * @return
+ * Determines when a CDA should be reapplied at a vertex.
+ * This happens in the following cases:
+ * - Initial application of the CDA
+ * - Context between vertices has changed
+ * @param matchingVertices Total list of vertices that have been matched
+     * @param vertex Vertex that is checked
+ * @return Returns true, if the CDA should be reapplied at the given vertex. Otherwise, the method returns false.
*/
public static boolean shouldReapply(List matchingVertices, DFDGDPRVertex vertex) {
if (vertex.getPreviousElements()
@@ -299,14 +198,39 @@ public static boolean shouldReapply(List matchingVertices, DFDGDP
.noneMatch(matchingVertices::contains)) {
return true;
}
- if (vertex.getPreviousElements()
+ return vertex.getPreviousElements()
.stream()
.filter(DFDGDPRVertex.class::isInstance)
.map(DFDGDPRVertex.class::cast)
.noneMatch(it -> it.getResponsibilityRole()
- .equals(vertex.getResponsibilityRole()))) {
- return true;
+ .equals(vertex.getResponsibilityRole()));
+ }
+
+ /**
+ * Returns a list of Labels that should be applied for the given CDA source and scenario
+ * @param source {@link ContextDependentAttributeSource} that is applied
+ * @param scenario {@link ContextDependentAttributeScenario} that is applied
+ * @param dataDictionary Data dictionary used to resolve the labels
+ * @return Returns a list of labels that are applied by the given CDA source and scenario
+ */
+ public static List getAppliedLabel(ContextDependentAttributeSource source, ContextDependentAttributeScenario scenario, DataDictionary dataDictionary) {
+ LabelType labelType = dataDictionary.getLabelTypes()
+ .stream()
+ .filter(it -> it.getEntityName()
+ .equals(source.getScopeDependentAssessmentFact()
+ .getEntityName()))
+ .findAny()
+ .orElseThrow();
+ List labels = new ArrayList<>();
+ for (Expression expression : scenario.getExpressions()) {
+ Label label = labelType.getLabel()
+ .stream()
+ .filter(it -> it.getEntityName()
+ .equals(expression.getEntityName()))
+ .findAny()
+ .orElseThrow();
+ labels.add(label);
}
- return false;
+ return labels;
}
}
diff --git a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/ContextAttributeState.java b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/ContextAttributeState.java
index 2c91eac..3706205 100644
--- a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/ContextAttributeState.java
+++ b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/ContextAttributeState.java
@@ -1,31 +1,42 @@
package mdpa.gdpr.analysis.core;
+import mdpa.gdpr.analysis.dfd.DFDGDPRVertex;
+
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
-public class ContextAttributeState {
- private final List selectedScenarios;
-
+/**
+ * This class models a state for a {@link mdpa.gdpr.analysis.dfd.DFDGDPRTransposeFlowGraph} that has selected the stored
+ * {@link ContextDependentAttributeScenario}
+ */
+public record ContextAttributeState(List selectedScenarios) {
+ /**
+ * Creates a new {@link ContextAttributeState} using the given list of selected
+ * {@link ContextDependentAttributeScenario}
+ * @param selectedScenarios List of selected {@link ContextDependentAttributeScenario}
+ */
public ContextAttributeState(List selectedScenarios) {
this.selectedScenarios = new ArrayList<>(selectedScenarios);
}
- public ContextAttributeState(ContextDependentAttributeScenario... selectedScenarios) {
- this(List.of(selectedScenarios));
- }
-
- public List getContextAttributeSources() {
- return this.selectedScenarios.stream()
- .map(ContextDependentAttributeScenario::getContextDependentAttributeSource)
- .toList();
- }
-
- public List getSelectedScenarios() {
+ /**
+ * Returns the selected {@link ContextDependentAttributeScenario} that are selected by the {@link ContextAttributeState}
+ * @return Returns selected {@link ContextDependentAttributeScenario}
+ */
+ @Override
+ public List selectedScenarios() {
return Collections.unmodifiableList(this.selectedScenarios);
}
+ /**
+ * Create all possible {@link ContextAttributeState} that are possible to create from the given list of
+ * {@link ContextDependentAttributeSource}
+ * @param contextDependentAttributeSources Given list of {@link ContextDependentAttributeSource} that are used in
+ * finding all {@link ContextAttributeState}
+ * @return Returns a list of all possible {@link ContextAttributeState}
+ */
public static List createAllContextAttributeStates(
List contextDependentAttributeSources) {
List> scenarios = new ArrayList<>();
@@ -38,6 +49,23 @@ public static List createAllContextAttributeStates(
.toList();
}
+ /**
+ * Determines whether the context attribute state cannot handle the given vertex.
+ * This is the case, when all stored scenarios cannot be applied to the vertex.
+ * @param vertex Given vertex
+ * @return Returns true, if the state cannot handle the vertex. Otherwise, the method returns false.
+ */
+ public boolean doesNotHandle(DFDGDPRVertex vertex) {
+ return this.selectedScenarios.stream()
+ .noneMatch(it -> it.applicable(vertex));
+ }
+
+ /**
+     * Calculates the Cartesian product between the given lists
+ * @param lists List of lists that should be used when calculating the Cartesian product
+ * @param Type of the list elements
+ * @return Returns the Cartesian product of the provided lists
+ */
private static List> cartesianProduct(List> lists) {
List> result = new ArrayList<>();
if (lists == null || lists.isEmpty()) {
@@ -63,7 +91,7 @@ private static List> cartesianProduct(List> lists) {
@Override
public String toString() {
String scenarios = this.selectedScenarios.stream()
- .map(it -> it.getName())
+ .map(ContextDependentAttributeScenario::getName)
.collect(Collectors.joining(","));
return "[" + scenarios + "]";
}
diff --git a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/ContextDependentAttributeScenario.java b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/ContextDependentAttributeScenario.java
index f9d216f..84b468d 100644
--- a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/ContextDependentAttributeScenario.java
+++ b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/ContextDependentAttributeScenario.java
@@ -4,66 +4,92 @@
import mdpa.gdpr.analysis.UncertaintyUtils;
import mdpa.gdpr.analysis.dfd.DFDGDPRTransposeFlowGraph;
import mdpa.gdpr.analysis.dfd.DFDGDPRVertex;
-import mdpa.gdpr.metamodel.contextproperties.ContextAnnotation;
-import mdpa.gdpr.metamodel.contextproperties.ContextDefinition;
-import mdpa.gdpr.metamodel.contextproperties.PropertyValue;
+import mdpa.gdpr.metamodel.contextproperties.Expression;
+import mdpa.gdpr.metamodel.contextproperties.Scope;
+import mdpa.gdpr.metamodel.contextproperties.ScopeSet;
import org.apache.log4j.Logger;
+import org.dataflowanalysis.analysis.utils.LoggerManager;
+/**
+ * Models a Context Dependent Attribute Scenario that applies the given list of property values.
+ *
+ * As a Context Dependent Attribute Scenario can occur in two forms, we differentiate:
+ */
public class ContextDependentAttributeScenario {
- private final Logger logger = Logger.getLogger(ContextDependentAttributeScenario.class);
+ private final Logger logger = LoggerManager.getLogger(ContextDependentAttributeScenario.class);
private final String name;
- private final TransformationManager transformationManager;
- private final List propertyValues;
- private final List context;
+ private final List expressions;
+ private final List scopes;
private final List sources;
private final ContextDependentAttributeSource contextDependentAttributeSource;
private final boolean resolvedUncertainty;
- public ContextDependentAttributeScenario(ContextAnnotation contextAnnotation, ContextDependentAttributeSource contextDependentAttributeSource) {
- this.name = contextAnnotation.getEntityName();
- this.transformationManager = new TransformationManager();
- this.propertyValues = contextAnnotation.getPropertyvalue();
- this.context = contextAnnotation.getContextdefinition();
+ /**
+ * Creates a new context dependent attribute scenario that matches a specific context. Therefore, it does not resolve an
+ * uncertain CDA
+ * @param scopeSet {@link ScopeSet} the Scenario requires
+ * @param contextDependentAttributeSource Corresponding {@link ContextDependentAttributeSource}
+ */
+ public ContextDependentAttributeScenario(ScopeSet scopeSet, ContextDependentAttributeSource contextDependentAttributeSource) {
+ this.name = scopeSet.getEntityName();
+ this.expressions = scopeSet.getExpression();
+ this.scopes = scopeSet.getScope();
this.sources = List.of();
this.contextDependentAttributeSource = contextDependentAttributeSource;
this.resolvedUncertainty = false;
}
- public ContextDependentAttributeScenario(PropertyValue propertyValue, ContextDependentAttributeSource contextDependentAttributeSource,
+ /**
+ * Creates a new {@link ContextDependentAttributeScenario} that is resolving an uncertainty. Therefore, it requires an
+ * expression that is applied, the corresponding {@link ContextDependentAttributeSource} and a list of other
+ * {@link ContextDependentAttributeSource} that contradict the uncertain CDA
+ * @param expression Expression that is applied, when this scenario is applied
+ * @param contextDependentAttributeSource Corresponding {@link ContextDependentAttributeSource}
+ * @param sources Other {@link ContextDependentAttributeSource} that must not be true
+ */
+ public ContextDependentAttributeScenario(Expression expression, ContextDependentAttributeSource contextDependentAttributeSource,
List sources) {
- this.name = propertyValue.getEntityName() + "@" + contextDependentAttributeSource.getName();
- this.transformationManager = new TransformationManager();
- this.propertyValues = List.of(propertyValue);
- this.context = List.of();
+ this.name = expression.getEntityName() + "@" + contextDependentAttributeSource.getName();
+ this.expressions = List.of(expression);
+ this.scopes = List.of();
this.sources = sources;
this.contextDependentAttributeSource = contextDependentAttributeSource;
this.resolvedUncertainty = true;
}
+ /**
+ * Returns whether the {@link ContextDependentAttributeScenario} is applicable to the given vertex
+ * @param vertex {@link DFDGDPRVertex} that is checked
+ * @return Returns true, if the scenario should be applied to the vertex. Otherwise, the method returns false
+ */
public boolean applicable(DFDGDPRVertex vertex) {
- logger.info("Determining whether " + this.name + " can be applied to " + vertex);
+ logger.trace("Determining whether " + this.name + " can be applied to " + vertex);
+ if (!vertex.getRelatedElements().contains(this.contextDependentAttributeSource.getAnnotation().getAnnotatedElement())) {
+ logger.trace("Cannot apply " + this.name + " to vertex, as it does not have the needed elements in context!");
+ return false;
+ }
if (this.resolvedUncertainty) {
if (!this.contextDependentAttributeSource.applicable(vertex)) {
return false;
}
- logger.info("Context Depdendent Attribute Scenario is resolved with uncertainties!");
+ logger.trace("Context Dependent Attribute Scenario is resolved with uncertainties!");
return this.sources.stream()
- .noneMatch(it -> {
- logger.info("Should not match: " + it.getContextDependentAttributeScenarios()
- .get(0)
- .getName());
- var scenario = it.getContextDependentAttributeScenarios()
- .get(0);
- logger.info("Result: " + scenario.applicable(vertex));
- return scenario.applicable(vertex);
- });
+ .map(it -> it.getContextDependentAttributeScenarios().get(0))
+ .noneMatch(it -> it.applicable(vertex));
}
- return this.context.stream()
- .anyMatch(it -> UncertaintyUtils.matchesContextDefinition(vertex, it));
+ return this.scopes.stream()
+ .anyMatch(it -> UncertaintyUtils.scopeApplicable(vertex, it));
}
+ /**
+ * Determines whether the {@link ContextDependentAttributeScenario} is applicable to any of the nodes in the given
+ * transpose flow graph
+ * @param transposeFlowGraph {@link DFDGDPRTransposeFlowGraph} that is checked
+ * @return Returns true, if the {@link ContextDependentAttributeScenario} can be applied to any of the vertices in the
+ * TFG. Otherwise, the method returns false
+ */
public boolean applicable(DFDGDPRTransposeFlowGraph transposeFlowGraph) {
if (this.resolvedUncertainty) {
return this.sources.stream()
@@ -77,21 +103,29 @@ public boolean applicable(DFDGDPRTransposeFlowGraph transposeFlowGraph) {
.stream()
.filter(DFDGDPRVertex.class::isInstance)
.map(DFDGDPRVertex.class::cast)
- .anyMatch(it -> this.applicable(it));
- }
-
- public List getPropertyValues() {
- return this.propertyValues;
+ .anyMatch(this::applicable);
}
- public boolean resolvedByUncertainty() {
- return this.resolvedUncertainty;
+ /**
+ * Returns the expressions that are applied when this scenario is fulfilled
+ * @return Returns a list of {@link Expression} that are applied in the case the scenario is true
+ */
+ public List getExpressions() {
+ return this.expressions;
}
+ /**
+ * Retrieves the {@link ContextDependentAttributeSource} that this scenario is a part of
+ * @return Returns the parent {@link ContextDependentAttributeSource}
+ */
public ContextDependentAttributeSource getContextDependentAttributeSource() {
return contextDependentAttributeSource;
}
+ /**
+ * Returns the name of the {@link ContextDependentAttributeScenario}
+ * @return The name of the {@link ContextDependentAttributeScenario}
+ */
public String getName() {
return name;
}
diff --git a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/ContextDependentAttributeSource.java b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/ContextDependentAttributeSource.java
index aa818c1..0cd22ef 100644
--- a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/ContextDependentAttributeSource.java
+++ b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/ContextDependentAttributeSource.java
@@ -2,60 +2,99 @@
import java.util.Collection;
import java.util.List;
+
import mdpa.gdpr.analysis.UncertaintyUtils;
import mdpa.gdpr.analysis.dfd.DFDGDPRVertex;
import mdpa.gdpr.metamodel.GDPR.AbstractGDPRElement;
-import mdpa.gdpr.metamodel.contextproperties.ContextAnnotation;
-import mdpa.gdpr.metamodel.contextproperties.ContextDefinition;
-import mdpa.gdpr.metamodel.contextproperties.Property;
-import mdpa.gdpr.metamodel.contextproperties.PropertyAnnotation;
-import mdpa.gdpr.metamodel.contextproperties.PropertyValue;
+import mdpa.gdpr.metamodel.contextproperties.Expression;
+import mdpa.gdpr.metamodel.contextproperties.SAFAnnotation;
+import mdpa.gdpr.metamodel.contextproperties.Scope;
+import mdpa.gdpr.metamodel.contextproperties.ScopeDependentAssessmentFact;
+import mdpa.gdpr.metamodel.contextproperties.ScopeSet;
+/**
+ * This class models an application of a context dependent attribute on an element in the GDPR model. The different
+ * values it can take are saved in one or multiple child {@link ContextDependentAttributeScenario}.
+ */
public class ContextDependentAttributeSource {
private final String name;
private final AbstractGDPRElement annotatedElement;
- private final Property propertyType;
+ private final ScopeDependentAssessmentFact scopeDependentAssessmentFact;
private final List contextDependentAttributeScenarios;
- private final List context;
+ private final SAFAnnotation annotation;
+ private final List scopes;
private final List sources;
private final boolean resolvedUncertainty;
- public ContextDependentAttributeSource(PropertyAnnotation propertyAnnotation, ContextAnnotation contextAnnotation) {
- this.name = contextAnnotation.getEntityName() + "@" + propertyAnnotation.getEntityName();
- this.annotatedElement = propertyAnnotation.getAnnotatedElement();
- this.propertyType = propertyAnnotation.getProperty();
- this.contextDependentAttributeScenarios = List.of(new ContextDependentAttributeScenario(contextAnnotation, this));
- this.context = contextAnnotation.getContextdefinition();
+ /**
+ * Creates a new {@link ContextDependentAttributeSource} with the given property annotation containing the information
+ * about the annotated element and value and the context annotation describing the context of the
+ * {@link ContextDependentAttributeSource}
+ * @param safAnnotation {@link SAFAnnotation} describing where the CDA is applied
+ * @param scopeSet {@link ScopeSet} describing which scenarios the source has
+ */
+ public ContextDependentAttributeSource(SAFAnnotation safAnnotation, ScopeSet scopeSet) {
+ this.name = scopeSet.getEntityName() + "@" + safAnnotation.getEntityName();
+ this.annotation = safAnnotation;
+ this.annotatedElement = safAnnotation.getAnnotatedElement();
+ this.scopeDependentAssessmentFact = safAnnotation.getScopeDependentAssessmentFact();
+ this.contextDependentAttributeScenarios = List.of(new ContextDependentAttributeScenario(scopeSet, this));
+ this.scopes = scopeSet.getScope();
this.sources = List.of();
this.resolvedUncertainty = false;
}
- public ContextDependentAttributeSource(PropertyAnnotation propertyAnnotation, List values,
+ /**
+ * Creates a new {@link ContextDependentAttributeSource} that needs to be resolved with uncertain CDAs. Resolves an
+ * uncertainty regarding the value of an {@link ContextDependentAttributeSource} by creating a scenario for each passed
+ * {@link Expression}. Additionally, the given list of other {@link ContextDependentAttributeSource} denotes where
+ * this source cannot apply
+ * @param safAnnotation {@link SAFAnnotation} containing information about the annotated element and value
+ * @param expressions Different {@link Expression} that are resolved by the uncertainty
+ * @param sources List of {@link ContextDependentAttributeSource} that cannot be applied at the same time
+ */
+ public ContextDependentAttributeSource(SAFAnnotation safAnnotation, List expressions,
List sources) {
- this.name = "Unknown@" + propertyAnnotation.getEntityName();
- this.annotatedElement = propertyAnnotation.getAnnotatedElement();
- this.propertyType = propertyAnnotation.getProperty();
- this.contextDependentAttributeScenarios = values.stream()
+ this.name = "Unknown@" + safAnnotation.getEntityName();
+ this.annotation = safAnnotation;
+ this.annotatedElement = safAnnotation.getAnnotatedElement();
+ this.scopeDependentAssessmentFact = safAnnotation.getScopeDependentAssessmentFact();
+ this.contextDependentAttributeScenarios = expressions.stream()
.map(it -> new ContextDependentAttributeScenario(it, this, sources))
.toList();
- this.context = List.of();
+ this.scopes = List.of();
this.sources = sources;
this.resolvedUncertainty = true;
}
+ /**
+ * Determines whether this {@link ContextDependentAttributeSource} is applicable to the given list of vertices
+ * @param vertices Given list of vertices
+ * @return Returns true, if this {@link ContextDependentAttributeSource} is applicable to at least one of the vertices.
+ * Otherwise, the method returns false.
+ */
public boolean applicable(Collection vertices) {
if (!vertices.stream()
- .map(it -> it.getRelatedElements())
+ .map(DFDGDPRVertex::getRelatedElements)
.flatMap(List::stream)
.toList()
.contains(this.annotatedElement)) {
return false;
}
return vertices.stream()
- .anyMatch(it -> this.applicable(it));
+ .anyMatch(this::applicable);
}
+ /**
+ * Determines whether the {@link ContextDependentAttributeSource} is applicable at the given vertex.
+ *
+ * This is the case, if the vertex has the annotated element in its context. If this
+ * {@link ContextDependentAttributeSource} is resolving an uncertainty, the other saved sources must not match. If this
+ * source is not resolving an uncertainty, it must match at least one scope
+ * @param vertex Given {@link DFDGDPRVertex} that is checked
+ * @return Returns true, if the source is applicable to the vertex. Otherwise, the method returns false
+ */
public boolean applicable(DFDGDPRVertex vertex) {
if (!vertex.getRelatedElements()
.contains(this.annotatedElement)) {
@@ -65,26 +104,50 @@ public boolean applicable(DFDGDPRVertex vertex) {
return this.sources.stream()
.noneMatch(it -> it.applicable(vertex));
}
- return this.context.stream()
- .anyMatch(it -> UncertaintyUtils.matchesContextDefinition(vertex, it));
+ return this.scopes.stream()
+ .anyMatch(it -> UncertaintyUtils.scopeApplicable(vertex, it));
}
- public Property getPropertyType() {
- return propertyType;
+ /**
+ * Returns the scope dependent assessment fact that will be applied if the source is applicable
+ * @return Returns the applied {@link ScopeDependentAssessmentFact}
+ */
+ public ScopeDependentAssessmentFact getScopeDependentAssessmentFact() {
+ return scopeDependentAssessmentFact;
}
+ /**
+ * Returns the different possible {@link ContextDependentAttributeScenario} of this source
+ * @return List of possible {@link ContextDependentAttributeScenario}
+ */
public List getContextDependentAttributeScenarios() {
return contextDependentAttributeScenarios;
}
+ /**
+ * Returns the {@link AbstractGDPRElement} the source is annotated to
+ * @return Annotated {@link AbstractGDPRElement}
+ */
public AbstractGDPRElement getAnnotatedElement() {
return annotatedElement;
}
+ /**
+ * Returns the name of the {@link ContextDependentAttributeSource}
+ * @return Returns the name of the source
+ */
public String getName() {
return name;
}
+ /**
+ * Returns the {@link SAFAnnotation} that the {@link ContextDependentAttributeSource} corresponds to
+ * @return Corresponding {@link SAFAnnotation}
+ */
+ public SAFAnnotation getAnnotation() {
+ return annotation;
+ }
+
@Override
public String toString() {
return this.getName();
diff --git a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/TransformationManager.java b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/TransformationManager.java
index a958666..4e4db64 100644
--- a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/TransformationManager.java
+++ b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/TransformationManager.java
@@ -5,139 +5,137 @@
import java.util.List;
import java.util.Map;
import java.util.Optional;
+
import mdpa.gdpr.analysis.dfd.DataFlowDiagramAndDataDictionary;
import mdpa.gdpr.dfdconverter.GDPR2DFD;
-import mdpa.gdpr.metamodel.GDPR.AbstractGDPRElement;
-import mdpa.gdpr.metamodel.GDPR.Collecting;
+import mdpa.gdpr.dfdconverter.tracemodel.tracemodel.NodeTrace;
+import mdpa.gdpr.dfdconverter.tracemodel.tracemodel.TraceModel;
import mdpa.gdpr.metamodel.GDPR.LegalAssessmentFacts;
import mdpa.gdpr.metamodel.GDPR.Processing;
-import mdpa.gdpr.metamodel.GDPR.Storing;
-import mdpa.gdpr.metamodel.contextproperties.ContextDependentProperties;
-import mdpa.gdpr.metamodel.contextproperties.Property;
-import mdpa.gdpr.metamodel.contextproperties.PropertyAnnotation;
-import mdpa.gdpr.metamodel.contextproperties.PropertyValue;
+import mdpa.gdpr.metamodel.contextproperties.Expression;
+import mdpa.gdpr.metamodel.contextproperties.SAFAnnotation;
+import mdpa.gdpr.metamodel.contextproperties.ScopeDependentAssessmentFact;
+import mdpa.gdpr.metamodel.contextproperties.ScopeDependentAssessmentFacts;
import org.apache.log4j.Logger;
import org.dataflowanalysis.dfd.datadictionary.DataDictionary;
-import org.dataflowanalysis.dfd.datadictionary.ForwardingAssignment;
import org.dataflowanalysis.dfd.datadictionary.Label;
import org.dataflowanalysis.dfd.datadictionary.LabelType;
import org.dataflowanalysis.dfd.datadictionary.datadictionaryFactory;
-import org.dataflowanalysis.dfd.dataflowdiagram.DataFlowDiagram;
import org.dataflowanalysis.dfd.dataflowdiagram.Node;
+/**
+ * Manages the transformation from GDPR to DFD that is required to find
+ * {@link org.dataflowanalysis.analysis.core.AbstractTransposeFlowGraph} for the analysis.
+ */
public class TransformationManager {
private final Logger logger = Logger.getLogger(TransformationManager.class);
- private final Map> relatedElementMapping;
- private final Map gdprToDFDMapping;
private final Map dfdToGDPRMapping;
private final List contextDependentAttributes;
+ /**
+ * Creates a new empty {@link TransformationManager}
+ */
public TransformationManager() {
- this.relatedElementMapping = new HashMap<>();
- this.gdprToDFDMapping = new HashMap<>();
this.dfdToGDPRMapping = new HashMap<>();
this.contextDependentAttributes = new ArrayList<>();
}
/**
- * Converts model to DFD and saves tracemodel
- * @param gdprModel
- * @return
+ * Converts model to DFD and saves trace model
+ * @param gdprModel Input GDPR Model
+ * @param scopeDependentAssessmentFacts Input context property model
+ * @return Returns the data flow diagram and data dictionary of the converted model
*/
- public DataFlowDiagramAndDataDictionary transform(LegalAssessmentFacts gdprModel, ContextDependentProperties contextDependentProperties) {
+ public DataFlowDiagramAndDataDictionary transform(LegalAssessmentFacts gdprModel, ScopeDependentAssessmentFacts scopeDependentAssessmentFacts) {
GDPR2DFD converter = new GDPR2DFD(gdprModel);
converter.transform();
- processTransformation(converter.getDataFlowDiagram(), converter.getDataDictionary(), gdprModel);
- processContextDependentAttributes(contextDependentProperties, converter.getDataDictionary());
+ this.processTraceModel(converter.getGDPR2DFDTrace());
+ this.generateAssessmentFactLabels(scopeDependentAssessmentFacts, converter.getDataDictionary());
+ this.processContextDependentAttributes(scopeDependentAssessmentFacts);
return new DataFlowDiagramAndDataDictionary(converter.getDataFlowDiagram(), converter.getDataDictionary());
}
- private void processTransformation(DataFlowDiagram dfd, DataDictionary dd, LegalAssessmentFacts gdprModel) {
- List nodes = dfd.getNodes();
- for (Node node : nodes) {
- Processing gdprElement = gdprModel.getProcessing()
- .stream()
- .filter(it -> it.getId()
- .equals(node.getId()))
- .findAny()
- .orElseThrow();
- this.addMapping(gdprElement, node);
+ /**
+ * Uses the information from the trace model to generate important mappings used in the analysis
+ * @param traceModel Produced trace model from the transformation
+ */
+ private void processTraceModel(TraceModel traceModel) {
+ for (NodeTrace nodeTrace : traceModel.getNodeTraces()) {
+ this.addMapping(nodeTrace.getGdprProcessing(), nodeTrace.getDfdNode());
}
}
- private void processContextDependentAttributes(ContextDependentProperties propertyModel, DataDictionary dd) {
- for (Property property : propertyModel.getProperty()) {
- LabelType type = datadictionaryFactory.eINSTANCE.createLabelType();
- type.setEntityName(property.getEntityName());
- type.setId(property.getId());
- dd.getLabelTypes()
- .add(type);
- for (PropertyValue propertyValue : property.getPropertyvalue()) {
- Label label = datadictionaryFactory.eINSTANCE.createLabel();
- label.setEntityName(propertyValue.getEntityName());
- label.setId(propertyValue.getId());
- type.getLabel()
- .add(label);
- }
- }
- for (PropertyAnnotation propertyAnnotation : propertyModel.getPropertyannotation()) {
- if (propertyAnnotation.getContextannotation()
+ /**
+ * Creates the {@link ContextDependentAttributeSource}s and {@link ContextDependentAttributeScenario} for the context
+ * property model
+ * @param scopeDependentAssessmentFacts Context Property Model of the transformation
+ */
+ private void processContextDependentAttributes(ScopeDependentAssessmentFacts scopeDependentAssessmentFacts) {
+ for (SAFAnnotation safAnnotation : scopeDependentAssessmentFacts.getSafAnnotation()) {
+ if (safAnnotation.getScopeSet()
.isEmpty()) {
- this.contextDependentAttributes.add(new ContextDependentAttributeSource(propertyAnnotation, propertyAnnotation.getProperty()
- .getPropertyvalue(), List.of()));
+ this.contextDependentAttributes.add(new ContextDependentAttributeSource(safAnnotation, safAnnotation.getScopeDependentAssessmentFact()
+ .getExpression(), List.of()));
} else {
List sources = new ArrayList<>();
- propertyAnnotation.getContextannotation()
- .stream()
+ safAnnotation.getScopeSet()
.forEach(it -> {
- var source = new ContextDependentAttributeSource(propertyAnnotation, it);
+ var source = new ContextDependentAttributeSource(safAnnotation, it);
sources.add(source);
this.contextDependentAttributes.add(source);
});
- this.contextDependentAttributes.add(new ContextDependentAttributeSource(propertyAnnotation, propertyAnnotation.getProperty()
- .getPropertyvalue(), sources));
+ this.contextDependentAttributes.add(new ContextDependentAttributeSource(safAnnotation, safAnnotation.getScopeDependentAssessmentFact()
+ .getExpression(), sources));
}
}
logger.info("Parsed " + this.contextDependentAttributes.size() + " CDA!");
}
- private void addAssignments(DataFlowDiagram dfd, DataDictionary dd) {
- for (Node node : dfd.getNodes()) {
- Processing gdprElement = this.getElement(node)
- .orElseThrow();
- if (!(gdprElement instanceof Storing) && !(gdprElement instanceof Collecting)) {
- ForwardingAssignment assignment = datadictionaryFactory.eINSTANCE.createForwardingAssignment();
- assignment.getInputPins()
- .addAll(node.getBehavior()
- .getInPin());
- if (!node.getBehavior()
- .getOutPin()
- .isEmpty()) {
- assignment.setOutputPin(node.getBehavior()
- .getOutPin()
- .get(0));
- }
- node.getBehavior()
- .getAssignment()
- .add(assignment);
+ /**
+ * Generate the required labels for the given {@link ScopeDependentAssessmentFacts} in the given {@link DataDictionary}
+ * @param scopeDependentAssessmentFacts Scope Dependent Assessment Facts used in determining the required label
+ * @param dataDictionary Destination data dictionary into which the labels are created
+ */
+ private void generateAssessmentFactLabels(ScopeDependentAssessmentFacts scopeDependentAssessmentFacts, DataDictionary dataDictionary) {
+ for (ScopeDependentAssessmentFact scopeDependentAssessmentFact : scopeDependentAssessmentFacts.getScopeDependentAssessmentFact()) {
+ LabelType type = datadictionaryFactory.eINSTANCE.createLabelType();
+ type.setEntityName(scopeDependentAssessmentFact.getEntityName());
+ type.setId(scopeDependentAssessmentFact.getId());
+ dataDictionary.getLabelTypes()
+ .add(type);
+ for (Expression expression : scopeDependentAssessmentFact.getExpression()) {
+ Label label = datadictionaryFactory.eINSTANCE.createLabel();
+ label.setEntityName(expression.getEntityName());
+ label.setId(expression.getId());
+ type.getLabel()
+ .add(label);
}
}
}
+ /**
+ * Adds a new mapping between the given GDPR {@link Processing} element and the DFD {@link Node}
+ * @param gdprElement Given {@link Processing} element
+ * @param dfdElement Given {@link Node} element
+ */
private void addMapping(Processing gdprElement, Node dfdElement) {
- this.gdprToDFDMapping.put(gdprElement, dfdElement);
this.dfdToGDPRMapping.put(dfdElement, gdprElement);
}
+ /**
+ * Returns the GDPR {@link Processing} element that corresponds to the given node
+ * @param node Given DFD {@link Node}
+ * @return Returns the {@link Processing} element, if one exists
+ */
public Optional getElement(Node node) {
return Optional.ofNullable(this.dfdToGDPRMapping.get(node));
}
- public Optional getElement(Processing gdprElement) {
- return Optional.ofNullable(this.gdprToDFDMapping.get(gdprElement));
- }
-
+ /**
+ * Returns the list of {@link ContextDependentAttributeSource} that were parsed by the transformation
+ * @return Returns the list of parsed context dependent attributes from the metamodel instance
+ */
public List getContextDependentAttributes() {
return this.contextDependentAttributes;
}
diff --git a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/resource/GDPRResourceProvider.java b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/resource/GDPRResourceProvider.java
index 7da908e..e43ee15 100644
--- a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/resource/GDPRResourceProvider.java
+++ b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/resource/GDPRResourceProvider.java
@@ -1,24 +1,53 @@
package mdpa.gdpr.analysis.core.resource;
import mdpa.gdpr.analysis.core.TransformationManager;
+import mdpa.gdpr.metamodel.GDPR.GDPRPackage;
import mdpa.gdpr.metamodel.GDPR.LegalAssessmentFacts;
-import mdpa.gdpr.metamodel.contextproperties.ContextDependentProperties;
+import mdpa.gdpr.metamodel.contextproperties.ContextpropertiesPackage;
+import mdpa.gdpr.metamodel.contextproperties.ScopeDependentAssessmentFacts;
import org.dataflowanalysis.analysis.resource.ResourceProvider;
+import org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl;
+/**
+ * A {@link ResourceProvider} providing the necessary resources to run a
+ * {@link mdpa.gdpr.analysis.GDPRLegalAssessmentAnalysis}
+ */
public abstract class GDPRResourceProvider extends ResourceProvider {
@Override
public void setupResources() {
-
+ this.resources.getPackageRegistry()
+ .put(GDPRPackage.eNS_URI, GDPRPackage.eINSTANCE);
+ this.resources.getResourceFactoryRegistry()
+ .getExtensionToFactoryMap()
+ .put(GDPRPackage.eNAME, new XMIResourceFactoryImpl());
+ this.resources.getPackageRegistry()
+ .put(ContextpropertiesPackage.eNS_URI, ContextpropertiesPackage.eINSTANCE);
+ this.resources.getResourceFactoryRegistry()
+ .getExtensionToFactoryMap()
+ .put(ContextpropertiesPackage.eNAME, new XMIResourceFactoryImpl());
}
- public abstract LegalAssessmentFacts getModel();
+ /**
+ * Returns the loaded GDPR model
+ * @return Returns the GDPR model that is loaded by the resource provider
+ */
+ public abstract LegalAssessmentFacts getGDPRModel();
- public abstract ContextDependentProperties getContextDependentProperties();
+ /**
+ * Returns the {@link ScopeDependentAssessmentFacts} metamodel that is required to run a
+ * {@link mdpa.gdpr.analysis.GDPRLegalAssessmentAnalysis}
+ * @return Returns the loaded Context Property model
+ */
+ public abstract ScopeDependentAssessmentFacts getScopeDependentAssessmentFacts();
+ /**
+ * Returns the transformation manager that should be used for the transformation from GDPR to DFD
+ * @return Returns the {@link TransformationManager} of the running analysis
+ */
public abstract TransformationManager getTransformationManager();
@Override
public boolean sufficientResourcesLoaded() {
- return this.getModel() != null;
+ return this.getGDPRModel() != null;
}
}
diff --git a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/resource/GDPRURIResourceProvider.java b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/resource/GDPRURIResourceProvider.java
index 74b493c..6f9ca7c 100644
--- a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/resource/GDPRURIResourceProvider.java
+++ b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/resource/GDPRURIResourceProvider.java
@@ -3,46 +3,40 @@
import java.util.ArrayList;
import java.util.List;
import mdpa.gdpr.analysis.core.TransformationManager;
-import mdpa.gdpr.metamodel.GDPR.GDPRPackage;
import mdpa.gdpr.metamodel.GDPR.LegalAssessmentFacts;
-import mdpa.gdpr.metamodel.contextproperties.ContextDependentProperties;
-import mdpa.gdpr.metamodel.contextproperties.ContextpropertiesPackage;
+import mdpa.gdpr.metamodel.contextproperties.ScopeDependentAssessmentFacts;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.util.EcoreUtil;
-import org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl;
+/**
+ * Implementation of an {@link GDPRResourceProvider} using modeling project URIs to load the required models
+ */
public class GDPRURIResourceProvider extends GDPRResourceProvider {
private final URI modelURI;
private final URI propertyURI;
private LegalAssessmentFacts model;
- private ContextDependentProperties contextDependentProperties;
+ private ScopeDependentAssessmentFacts scopeDependentAssessmentFacts;
private final TransformationManager transformationManager;
+ /**
+ * Creates a new {@link GDPRURIResourceProvider} using the provided URIs to the models
+ *
+ * Usually, the resource provider will be created automatically when using
+ * {@link mdpa.gdpr.analysis.GDPRLegalAssessmentAnalysisBuilder}
+ * @param modelURI URI path to the GDPR model
+ * @param propertyURI URI path to the context property model
+ */
public GDPRURIResourceProvider(URI modelURI, URI propertyURI) {
this.modelURI = modelURI;
this.propertyURI = propertyURI;
this.transformationManager = new TransformationManager();
}
- @Override
- public void setupResources() {
- this.resources.getPackageRegistry()
- .put(GDPRPackage.eNS_URI, GDPRPackage.eINSTANCE);
- this.resources.getResourceFactoryRegistry()
- .getExtensionToFactoryMap()
- .put(GDPRPackage.eNAME, new XMIResourceFactoryImpl());
- this.resources.getPackageRegistry()
- .put(ContextpropertiesPackage.eNS_URI, ContextpropertiesPackage.eINSTANCE);
- this.resources.getResourceFactoryRegistry()
- .getExtensionToFactoryMap()
- .put(ContextpropertiesPackage.eNAME, new XMIResourceFactoryImpl());
- }
-
@Override
public void loadRequiredResources() {
this.model = (LegalAssessmentFacts) this.loadModelContent(modelURI);
- this.contextDependentProperties = (ContextDependentProperties) this.loadModelContent(propertyURI);
+ this.scopeDependentAssessmentFacts = (ScopeDependentAssessmentFacts) this.loadModelContent(propertyURI);
List loadedResources;
do {
loadedResources = new ArrayList<>(this.resources.getResources());
@@ -52,13 +46,13 @@ public void loadRequiredResources() {
}
@Override
- public LegalAssessmentFacts getModel() {
+ public LegalAssessmentFacts getGDPRModel() {
return this.model;
}
@Override
- public ContextDependentProperties getContextDependentProperties() {
- return this.contextDependentProperties;
+ public ScopeDependentAssessmentFacts getScopeDependentAssessmentFacts() {
+ return this.scopeDependentAssessmentFacts;
}
@Override
diff --git a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DFDGDPRFlowGraphCollection.java b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DFDGDPRFlowGraphCollection.java
index add84be..2111e23 100644
--- a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DFDGDPRFlowGraphCollection.java
+++ b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DFDGDPRFlowGraphCollection.java
@@ -9,12 +9,9 @@
import mdpa.gdpr.analysis.core.ContextDependentAttributeSource;
import mdpa.gdpr.analysis.core.resource.GDPRResourceProvider;
import mdpa.gdpr.metamodel.GDPR.AbstractGDPRElement;
-import mdpa.gdpr.metamodel.GDPR.Collecting;
import mdpa.gdpr.metamodel.GDPR.PersonalData;
+import mdpa.gdpr.metamodel.GDPR.Processing;
import mdpa.gdpr.metamodel.GDPR.Role;
-import mdpa.gdpr.metamodel.GDPR.Storing;
-import mdpa.gdpr.metamodel.GDPR.Transferring;
-import mdpa.gdpr.metamodel.GDPR.Usage;
import org.apache.log4j.Logger;
import org.dataflowanalysis.analysis.core.AbstractTransposeFlowGraph;
import org.dataflowanalysis.analysis.core.FlowGraphCollection;
@@ -25,13 +22,28 @@
import org.dataflowanalysis.dfd.datadictionary.DataDictionary;
import org.dataflowanalysis.dfd.datadictionary.Pin;
+/**
+ * Models a collection of {@link DFDGDPRTransposeFlowGraph}s for use with the
+ * {@link mdpa.gdpr.analysis.GDPRLegalAssessmentAnalysis}
+ */
public class DFDGDPRFlowGraphCollection extends FlowGraphCollection {
private final Logger logger = Logger.getLogger(DFDGDPRFlowGraphCollection.class);
+ private DataDictionary dataDictionary;
+ /**
+ * Creates a new {@link DFDGDPRFlowGraphCollection} using the provided resource provider
+ * @param resourceProvider {@link ResourceProvider} that provides the necessary models
+ */
public DFDGDPRFlowGraphCollection(ResourceProvider resourceProvider) {
super(resourceProvider);
}
+ /**
+ * Creates a new {@link DFDGDPRFlowGraphCollection} with the given list of transpose flow graphs and a given resource
+ * provider
+ * @param transposeFlowGraphs List of {@link DFDGDPRTransposeFlowGraph}s that are stored in the flow graph collection
+ * @param resourceProvider {@link ResourceProvider} that has the relevant model elements loaded
+ */
public DFDGDPRFlowGraphCollection(List transposeFlowGraphs, ResourceProvider resourceProvider) {
super(transposeFlowGraphs, resourceProvider);
}
@@ -43,31 +55,58 @@ public List extends AbstractTransposeFlowGraph> findTransposeFlowGraphs() {
throw new IllegalArgumentException();
}
DataFlowDiagramAndDataDictionary dfd = gdprResourceProvider.getTransformationManager()
- .transform(gdprResourceProvider.getModel(), gdprResourceProvider.getContextDependentProperties());
+ .transform(gdprResourceProvider.getGDPRModel(), gdprResourceProvider.getScopeDependentAssessmentFacts());
+ this.dataDictionary = dfd.dataDictionary();
DFDTransposeFlowGraphFinder finder = new DFDTransposeFlowGraphFinder(dfd.dataDictionary(), dfd.dataFlowDiagram());
List completeFlowGraphs = finder.findTransposeFlowGraphs()
.stream()
.map(it -> this.transformFlowGraph((DFDTransposeFlowGraph) it, dfd.dataDictionary()))
.toList();
- List result = new ArrayList<>(completeFlowGraphs);
- /*
- * result.addAll(completeFlowGraphs.stream() .map(it -> this.getPartialTransposeFlowGraphs(it, dfd.dataDictionary()))
- * .flatMap(List::stream) .toList());
- */
- return result;
+ return new ArrayList<>(completeFlowGraphs);
+ }
+
+ /**
+ * Finds the partial responsibility flow graphs for the contained flow graphs and creates a new
+ * {@link DFDGDPRFlowGraphCollection} containing the previous {@link DFDGDPRTransposeFlowGraph} with the additional
+ * partial responsibility flow graphs
+ * @return Returns a new {@link DFDGDPRFlowGraphCollection} containing additional partial responsibility flow graphs
+ */
+ public DFDGDPRFlowGraphCollection findResponsibilityFlowGraphs() {
+ List completeFlowGraphs = this.getTransposeFlowGraphs()
+ .stream()
+ .filter(DFDGDPRTransposeFlowGraph.class::isInstance)
+ .map(DFDGDPRTransposeFlowGraph.class::cast)
+ .toList();
+ List flowGraphs = new ArrayList<>(completeFlowGraphs);
+ flowGraphs.addAll(completeFlowGraphs.stream()
+ .map(this::getPartialTransposeFlowGraphs)
+ .flatMap(List::stream)
+ .toList());
+ return new DFDGDPRFlowGraphCollection(flowGraphs, this.resourceProvider);
}
- private DFDGDPRTransposeFlowGraph transformFlowGraph(DFDTransposeFlowGraph transposeFlowGraph, DataDictionary dd) {
+ /**
+ * Transforms the given DFD-based {@link DFDTransposeFlowGraph} to an {@link DFDGDPRTransposeFlowGraph}
+ * @param transposeFlowGraph Given {@link DFDTransposeFlowGraph}
+ * @param dataDictionary Data dictionary containing the labels for CDAs
+ * @return Returns the corresponding {@link DFDGDPRTransposeFlowGraph}
+ */
+ private DFDGDPRTransposeFlowGraph transformFlowGraph(DFDTransposeFlowGraph transposeFlowGraph, DataDictionary dataDictionary) {
Map mapping = new IdentityHashMap<>();
transposeFlowGraph.getVertices()
.stream()
.map(DFDVertex.class::cast)
.forEach(vertex -> mapping.put(vertex, this.getDFDGDPRVertex(vertex, new IdentityHashMap<>())));
return new DFDGDPRTransposeFlowGraph(mapping.get((DFDVertex) transposeFlowGraph.getSink()),
- this.determineContextDependentAttributes(transposeFlowGraph, mapping.values()), dd);
+ this.determineContextDependentAttributes(mapping.values()), dataDictionary);
}
- private List getPartialTransposeFlowGraphs(DFDGDPRTransposeFlowGraph transposeFlowGraph, DataDictionary dd) {
+ /**
+ * Determines the list of partial responsibility flow graphs for the given {@link DFDGDPRTransposeFlowGraph}
+ * @param transposeFlowGraph Given {@link DFDGDPRTransposeFlowGraph}
+ * @return Returns the list of responsibility transpose flow graphs for the input {@link DFDGDPRTransposeFlowGraph}
+ */
+ private List getPartialTransposeFlowGraphs(DFDGDPRTransposeFlowGraph transposeFlowGraph) {
List result = new ArrayList<>();
Map> roleMap = new HashMap<>();
transposeFlowGraph.getVertices()
@@ -96,27 +135,37 @@ private List getPartialTransposeFlowGraphs(DFDGDPRTra
.filter(it -> !previousElements.contains(it))
.toList();
for (DFDGDPRVertex sink : sinks) {
- Map mapping = new HashMap<>();
- result.add(new DFDGDPRTransposeFlowGraph(this.getMappingForSink(sink, roleVertices, mapping),
- new ArrayList<>(transposeFlowGraph.getContextDependentAttributeSources()), dd));
+ result.add(new DFDGDPRTransposeFlowGraph(this.getMappingForSink(sink, roleVertices),
+ new ArrayList<>(transposeFlowGraph.getContextDependentAttributeSources()), this.dataDictionary));
}
}
return result;
}
- private DFDGDPRVertex getMappingForSink(DFDGDPRVertex sink, List roleVertices, Map mapping) {
+ /**
+ * Determines the {@link DFDGDPRVertex} that denotes the sink of the partial responsibility transpose flow graph
+ * @param sink {@link DFDGDPRVertex} that is the current sink
+ * @param roleVertices List of {@link DFDGDPRVertex} that denote the beginning of each responsibility segment
+ * @return Returns the {@link DFDGDPRVertex} sink of the partial responsibility TFG
+ */
+ private DFDGDPRVertex getMappingForSink(DFDGDPRVertex sink, List roleVertices) {
Map pinVertexMap = new HashMap<>();
sink.getPinDFDVertexMap()
.entrySet()
.stream()
+ .filter(it -> it.getValue() instanceof DFDGDPRVertex)
.filter(it -> roleVertices.contains(it.getValue()))
- .forEach(it -> {
- pinVertexMap.put(it.getKey(), this.getMappingForSink((DFDGDPRVertex) it.getValue(), roleVertices, mapping));
- });
+ .forEach(it -> pinVertexMap.put(it.getKey(), this.getMappingForSink((DFDGDPRVertex) it.getValue(), roleVertices)));
return new DFDGDPRVertex(sink.getReferencedElement(), pinVertexMap, new HashMap<>(sink.getPinFlowMap()),
new ArrayList<>(sink.getRelatedElements()));
}
+ /**
+ * Creates the {@link DFDGDPRVertex} for a corresponding {@link DFDVertex} and a given mapping
+ * @param vertex Given {@link DFDVertex}
+ * @param mapping Mapping between already processed {@link DFDVertex} elements and their corresponding {@link DFDGDPRVertex} copies
+ * @return Returns the corresponding {@link DFDGDPRVertex} of the {@link DFDVertex}
+ */
private DFDGDPRVertex getDFDGDPRVertex(DFDVertex vertex, Map mapping) {
if (!(this.resourceProvider instanceof GDPRResourceProvider gdprResourceProvider)) {
this.logger.error("Resource provider is not a GDPR resource provider!");
@@ -136,96 +185,45 @@ private DFDGDPRVertex getDFDGDPRVertex(DFDVertex vertex, Map(vertex.getPinFlowMap()), relatedElements);
}
+ /**
+ * Determines the related elements for the given {@link AbstractGDPRElement}
+ * @param gdprElement Given {@link AbstractGDPRElement}
+ * @return Related elements for the given {@link AbstractGDPRElement}
+ */
private List determineRelatedElements(AbstractGDPRElement gdprElement) {
List result = new ArrayList<>();
result.add(gdprElement);
- if (gdprElement instanceof Collecting collecting) {
- result.addAll(collecting.getInputData());
- result.addAll(collecting.getInputData()
- .stream()
- .filter(PersonalData.class::isInstance)
- .map(PersonalData.class::cast)
- .map(PersonalData::getDataReferences)
- .flatMap(List::stream)
- .toList());
- result.addAll(collecting.getOutputData());
- result.addAll(collecting.getOutputData()
- .stream()
- .filter(PersonalData.class::isInstance)
- .map(PersonalData.class::cast)
- .map(PersonalData::getDataReferences)
- .flatMap(List::stream)
- .toList());
- result.addAll(collecting.getPurpose());
- result.addAll(collecting.getOnTheBasisOf());
- result.add(collecting.getResponsible());
- } else if (gdprElement instanceof Usage usage) {
- result.addAll(usage.getInputData());
- result.addAll(usage.getInputData()
- .stream()
- .filter(PersonalData.class::isInstance)
- .map(PersonalData.class::cast)
- .map(PersonalData::getDataReferences)
- .flatMap(List::stream)
- .toList());
- result.addAll(usage.getOutputData());
- result.addAll(usage.getOutputData()
- .stream()
- .filter(PersonalData.class::isInstance)
- .map(PersonalData.class::cast)
- .map(PersonalData::getDataReferences)
- .flatMap(List::stream)
- .toList());
- result.addAll(usage.getPurpose());
- result.addAll(usage.getOnTheBasisOf());
- result.add(usage.getResponsible());
- } else if (gdprElement instanceof Transferring transferring) {
- result.addAll(transferring.getInputData());
- result.addAll(transferring.getInputData()
- .stream()
- .filter(PersonalData.class::isInstance)
- .map(PersonalData.class::cast)
- .map(PersonalData::getDataReferences)
- .flatMap(List::stream)
- .toList());
- result.addAll(transferring.getOutputData());
- result.addAll(transferring.getOutputData()
- .stream()
- .filter(PersonalData.class::isInstance)
- .map(PersonalData.class::cast)
- .map(PersonalData::getDataReferences)
- .flatMap(List::stream)
- .toList());
- result.addAll(transferring.getPurpose());
- result.addAll(transferring.getOnTheBasisOf());
- result.add(transferring.getResponsible());
-
- } else if (gdprElement instanceof Storing storing) {
- result.addAll(storing.getInputData());
- result.addAll(storing.getInputData()
+ if (gdprElement instanceof Processing processing) {
+ result.addAll(processing.getInputData());
+ result.addAll(processing.getInputData()
.stream()
.filter(PersonalData.class::isInstance)
.map(PersonalData.class::cast)
.map(PersonalData::getDataReferences)
.flatMap(List::stream)
.toList());
- result.addAll(storing.getOutputData());
- result.addAll(storing.getOutputData()
+ result.addAll(processing.getOutputData());
+ result.addAll(processing.getOutputData()
.stream()
.filter(PersonalData.class::isInstance)
.map(PersonalData.class::cast)
.map(PersonalData::getDataReferences)
.flatMap(List::stream)
.toList());
- result.addAll(storing.getPurpose());
- result.addAll(storing.getOnTheBasisOf());
- result.add(storing.getResponsible());
+ result.addAll(processing.getPurpose());
+ result.addAll(processing.getOnTheBasisOf());
+ result.add(processing.getResponsible());
}
return result;
}
- private List determineContextDependentAttributes(DFDTransposeFlowGraph transposeFlowGraph,
- Collection vertices) {
+ /**
+ * Determines the context dependent attribute sources for the given collection of vertices using the
+ * {@link mdpa.gdpr.analysis.core.TransformationManager}
+ * @param vertices List of vertices of which the {@link ContextDependentAttributeSource}s should be determined
+ * @return List of {@link ContextDependentAttributeSource} that are applicable to the given list of vertices
+ */
+ private List determineContextDependentAttributes(Collection vertices) {
if (!(this.resourceProvider instanceof GDPRResourceProvider gdprResourceProvider)) {
this.logger.error("Resource provider is not a GDPR resource provider!");
throw new IllegalArgumentException();
@@ -237,6 +235,12 @@ private List determineContextDependentAttribute
.toList();
}
+ /**
+ * Returns the flow graph collection that contains the final flow graphs with resolved context dependent attributes.
+ *
+ * Note: This will create a new instance of the {@link DFDGDPRFlowGraphCollection}
+ * @return Returns a new {@link DFDGDPRFlowGraphCollection} containing the resolved {@link DFDGDPRTransposeFlowGraph}s
+ */
public DFDGDPRFlowGraphCollection resolveContextDependentAttributes() {
List resultingTransposeFlowGraphs = this.getTransposeFlowGraphs()
.stream()
diff --git a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DFDGDPRTransposeFlowGraph.java b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DFDGDPRTransposeFlowGraph.java
index 40e6232..9299e28 100644
--- a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DFDGDPRTransposeFlowGraph.java
+++ b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DFDGDPRTransposeFlowGraph.java
@@ -7,6 +7,9 @@
import java.util.List;
import java.util.Map;
import java.util.Optional;
+import java.util.stream.Stream;
+
+import mdpa.gdpr.analysis.DFDUtils;
import mdpa.gdpr.analysis.UncertaintyUtils;
import mdpa.gdpr.analysis.core.ContextAttributeState;
import mdpa.gdpr.analysis.core.ContextDependentAttributeScenario;
@@ -14,16 +17,14 @@
import mdpa.gdpr.metamodel.GDPR.Data;
import mdpa.gdpr.metamodel.GDPR.NaturalPerson;
import mdpa.gdpr.metamodel.GDPR.PersonalData;
-import mdpa.gdpr.metamodel.contextproperties.PropertyValue;
+import mdpa.gdpr.metamodel.contextproperties.Expression;
import org.apache.log4j.Logger;
import org.dataflowanalysis.analysis.core.AbstractTransposeFlowGraph;
import org.dataflowanalysis.analysis.core.AbstractVertex;
import org.dataflowanalysis.analysis.dfd.core.DFDTransposeFlowGraph;
import org.dataflowanalysis.analysis.dfd.core.DFDVertex;
-import org.dataflowanalysis.dfd.datadictionary.Behavior;
import org.dataflowanalysis.dfd.datadictionary.DataDictionary;
import org.dataflowanalysis.dfd.datadictionary.Label;
-import org.dataflowanalysis.dfd.datadictionary.LabelType;
import org.dataflowanalysis.dfd.datadictionary.Pin;
import org.dataflowanalysis.dfd.dataflowdiagram.Node;
import org.eclipse.emf.ecore.util.EcoreUtil;
@@ -31,19 +32,21 @@
public class DFDGDPRTransposeFlowGraph extends DFDTransposeFlowGraph {
private final Logger logger = Logger.getLogger(DFDGDPRTransposeFlowGraph.class);
private final List relevantContextDependentAttributes;
- private final DataDictionary dd;
+ private final DataDictionary dataDictionary;
private final Optional contextAttributeState;
+ private final DFDUtils DFDUtils = new DFDUtils();
/**
* Creates a new dfd transpose flow graph with the given sink that induces the transpose flow graph
* @param sink Sink vertex that induces the transpose flow graph
*/
- public DFDGDPRTransposeFlowGraph(AbstractVertex> sink, List contextDependentAttributes, DataDictionary dd) {
+ public DFDGDPRTransposeFlowGraph(AbstractVertex> sink, List contextDependentAttributes,
+ DataDictionary dataDictionary) {
super(sink);
this.relevantContextDependentAttributes = contextDependentAttributes;
this.contextAttributeState = Optional.empty();
- this.dd = dd;
+ this.dataDictionary = dataDictionary;
}
/**
@@ -51,203 +54,235 @@ public DFDGDPRTransposeFlowGraph(AbstractVertex> sink, List sink, List contextDependentAttributes,
- ContextAttributeState contextAttributeState, DataDictionary dd) {
+ ContextAttributeState contextAttributeState, DataDictionary dataDictionary) {
super(sink);
this.relevantContextDependentAttributes = contextDependentAttributes;
this.contextAttributeState = Optional.of(contextAttributeState);
- this.dd = dd;
+ this.dataDictionary = dataDictionary;
}
+ /**
+ * Determine the alternate flow graphs that are caused due to the resolving of CDAs
+ * @return Returns a list of transpose flow graphs that each handle different applications of CDAs for this TFG
+ */
public List determineAlternateFlowGraphs() {
List result = new ArrayList<>();
- List states = ContextAttributeState.createAllContextAttributeStates(this.relevantContextDependentAttributes);
- Map, List> unmatchedStates = new HashMap<>();
+ List states = new ArrayList<>(ContextAttributeState.createAllContextAttributeStates(this.relevantContextDependentAttributes));
for (ContextAttributeState state : states) {
- if (state.getSelectedScenarios()
- .stream()
- .noneMatch(it -> it.applicable(this))) {
- logger.warn("State not applicable to transpose flow graph, skipping");
- continue;
- }
- DFDGDPRTransposeFlowGraph currentTransposeFlowGraph = (DFDGDPRTransposeFlowGraph) this.copy(new HashMap<>(), state);
+ result.addAll(this.determineAlternateFlowGraphForState(state));
+ }
+ return result;
+ }
- for (ContextDependentAttributeScenario scenario : state.getSelectedScenarios()) {
- ContextDependentAttributeSource source = scenario.getContextDependentAttributeSource();
- Optional matchingVertex = currentTransposeFlowGraph.getVertices()
- .stream()
- .filter(DFDGDPRVertex.class::isInstance)
- .map(DFDGDPRVertex.class::cast)
- .filter(source::applicable)
- .findFirst();
- if (matchingVertex.isEmpty()) {
- logger.warn("Could not find matching vertex for context dependent attribute");
- continue;
- }
- if (!scenario.applicable(matchingVertex.get())) {
- // Scenario must not be resolved by uncertainty
- logger.warn("Scenario not applicable to vertex!");
- continue;
- }
+ /**
+ * Recursively handles the alternate flow graphs for a given state
+ * @param state Given state of context dependent attributes
+ * @return Returns a list of all alternate flow graphs for the given state
+ */
+ public List determineAlternateFlowGraphForState(ContextAttributeState state) {
+ List result = new ArrayList<>();
+ if (state.selectedScenarios()
+ .stream()
+ .noneMatch(it -> it.applicable(this))) {
+ logger.warn("State not applicable to transpose flow graph, skipping");
+ return List.of();
+ }
+ DFDGDPRTransposeFlowGraph currentTransposeFlowGraph = (DFDGDPRTransposeFlowGraph) this.copy(new HashMap<>(), state);
- if (source.getAnnotatedElement() instanceof NaturalPerson person) {
- // Insert Data Characteristic
- List targetedVertices = currentTransposeFlowGraph.getVertices()
- .stream()
- .filter(DFDGDPRVertex.class::isInstance)
- .map(DFDGDPRVertex.class::cast)
- .filter(scenario::applicable)
- .toList();
- List finalTargetedVertices = targetedVertices;
- targetedVertices = targetedVertices.stream()
- .filter(it -> UncertaintyUtils.shouldReapply(finalTargetedVertices, it))
- .toList();
- logger.info("Applying state to vertices: " + targetedVertices.toString());
+ for (ContextDependentAttributeScenario scenario : state.selectedScenarios()) {
+ var scenarioResult = this.handleScenario(scenario, currentTransposeFlowGraph, state);
+ if (scenarioResult.states().isEmpty()) {
+ currentTransposeFlowGraph = scenarioResult.transposeFlowGraph().orElse(currentTransposeFlowGraph);
+ } else {
+ return scenarioResult.states().stream().map(this::determineAlternateFlowGraphForState).flatMap(List::stream).toList();
+ }
+ }
+ return List.of(currentTransposeFlowGraph);
+ }
- for (DFDGDPRVertex targetVertex : targetedVertices) {
- DFDGDPRVertex currentTargetVertex = currentTransposeFlowGraph.getVertices()
- .stream()
- .filter(DFDGDPRVertex.class::isInstance)
- .map(DFDGDPRVertex.class::cast)
- .filter(it -> it.getReferencedElement()
- .getId()
- .equals(targetVertex.getReferencedElement()
- .getId()))
- .filter(it -> it.getReferencedElement()
- .getEntityName()
- .equals(targetVertex.getReferencedElement()
- .getEntityName()))
- .findAny()
- .orElseThrow();
- DFDGDPRVertex impactedElement = currentTargetVertex.getPreviousElements()
- .stream()
- .filter(DFDGDPRVertex.class::isInstance)
- .map(DFDGDPRVertex.class::cast)
- .filter(it -> {
- return it.getOutgoingData()
- .stream()
- .filter(PersonalData.class::isInstance)
- .map(PersonalData.class::cast)
- .anyMatch(data -> data.getDataReferences()
- .contains(person));
- })
- .findAny()
- .orElse(currentTargetVertex);
- Behavior replacingBehavior = UncertaintyUtils.createBehavior(impactedElement, dd, source, scenario, person);
- Node replacingNode = EcoreUtil.copy(impactedElement.getReferencedElement());
- replacingNode.setBehavior(replacingBehavior);
- DFDGDPRVertex replacingVertex = this.copyVertex(impactedElement, replacingNode);
- List scenarios = new ArrayList<>(impactedElement.getContextDependentAttributes());
- scenarios.add(scenario);
- replacingVertex.setContextDependentAttributes(scenarios);
- Map mapping = new HashMap<>();
- mapping.put(impactedElement, replacingVertex);
- currentTransposeFlowGraph = (DFDGDPRTransposeFlowGraph) currentTransposeFlowGraph.copy(mapping, state);
- }
+ /**
+ * Handles the given {@link ContextDependentAttributeScenario} scenario using the given current transpose flow graph.
+ * Additionally, each newly created transpose flow graph gets the given state.
+ * @param scenario {@link ContextDependentAttributeScenario} that is applied to the given transpose flow graph
+ * @param currentTransposeFlowGraph Transpose flow graph that is modified
+ * @param state {@link ContextAttributeState} that the resulting transpose flow graphs should have
+ * @return Returns a {@link ScenarioResult} either containing new states to be explored, or an alternate flow graph with the applied scenario
+ */
+ private ScenarioResult handleScenario(ContextDependentAttributeScenario scenario,
+ DFDGDPRTransposeFlowGraph currentTransposeFlowGraph, ContextAttributeState state) {
+ ContextDependentAttributeSource source = scenario.getContextDependentAttributeSource();
+ Optional matchingVertex = currentTransposeFlowGraph.getVertices()
+ .stream()
+ .filter(DFDGDPRVertex.class::isInstance)
+ .map(DFDGDPRVertex.class::cast)
+ .filter(source::applicable)
+ .findFirst();
+ if (matchingVertex.isEmpty()) {
+ logger.warn("Could not find matching vertex for context dependent attribute");
+ return new ScenarioResult(Optional.empty(), List.of());
+ }
+ if (!scenario.applicable(matchingVertex.get())) {
+ // Scenario must not be resolved by uncertainty
+ logger.warn("Scenario not applicable to vertex!");
+ return new ScenarioResult(Optional.empty(), List.of());
+ }
- } else if (source.getAnnotatedElement() instanceof Data data) {
- // Insert Data Characteristic
- List targetedVertices = currentTransposeFlowGraph.getVertices()
- .stream()
- .filter(DFDGDPRVertex.class::isInstance)
- .map(DFDGDPRVertex.class::cast)
- .filter(scenario::applicable)
- .toList();
- List finalTargetedVertices = targetedVertices;
- targetedVertices = targetedVertices.stream()
- .filter(it -> UncertaintyUtils.shouldReapply(finalTargetedVertices, it))
- .toList();
- logger.info("Applying state to vertices: " + targetedVertices.toString());
+ if (source.getAnnotatedElement() instanceof NaturalPerson person) {
+ // Insert Data Characteristic
+ List targetedVertices = this.determineTargetedVertices(currentTransposeFlowGraph, scenario);
- for (DFDGDPRVertex targetVertex : targetedVertices) {
- DFDGDPRVertex currentTargetVertex = currentTransposeFlowGraph.getVertices()
- .stream()
- .filter(DFDGDPRVertex.class::isInstance)
- .map(DFDGDPRVertex.class::cast)
- .filter(it -> it.getReferencedElement()
- .getId()
- .equals(targetVertex.getReferencedElement()
- .getId()))
- .filter(it -> it.getReferencedElement()
- .getEntityName()
- .equals(targetVertex.getReferencedElement()
- .getEntityName()))
- .findAny()
- .orElseThrow();
- DFDGDPRVertex impactedElement = currentTargetVertex.getPreviousElements()
- .stream()
- .filter(DFDGDPRVertex.class::isInstance)
- .map(DFDGDPRVertex.class::cast)
- .filter(it -> it.getOutgoingData()
- .contains(data))
- .findAny()
- .orElse(currentTargetVertex);
- Behavior replacingBehavior = UncertaintyUtils.createBehavior(impactedElement, dd, source, scenario, data);
- Node replacingNode = EcoreUtil.copy(impactedElement.getReferencedElement());
- replacingNode.setBehavior(replacingBehavior);
- DFDGDPRVertex replacingVertex = this.copyVertex(impactedElement, replacingNode);
- List scenarios = new ArrayList<>(impactedElement.getContextDependentAttributes());
- scenarios.add(scenario);
- replacingVertex.setContextDependentAttributes(scenarios);
- Map mapping = new HashMap<>();
- mapping.put(impactedElement, replacingVertex);
- currentTransposeFlowGraph = (DFDGDPRTransposeFlowGraph) currentTransposeFlowGraph.copy(mapping, state);
+ for (DFDGDPRVertex targetVertex : targetedVertices) {
+ if (!source.applicable(targetVertex) && state.doesNotHandle(targetVertex)) {
+ List additionalStates = new ArrayList<>();
+ ContextDependentAttributeSource additionalSource = new ContextDependentAttributeSource(source.getAnnotation(), source.getScopeDependentAssessmentFact().getExpression(), List.of(source));
+ for (Expression expression : source.getScopeDependentAssessmentFact().getExpression()) {
+ ContextDependentAttributeScenario additionalScenario = new ContextDependentAttributeScenario(expression, additionalSource, List.of(source));
+ ContextAttributeState additionalState = new ContextAttributeState(Stream.concat(state.selectedScenarios().stream(), Stream.of(additionalScenario)).toList());
+ additionalStates.add(additionalState);
}
- } else {
- // Insert Node Characteristics at all matching vertices
- List matchingVertices = currentTransposeFlowGraph.getVertices()
- .stream()
- .filter(DFDGDPRVertex.class::isInstance)
- .map(DFDGDPRVertex.class::cast)
- .filter(it -> source.applicable(it))
- .filter(it -> scenario.applicable(it))
- .map(it -> it.getReferencedElement()
- .getId())
- .toList();
- for (String targetVertexID : matchingVertices) {
- DFDGDPRVertex targetVertex = currentTransposeFlowGraph.getVertices()
+ logger.warn("Explore with Uncertainty!");
+ return new ScenarioResult(Optional.empty(), additionalStates);
+ }
+ DFDGDPRVertex currentTargetVertex = currentTransposeFlowGraph.getVertices()
+ .stream()
+ .filter(DFDGDPRVertex.class::isInstance)
+ .map(DFDGDPRVertex.class::cast)
+ .filter(it -> it.getReferencedElement()
+ .getId()
+ .equals(targetVertex.getReferencedElement()
+ .getId()))
+ .filter(it -> it.getReferencedElement()
+ .getEntityName()
+ .equals(targetVertex.getReferencedElement()
+ .getEntityName()))
+ .findAny()
+ .orElseThrow();
+ DFDGDPRVertex impactedElement = currentTargetVertex.getPreviousElements()
+ .stream()
+ .filter(DFDGDPRVertex.class::isInstance)
+ .map(DFDGDPRVertex.class::cast)
+ .filter(it -> it.getOutgoingData()
.stream()
- .filter(DFDGDPRVertex.class::isInstance)
- .map(DFDGDPRVertex.class::cast)
- .filter(it -> it.getReferencedElement()
- .getId()
- .equals(targetVertexID))
- .findFirst()
- .orElseThrow();
- Node replacingNode = EcoreUtil.copy(targetVertex.getReferencedElement());
+ .filter(PersonalData.class::isInstance)
+ .map(PersonalData.class::cast)
+ .anyMatch(data -> data.getDataReferences()
+ .contains(person)))
+ .findAny()
+ .orElse(currentTargetVertex);
+ Node replacingNode = DFDUtils.copyNode(impactedElement.getReferencedElement());
+ UncertaintyUtils.impactBehavior(replacingNode, impactedElement, dataDictionary, source, scenario, person);
+ DFDGDPRVertex replacingVertex = this.copyVertex(impactedElement, replacingNode);
+ List scenarios = new ArrayList<>(impactedElement.getContextDependentAttributes());
+ scenarios.add(scenario);
+ replacingVertex.setContextDependentAttributes(scenarios);
+ Map mapping = new HashMap<>();
+ mapping.put(impactedElement, replacingVertex);
+ currentTransposeFlowGraph = (DFDGDPRTransposeFlowGraph) currentTransposeFlowGraph.copy(mapping, state);
+ }
+ return new ScenarioResult(Optional.of(currentTransposeFlowGraph), List.of());
- LabelType labelType = dd.getLabelTypes()
- .stream()
- .filter(it -> it.getEntityName()
- .equals(source.getPropertyType()
- .getEntityName()))
- .findAny()
- .orElseThrow();
- List labels = new ArrayList<>();
- for (PropertyValue propertyValue : scenario.getPropertyValues()) {
- Label label = labelType.getLabel()
- .stream()
- .filter(it -> it.getEntityName()
- .equals(propertyValue.getEntityName()))
- .findAny()
- .orElseThrow();
- labels.add(label);
- }
- replacingNode.getProperties()
- .addAll(labels);
+ } else if (source.getAnnotatedElement() instanceof Data data) {
+ // Insert Data Characteristic
+ List targetedVertices = this.determineTargetedVertices(currentTransposeFlowGraph, scenario);
- DFDGDPRVertex replacingVertex = this.copyVertex(targetVertex, replacingNode);
- List scenarios = new ArrayList<>(targetVertex.getContextDependentAttributes());
- scenarios.add(scenario);
- replacingVertex.setContextDependentAttributes(scenarios);
- Map mapping = new HashMap<>();
- mapping.put(targetVertex, replacingVertex);
- currentTransposeFlowGraph = (DFDGDPRTransposeFlowGraph) currentTransposeFlowGraph.copy(mapping, state);
- }
+ for (DFDGDPRVertex targetVertex : targetedVertices) {
+ if (!source.applicable(targetVertex) && state.doesNotHandle(targetVertex)) {
+ List additionalStates = new ArrayList<>();
+ ContextDependentAttributeSource additionalSource = new ContextDependentAttributeSource(source.getAnnotation(), source.getScopeDependentAssessmentFact().getExpression(), List.of(source));
+ for (Expression expression : source.getScopeDependentAssessmentFact().getExpression()) {
+ ContextDependentAttributeScenario additionalScenario = new ContextDependentAttributeScenario(expression, additionalSource, List.of(source));
+ ContextAttributeState additionalState = new ContextAttributeState(Stream.concat(state.selectedScenarios().stream(), Stream.of(additionalScenario)).toList());
+ additionalStates.add(additionalState);
+ }
+ logger.warn("Explore with Uncertainty!");
+ return new ScenarioResult(Optional.empty(), additionalStates);
}
+ DFDGDPRVertex currentTargetVertex = currentTransposeFlowGraph.getVertices()
+ .stream()
+ .filter(DFDGDPRVertex.class::isInstance)
+ .map(DFDGDPRVertex.class::cast)
+ .filter(it -> it.getReferencedElement()
+ .getId()
+ .equals(targetVertex.getReferencedElement()
+ .getId()))
+ .filter(it -> it.getReferencedElement()
+ .getEntityName()
+ .equals(targetVertex.getReferencedElement()
+ .getEntityName()))
+ .findAny()
+ .orElseThrow();
+ DFDGDPRVertex impactedElement = currentTargetVertex.getPreviousElements()
+ .stream()
+ .filter(DFDGDPRVertex.class::isInstance)
+ .map(DFDGDPRVertex.class::cast)
+ .filter(it -> it.getOutgoingData()
+ .contains(data))
+ .findAny()
+ .orElse(currentTargetVertex);
+ Node replacingNode = DFDUtils.copyNode(impactedElement.getReferencedElement());
+ UncertaintyUtils.impactBehavior(replacingNode, impactedElement, dataDictionary, source, scenario, data);
+ DFDGDPRVertex replacingVertex = this.copyVertex(impactedElement, replacingNode);
+ List scenarios = new ArrayList<>(impactedElement.getContextDependentAttributes());
+ scenarios.add(scenario);
+ replacingVertex.setContextDependentAttributes(scenarios);
+ Map mapping = new HashMap<>();
+ mapping.put(impactedElement, replacingVertex);
+ currentTransposeFlowGraph = (DFDGDPRTransposeFlowGraph) currentTransposeFlowGraph.copy(mapping, state);
}
- result.add(currentTransposeFlowGraph);
+ return new ScenarioResult(Optional.of(currentTransposeFlowGraph), List.of());
+ } else {
+ // Insert Node Characteristics at all matching vertices
+ List matchingVertices = currentTransposeFlowGraph.getVertices()
+ .stream()
+ .filter(DFDGDPRVertex.class::isInstance)
+ .map(DFDGDPRVertex.class::cast)
+ .filter(source::applicable)
+ .filter(scenario::applicable)
+ .map(it -> it.getReferencedElement()
+ .getId())
+ .toList();
+ for (String targetVertexID : matchingVertices) {
+ DFDGDPRVertex targetVertex = currentTransposeFlowGraph.getVertices()
+ .stream()
+ .filter(DFDGDPRVertex.class::isInstance)
+ .map(DFDGDPRVertex.class::cast)
+ .filter(it -> it.getReferencedElement()
+ .getId()
+ .equals(targetVertexID))
+ .findFirst()
+ .orElseThrow();
+ Node replacingNode = EcoreUtil.copy(targetVertex.getReferencedElement());
+
+ List labels = UncertaintyUtils.getAppliedLabel(source, scenario, dataDictionary);
+ replacingNode.getProperties()
+ .addAll(labels);
+
+ DFDGDPRVertex replacingVertex = this.copyVertex(targetVertex, replacingNode);
+ List scenarios = new ArrayList<>(targetVertex.getContextDependentAttributes());
+ scenarios.add(scenario);
+ replacingVertex.setContextDependentAttributes(scenarios);
+ Map mapping = new HashMap<>();
+ mapping.put(targetVertex, replacingVertex);
+ currentTransposeFlowGraph = (DFDGDPRTransposeFlowGraph) currentTransposeFlowGraph.copy(mapping, state);
+ }
+ return new ScenarioResult(Optional.of(currentTransposeFlowGraph), List.of());
}
- return result;
+ }
+
+
+ private List determineTargetedVertices(DFDGDPRTransposeFlowGraph currentTransposeFlowGraph,
+ ContextDependentAttributeScenario scenario) {
+ List targetedVertices = currentTransposeFlowGraph.getVertices()
+ .stream()
+ .filter(DFDGDPRVertex.class::isInstance)
+ .map(DFDGDPRVertex.class::cast)
+ .filter(scenario::applicable)
+ .toList();
+ List finalTargetedVertices = targetedVertices;
+ targetedVertices = targetedVertices.stream()
+ .filter(it -> UncertaintyUtils.shouldReapply(finalTargetedVertices, it))
+ .toList();
+ return targetedVertices;
}
@Override
@@ -263,7 +298,7 @@ public AbstractTransposeFlowGraph evaluate() {
DFDGDPRVertex newSink = dfdSink.copy(new IdentityHashMap<>());
newSink.unify(new HashSet<>());
newSink.evaluateDataFlow();
- return new DFDGDPRTransposeFlowGraph(newSink, this.relevantContextDependentAttributes, this.contextAttributeState.get(), this.dd);
+ return new DFDGDPRTransposeFlowGraph(newSink, this.relevantContextDependentAttributes, this.contextAttributeState.get(), this.dataDictionary);
}
public List getContextDependentAttributeSources() {
@@ -290,29 +325,30 @@ private DFDGDPRVertex copyVertex(DFDGDPRVertex vertex, Node replacingElement) {
@Override
public AbstractTransposeFlowGraph copy(Map mapping) {
DFDGDPRVertex copiedSink;
- if (mapping.containsKey(this.sink)) {
+ if (mapping.containsKey((DFDVertex) this.sink)) {
copiedSink = (DFDGDPRVertex) mapping.get(this.sink);
} else {
copiedSink = ((DFDGDPRVertex) sink).copy(mapping);
}
copiedSink.unify(new HashSet<>());
return this.contextAttributeState
- .map(attributeState -> new DFDGDPRTransposeFlowGraph(copiedSink, this.relevantContextDependentAttributes, attributeState, this.dd))
- .orElseGet(() -> new DFDGDPRTransposeFlowGraph(copiedSink, this.relevantContextDependentAttributes, this.dd));
+ .map(attributeState -> new DFDGDPRTransposeFlowGraph(copiedSink, this.relevantContextDependentAttributes, attributeState,
+ this.dataDictionary))
+ .orElseGet(() -> new DFDGDPRTransposeFlowGraph(copiedSink, this.relevantContextDependentAttributes, this.dataDictionary));
}
public AbstractTransposeFlowGraph copy(Map mapping, ContextAttributeState contextAttributeState) {
DFDGDPRVertex copiedSink;
- if (mapping.containsKey(this.sink)) {
+ if (mapping.containsKey((DFDVertex) this.sink)) {
copiedSink = (DFDGDPRVertex) mapping.get(this.sink);
} else {
copiedSink = ((DFDGDPRVertex) sink).copy(mapping);
}
copiedSink.unify(new HashSet<>());
- return new DFDGDPRTransposeFlowGraph(copiedSink, this.relevantContextDependentAttributes, contextAttributeState, this.dd);
+ return new DFDGDPRTransposeFlowGraph(copiedSink, this.relevantContextDependentAttributes, contextAttributeState, this.dataDictionary);
}
public ContextAttributeState getContextAttributeState() {
- return contextAttributeState.get();
+ return contextAttributeState.orElseThrow();
}
}
diff --git a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DFDGDPRVertex.java b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DFDGDPRVertex.java
index c2d7f71..5eed14f 100644
--- a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DFDGDPRVertex.java
+++ b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DFDGDPRVertex.java
@@ -15,6 +15,7 @@
import org.dataflowanalysis.dfd.datadictionary.Pin;
import org.dataflowanalysis.dfd.dataflowdiagram.Flow;
import org.dataflowanalysis.dfd.dataflowdiagram.Node;
+import org.eclipse.emf.ecore.util.EcoreUtil;
public class DFDGDPRVertex extends DFDVertex {
private final List relatedElements;
@@ -37,11 +38,9 @@ public DFDGDPRVertex(Node node, Map pinDFDVertexMap, Map mapping) {
- Map copiedPinDFDVertexMap = new HashMap<>();
- this.pinDFDVertexMap.keySet()
- .forEach(key -> copiedPinDFDVertexMap.put(key, mapping.getOrDefault(this.pinDFDVertexMap.get(key), this.pinDFDVertexMap.get(key)
- .copy(mapping))));
- DFDGDPRVertex copy = new DFDGDPRVertex(this.referencedElement, copiedPinDFDVertexMap, new HashMap<>(this.pinFlowMap),
+ Map copiedPinDFDVertexMap = this.copyPinDFDVertexMap(mapping);
+ Map copiedPinFlowMap = this.copyPinFlowMap(copiedPinDFDVertexMap);
+ DFDGDPRVertex copy = new DFDGDPRVertex(this.referencedElement, copiedPinDFDVertexMap, copiedPinFlowMap,
new ArrayList<>(this.relatedElements));
if (!this.contextDependentAttributes.isEmpty()) {
copy.setContextDependentAttributes(this.contextDependentAttributes);
@@ -49,6 +48,40 @@ public DFDGDPRVertex copy(Map mapping) {
return copy;
}
+ /**
+ * Returns the Map from a pin on the vertex to the given predecessor {@link DFDVertex}, while adhering to the given mapping
+ * @param mapping Mapping that should be applied to the mapping process
+ * @return Returns a new copied pin to vertex map that adheres to the given mapping
+ */
+ private Map copyPinDFDVertexMap(Map mapping) {
+ Map copiedPinDFDVertexMap = new HashMap<>();
+ this.pinDFDVertexMap.keySet()
+ .forEach(key -> copiedPinDFDVertexMap.put(key, mapping.getOrDefault(this.pinDFDVertexMap.get(key), this.pinDFDVertexMap.get(key)
+ .copy(mapping))));
+ return copiedPinDFDVertexMap;
+ }
+
+ /**
+ * Returns the Map from pin to outgoing flow that references the new correct DFD Vertex that is given by the mapping
+ * @param pinDFDVertexMap Given mapping from pin to dfd vertex that each entry should respect
+ * @return Returns a new map from pin to flow that is correct in the context of the given pin to vertex map
+ */
+ private Map copyPinFlowMap(Map pinDFDVertexMap) {
+ Map copiedPinFlowMap = new HashMap<>();
+ this.pinFlowMap.keySet()
+ .forEach(key -> {
+ Pin correspondingPin = pinDFDVertexMap.get(key).getReferencedElement().getBehavior().getOutPin().stream()
+ .filter(it -> it.getEntityName().equals(key.getEntityName()))
+ .findAny()
+ .orElseThrow();
+ Flow flow = EcoreUtil.copy(this.pinFlowMap.get(key));
+ flow.setSourcePin(correspondingPin);
+ flow.setSourceNode(pinDFDVertexMap.get(key).getReferencedElement());
+ copiedPinFlowMap.put(key, flow);
+ });
+ return copiedPinFlowMap;
+ }
+
public void setContextDependentAttributes(List contextDependentAttributes) {
this.contextDependentAttributes = contextDependentAttributes;
}
@@ -93,7 +126,7 @@ public Role getResponsibilityRole() {
return this.relatedElements.stream()
.filter(Processing.class::isInstance)
.map(Processing.class::cast)
- .map(it -> it.getResponsible())
+ .map(Processing::getResponsible)
.findAny()
.orElseThrow();
}
diff --git a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DataFlowDiagramAndDataDictionary.java b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DataFlowDiagramAndDataDictionary.java
index 8f037e5..d71b062 100644
--- a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DataFlowDiagramAndDataDictionary.java
+++ b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DataFlowDiagramAndDataDictionary.java
@@ -1,28 +1,64 @@
package mdpa.gdpr.analysis.dfd;
-import java.io.IOException;
-import java.util.Map;
import org.dataflowanalysis.dfd.datadictionary.DataDictionary;
import org.dataflowanalysis.dfd.dataflowdiagram.DataFlowDiagram;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.resource.Resource;
-import org.eclipse.emf.ecore.xmi.impl.XMIResourceImpl;
+import org.eclipse.emf.ecore.resource.ResourceSet;
+import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
+import org.eclipse.emf.ecore.xmi.XMLResource;
+import org.eclipse.emf.ecore.xmi.impl.XMLResourceFactoryImpl;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.util.Map;
+/**
+ * Contains a full metamodel required to run a DFD-based
+ * {@link org.dataflowanalysis.analysis.DataFlowConfidentialityAnalysis}
+ * @param dataFlowDiagram Data Flow Diagram of the model
+ * @param dataDictionary Data Dictionary of the model
+ */
public record DataFlowDiagramAndDataDictionary(DataFlowDiagram dataFlowDiagram, DataDictionary dataDictionary) {
- public void save(String path) {
- URI uriDFD = URI.createURI(path + "test.dataflowdiagram");
- URI uriDD = URI.createURI(path + "test.datadictionary");
- Resource resourceDFD = new XMIResourceImpl(uriDFD);
- resourceDFD.getContents()
- .add(dataFlowDiagram);
- Resource resourceDD = new XMIResourceImpl(uriDD);
- resourceDD.getContents()
- .add(dataDictionary);
+ private Resource createResource(String outputFile, String[] fileExtensions, ResourceSet resourceSet) {
+ for (String fileExtension : fileExtensions) {
+ resourceSet.getResourceFactoryRegistry()
+ .getExtensionToFactoryMap()
+ .put(fileExtension, new XMLResourceFactoryImpl());
+ }
+ URI uri = URI.createFileURI(outputFile);
+ return resourceSet.createResource(uri);
+ }
+
+ private void saveResource(Resource resource) {
+ Map saveOptions = ((XMLResource) resource).getDefaultSaveOptions();
try {
- resourceDFD.save(Map.of());
- resourceDD.save(Map.of());
+ resource.save(saveOptions);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
+
+ /**
+ * Saves this {@link DataFlowDiagramAndDataDictionary} at the given file path with the given name
+ * @param filePath File path the {@link DataFlowDiagramAndDataDictionary} should be saved at
+ * @param fileName File name (without extension) that the files should have
+ */
+ public void save(String filePath, String fileName) {
+ ResourceSet resourceSet = new ResourceSetImpl();
+ Path basePath = Path.of(filePath, fileName)
+ .toAbsolutePath()
+ .normalize();
+
+ Resource dfdResource = createResource(basePath + ".dataflowdiagram", new String[] {"dataflowdiagram"}, resourceSet);
+ Resource ddResource = createResource(basePath + ".datadictionary", new String[] {"datadictionary"}, resourceSet);
+
+ dfdResource.getContents()
+ .add(dataFlowDiagram);
+ ddResource.getContents()
+ .add(dataDictionary);
+
+ saveResource(dfdResource);
+ saveResource(ddResource);
+ }
}
diff --git a/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/ScenarioResult.java b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/ScenarioResult.java
new file mode 100644
index 0000000..66a79dc
--- /dev/null
+++ b/bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/ScenarioResult.java
@@ -0,0 +1,17 @@
+package mdpa.gdpr.analysis.dfd;
+
+import mdpa.gdpr.analysis.core.ContextAttributeState;
+
+import java.util.List;
+import java.util.Optional;
+
+/**
+ * Represents the result from evaluating a {@link mdpa.gdpr.analysis.core.ContextDependentAttributeScenario} for a TFG.
+ * It can either return a transpose flow graph that is the result of applying the scenario to the TFG,
+ * or return a list of states that need to be explored due to failing to find a fitting {@link mdpa.gdpr.analysis.core.ContextDependentAttributeScenario} for a {@link mdpa.gdpr.analysis.core.ContextDependentAttributeSource}
+ * @param transposeFlowGraph Optional transpose flow graph that is produced from the evaluation
+ * @param states List of states that need to be explored in order to evaluate the scenario correctly
+ */
+public record ScenarioResult(Optional<DFDGDPRTransposeFlowGraph> transposeFlowGraph, List<ContextAttributeState> states) {
+
+}
diff --git a/bundles/target/spotless-index b/bundles/target/spotless-index
new file mode 100644
index 0000000..e56c6ee
--- /dev/null
+++ b/bundles/target/spotless-index
@@ -0,0 +1 @@
+5bP+q5RGTlzoUUa67A5j/Ag4/hqzK0RyFIsPNaRQnG8=
diff --git a/features/mdpa.gdpr.analysis.feature/target/spotless-index b/features/mdpa.gdpr.analysis.feature/target/spotless-index
new file mode 100644
index 0000000..198a53a
--- /dev/null
+++ b/features/mdpa.gdpr.analysis.feature/target/spotless-index
@@ -0,0 +1 @@
+afRmtmoGwfBNyk/Lc779JO5/EZ25j0fbw0kEg0IzLEY=
diff --git a/features/target/spotless-index b/features/target/spotless-index
new file mode 100644
index 0000000..9d59cb3
--- /dev/null
+++ b/features/target/spotless-index
@@ -0,0 +1 @@
+vnDLeIEa2gCzehs9JALFzLEZEb+kx63EccEbhrtcXI0=
diff --git a/releng/mdpa.gdpr.analysis.targetplatform/target/spotless-index b/releng/mdpa.gdpr.analysis.targetplatform/target/spotless-index
new file mode 100644
index 0000000..3f9a7cf
--- /dev/null
+++ b/releng/mdpa.gdpr.analysis.targetplatform/target/spotless-index
@@ -0,0 +1 @@
+qzSdVduwP77OFE+D2Yfp1tHUgzpTiF+1gRuSB9FqPco=
diff --git a/releng/mdpa.gdpr.analysis.updatesite/target/spotless-index b/releng/mdpa.gdpr.analysis.updatesite/target/spotless-index
new file mode 100644
index 0000000..0995855
--- /dev/null
+++ b/releng/mdpa.gdpr.analysis.updatesite/target/spotless-index
@@ -0,0 +1 @@
+K7/n32yKkJujP4NSzDlJZya3B5bDQMgIq61C3j5/0eU=
diff --git a/releng/target/spotless-index b/releng/target/spotless-index
new file mode 100644
index 0000000..978e9c5
--- /dev/null
+++ b/releng/target/spotless-index
@@ -0,0 +1 @@
+KfNhrVrRYQYu3k6BU+DhU49gfT2zLweYfb16AqOHMWc=
diff --git a/target/spotless-index b/target/spotless-index
new file mode 100644
index 0000000..8d9dcc6
--- /dev/null
+++ b/target/spotless-index
@@ -0,0 +1,41 @@
+NxZRFe0x/Rq9CxfV/GQbazgEKqeB1VxYzSYj615IogQ=
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/AnalysisExecutor.java 2025-09-21T18:23:36.546364240Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/GDPRModelBuilder.java 2025-09-21T18:23:36.287355973Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/MemoryGDPRResourceProvider.java 2025-09-22T09:52:29.082011826Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/ScalibilityCaseRunner.java 2025-09-21T18:23:36.644367368Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/ScalibilityEvent.java 2025-09-21T18:23:36.533363825Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/ScalibilityParameter.java 2025-09-21T18:23:36.522363474Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/cases/AbstractScalibilityCase.java 2025-09-21T18:23:36.300356388Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/cases/BaseScalibilityCase.java 2025-09-21T18:23:36.317356931Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/cases/ContextDefinitionAmountScalibilityCase.java 2025-09-21T18:23:36.488362389Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/cases/ContextDefinitionSizeScalibilityCase.java 2025-09-21T18:23:36.470361814Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/cases/DataDefinedContextDependentAttributeScalibilityCase.java 2025-09-21T18:23:36.432360601Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/cases/DataScalibilityCase.java 2025-09-21T18:23:36.375358782Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/cases/DataUndefinedContextDependentAttributeScalibilityCase.java 2025-09-21T18:23:36.446361048Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/cases/NodeDefinedContextDependentAttributeScalibilityCase.java 2025-09-21T18:23:36.400359580Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/cases/NodeUndefinedContextDependentAttribute.java 2025-09-21T18:23:36.416360090Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/cases/PropagationScalibilityCase.java 2025-09-21T18:23:36.336357537Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/cases/PurposeScalibilityCase.java 2025-09-21T18:23:36.506362963Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/cases/RoleScalibilityCase.java 2025-09-21T18:23:36.357358207Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/exporter/GraphExporter.java 2025-09-21T18:23:36.580365325Z
+bundles/mdpa.gdpr.analysis.validation/src/mdpa/gdpr/analysis/validation/exporter/ResultExporter.java 2025-09-21T18:23:36.605366123Z
+bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/GDPRLegalAssessmentAnalysis.java 2025-09-23T11:55:29.322761164Z
+bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/GDPRLegalAssessmentAnalysisBuilder.java 2025-09-23T11:55:29.392761100Z
+bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/UncertaintyUtils.java 2025-09-23T11:55:29.519760983Z
+bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/ContextAttributeState.java 2025-09-23T11:55:28.980761479Z
+bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/ContextDependentAttributeScenario.java 2025-09-23T11:55:29.072761394Z
+bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/ContextDependentAttributeSource.java 2025-09-23T11:55:29.149761323Z
+bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/TransformationManager.java 2025-09-23T11:55:29.243761237Z
+bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/resource/GDPRResourceProvider.java 2025-09-23T11:55:28.812761634Z
+bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/core/resource/GDPRURIResourceProvider.java 2025-09-23T11:55:28.881761570Z
+bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DFDGDPRFlowGraphCollection.java 2025-09-23T11:55:28.433761984Z
+bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DFDGDPRTransposeFlowGraph.java 2025-09-23T11:55:28.743761698Z
+bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DFDGDPRVertex.java 2025-09-22T07:29:08.355066418Z
+bundles/mdpa.gdpr.analysis/src/mdpa/gdpr/analysis/dfd/DataFlowDiagramAndDataDictionary.java 2025-09-23T11:55:28.475761946Z
+tests/mdpa.gdpr.analysis.testmodels/src/mdpa/gdpr/analysis/testmodels/Activator.java 2024-07-07T12:32:19.169183894Z
+tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/BankingTest.java 2025-09-21T18:23:36.661367910Z
+tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/BaseTest.java 2025-09-21T18:23:36.674368325Z
+tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/validation/TrainModelEvaluation.java 2025-09-22T09:52:29.312019400Z
+tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/validation/TravelPlannerEvaluation.java 2025-09-22T09:52:29.312019400Z
+tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/validation/ValidationBase.java 2025-09-21T18:23:36.724369921Z
+tests/mdpa.gdpr.analysis.validation.testmodels/src/mdpa/gdpr/analysis/validation/testmodels/Activator.java 2024-07-24T06:32:57.505570189Z
diff --git a/tests/mdpa.gdpr.analysis.testmodels/models/Banking/default.contextproperties b/tests/mdpa.gdpr.analysis.testmodels/models/Banking/default.contextproperties
index e1b18f7..825a8bb 100644
--- a/tests/mdpa.gdpr.analysis.testmodels/models/Banking/default.contextproperties
+++ b/tests/mdpa.gdpr.analysis.testmodels/models/Banking/default.contextproperties
@@ -1,22 +1,22 @@
-
-
-
-
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/mdpa.gdpr.analysis.testmodels/models/TrainModel/default.contextproperties b/tests/mdpa.gdpr.analysis.testmodels/models/TrainModel/default.contextproperties
index 92e1d47..d9063c7 100644
--- a/tests/mdpa.gdpr.analysis.testmodels/models/TrainModel/default.contextproperties
+++ b/tests/mdpa.gdpr.analysis.testmodels/models/TrainModel/default.contextproperties
@@ -1,29 +1,29 @@
-
-
-
-
+
+
+
+
-
-
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/mdpa.gdpr.analysis.testmodels/models/TravelPlanner/default.contextproperties b/tests/mdpa.gdpr.analysis.testmodels/models/TravelPlanner/default.contextproperties
index c2a11da..40ffd94 100644
--- a/tests/mdpa.gdpr.analysis.testmodels/models/TravelPlanner/default.contextproperties
+++ b/tests/mdpa.gdpr.analysis.testmodels/models/TravelPlanner/default.contextproperties
@@ -1,16 +1,16 @@
-
-
-
+
+
+
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/mdpa.gdpr.analysis.tests/converted.datadictionary b/tests/mdpa.gdpr.analysis.tests/converted.datadictionary
new file mode 100644
index 0000000..260037c
--- /dev/null
+++ b/tests/mdpa.gdpr.analysis.tests/converted.datadictionary
@@ -0,0 +1,111 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/mdpa.gdpr.analysis.tests/converted.dataflowdiagram b/tests/mdpa.gdpr.analysis.tests/converted.dataflowdiagram
new file mode 100644
index 0000000..1d14b01
--- /dev/null
+++ b/tests/mdpa.gdpr.analysis.tests/converted.dataflowdiagram
@@ -0,0 +1,139 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tests/mdpa.gdpr.analysis.tests/mdpa.gdpr.analysis.tests.iml b/tests/mdpa.gdpr.analysis.tests/mdpa.gdpr.analysis.tests.iml
index 80c152c..6a37853 100644
--- a/tests/mdpa.gdpr.analysis.tests/mdpa.gdpr.analysis.tests.iml
+++ b/tests/mdpa.gdpr.analysis.tests/mdpa.gdpr.analysis.tests.iml
@@ -4,7 +4,7 @@
-
+
diff --git a/tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/validation/TrainModelEvaluation.java b/tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/validation/TrainModelEvaluationTest.java
similarity index 87%
rename from tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/validation/TrainModelEvaluation.java
rename to tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/validation/TrainModelEvaluationTest.java
index 9fcc6cf..3083d0a 100644
--- a/tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/validation/TrainModelEvaluation.java
+++ b/tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/validation/TrainModelEvaluationTest.java
@@ -1,29 +1,35 @@
package mdpa.gdpr.analysis.tests.validation;
-import java.util.List;
-import java.util.stream.Collectors;
import mdpa.gdpr.analysis.core.ContextDependentAttributeScenario;
import mdpa.gdpr.analysis.dfd.DFDGDPRFlowGraphCollection;
import mdpa.gdpr.analysis.dfd.DFDGDPRTransposeFlowGraph;
import mdpa.gdpr.analysis.dfd.DFDGDPRVertex;
import org.apache.log4j.Logger;
+import org.dataflowanalysis.analysis.utils.LoggerManager;
import org.dataflowanalysis.dfd.dataflowdiagram.Node;
import org.junit.jupiter.api.Test;
-public class TrainModelEvaluation extends ValidationBase {
- private Logger logger = Logger.getLogger(TrainModelEvaluation.class);
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class TrainModelEvaluationTest extends ValidationBase {
+ private Logger logger = LoggerManager.getLogger(TrainModelEvaluationTest.class);
- public TrainModelEvaluation() {
+ public TrainModelEvaluationTest() {
super("default", "models/TrainModel");
}
@Test
public void testFlowGraphAmount() {
DFDGDPRFlowGraphCollection flowGraphs = (DFDGDPRFlowGraphCollection) this.analysis.findFlowGraphs();
+ assertEquals(3, flowGraphs.getTransposeFlowGraphs().size());
var alternateFlowGraphs = flowGraphs.resolveContextDependentAttributes();
logger.info("Number of TFGs: " + alternateFlowGraphs.getTransposeFlowGraphs()
.size());
+ assertEquals(6, alternateFlowGraphs.getTransposeFlowGraphs().size());
for (var tfg : alternateFlowGraphs.getTransposeFlowGraphs()) {
var gdprTFG = (DFDGDPRTransposeFlowGraph) tfg;
System.out.println("---- State: " + gdprTFG.getContextAttributeState() + " -----------");
@@ -40,32 +46,38 @@ public void testImpactAmount() {
DFDGDPRFlowGraphCollection flowGraphs = (DFDGDPRFlowGraphCollection) this.analysis.findFlowGraphs();
var alternateFlowGraphs = flowGraphs.resolveContextDependentAttributes();
+ int affectedFlowGraphCount = 0;
for (DFDGDPRTransposeFlowGraph transposeFlowGraph : alternateFlowGraphs.getTransposeFlowGraphs()
.stream()
.filter(DFDGDPRTransposeFlowGraph.class::isInstance)
.map(DFDGDPRTransposeFlowGraph.class::cast)
.toList()) {
List impactScenarios = transposeFlowGraph.getContextAttributeState()
- .getSelectedScenarios()
+ .selectedScenarios()
.stream()
.toList();
var impactedElements = this.getImpactedElements(transposeFlowGraph, impactScenarios);
System.out.println("---- State: " + transposeFlowGraph.getContextAttributeState() + " -----------");
System.out.println("---- Impacted Elements: -----");
+ if (!impactedElements.isEmpty()) {
+ affectedFlowGraphCount++;
+ }
for (var vertex : impactedElements) {
var referencedElement = (Node) vertex.getReferencedElement();
System.out.println(referencedElement.getEntityName() + ", " + referencedElement.getId());
}
System.out.println();
}
+ assertEquals(6, affectedFlowGraphCount);
}
@Test
public void testViolations() {
- // logger.setLevel(Level.INFO);
DFDGDPRFlowGraphCollection flowGraphs = (DFDGDPRFlowGraphCollection) this.analysis.findFlowGraphs();
var alternateFlowGraphs = flowGraphs.resolveContextDependentAttributes();
alternateFlowGraphs.evaluate();
+
+ int violatingFlowGraphCount = 0;
for (DFDGDPRTransposeFlowGraph flowGraph : alternateFlowGraphs.getTransposeFlowGraphs()
.stream()
.filter(DFDGDPRTransposeFlowGraph.class::isInstance)
@@ -107,6 +119,7 @@ public void testViolations() {
logger.debug("------------------------");
continue;
}
+ violatingFlowGraphCount++;
System.out.println("---- State: " + flowGraph.getContextAttributeState() + " -----------");
System.out.println("---- Impacted Elements: -----");
for (var vertex : flowGraph.getVertices()) {
@@ -116,5 +129,6 @@ public void testViolations() {
System.out.println("---- Violations: " + violations);
System.out.println();
}
+ assertEquals(3, violatingFlowGraphCount);
}
}
diff --git a/tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/validation/TravelPlannerEvaluation.java b/tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/validation/TravelPlannerEvaluationTest.java
similarity index 85%
rename from tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/validation/TravelPlannerEvaluation.java
rename to tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/validation/TravelPlannerEvaluationTest.java
index 75f76ca..35a4da4 100644
--- a/tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/validation/TravelPlannerEvaluation.java
+++ b/tests/mdpa.gdpr.analysis.tests/src/mdpa/gdpr/analysis/tests/validation/TravelPlannerEvaluationTest.java
@@ -7,23 +7,29 @@
import mdpa.gdpr.analysis.dfd.DFDGDPRTransposeFlowGraph;
import mdpa.gdpr.analysis.dfd.DFDGDPRVertex;
import org.apache.log4j.Logger;
+import org.dataflowanalysis.analysis.utils.LoggerManager;
import org.dataflowanalysis.dfd.dataflowdiagram.Node;
import org.junit.jupiter.api.Test;
-public class TravelPlannerEvaluation extends ValidationBase {
- private Logger logger = Logger.getLogger(TravelPlannerEvaluation.class);
+import static org.junit.jupiter.api.Assertions.assertEquals;
- public TravelPlannerEvaluation() {
+public class TravelPlannerEvaluationTest extends ValidationBase {
+ private final Logger logger = LoggerManager.getLogger(TravelPlannerEvaluationTest.class);
+
+ public TravelPlannerEvaluationTest() {
super("default", "models/TravelPlanner");
}
@Test
public void testFlowGraphAmount() {
DFDGDPRFlowGraphCollection flowGraphs = (DFDGDPRFlowGraphCollection) this.analysis.findFlowGraphs();
- var alternateFlowGraphs = flowGraphs.resolveContextDependentAttributes();
+ assertEquals(1, flowGraphs.getTransposeFlowGraphs().size());
+ var alternateFlowGraphs = flowGraphs.resolveContextDependentAttributes();
logger.info("Number of TFGs: " + alternateFlowGraphs.getTransposeFlowGraphs()
.size());
+ assertEquals(2, alternateFlowGraphs.getTransposeFlowGraphs().size());
+
for (var tfg : alternateFlowGraphs.getTransposeFlowGraphs()) {
var gdprTFG = (DFDGDPRTransposeFlowGraph) tfg;
System.out.println("---- State: " + gdprTFG.getContextAttributeState() + " -----------");
@@ -40,13 +46,14 @@ public void testImpactAmount() {
DFDGDPRFlowGraphCollection flowGraphs = (DFDGDPRFlowGraphCollection) this.analysis.findFlowGraphs();
var alternateFlowGraphs = flowGraphs.resolveContextDependentAttributes();
+ int affectedFlowGraphCount = 0;
for (DFDGDPRTransposeFlowGraph transposeFlowGraph : alternateFlowGraphs.getTransposeFlowGraphs()
.stream()
.filter(DFDGDPRTransposeFlowGraph.class::isInstance)
.map(DFDGDPRTransposeFlowGraph.class::cast)
.toList()) {
List impactScenarios = transposeFlowGraph.getContextAttributeState()
- .getSelectedScenarios()
+ .selectedScenarios()
.stream()
.filter(it -> !it.getName()
.equals("UserNecessity"))
@@ -54,20 +61,25 @@ public void testImpactAmount() {
var impactedElements = this.getImpactedElements(transposeFlowGraph, impactScenarios);
System.out.println("---- State: " + transposeFlowGraph.getContextAttributeState() + " -----------");
System.out.println("---- Impacted Elements: -----");
+ if (!impactedElements.isEmpty()) {
+ affectedFlowGraphCount++;
+ }
for (var vertex : impactedElements) {
var referencedElement = (Node) vertex.getReferencedElement();
System.out.println(referencedElement.getEntityName() + ", " + referencedElement.getId());
}
System.out.println();
}
+ assertEquals(2, affectedFlowGraphCount);
}
@Test
public void testViolations() {
- // logger.setLevel(Level.INFO);
DFDGDPRFlowGraphCollection flowGraphs = (DFDGDPRFlowGraphCollection) this.analysis.findFlowGraphs();
var alternateFlowGraphs = flowGraphs.resolveContextDependentAttributes();
alternateFlowGraphs.evaluate();
+
+ int violatingFlowGraphCount = 0;
for (DFDGDPRTransposeFlowGraph flowGraph : alternateFlowGraphs.getTransposeFlowGraphs()
.stream()
.filter(DFDGDPRTransposeFlowGraph.class::isInstance)
@@ -103,8 +115,9 @@ public void testViolations() {
logger.debug("------------------------");
continue;
}
+ violatingFlowGraphCount++;
var sourcesString = flowGraph.getContextAttributeState()
- .getSelectedScenarios()
+ .selectedScenarios()
.stream()
.map(it -> it.getName())
.collect(Collectors.joining(","));
@@ -112,5 +125,6 @@ public void testViolations() {
logger.info("Violating vertices:" + violations);
logger.info("------------------------");
}
+ assertEquals(1, violatingFlowGraphCount);
}
}
diff --git a/tests/target/spotless-index b/tests/target/spotless-index
new file mode 100644
index 0000000..10f36f2
--- /dev/null
+++ b/tests/target/spotless-index
@@ -0,0 +1 @@
+DFm3Q+MAka/mhcx+cq8iS3YzvVInDRbsbjw314494s8=