I wanted to share an example of how to store and retrieve data in attributes in ACT. When time_history=True in an ACT extension, the Evaluate function is called for every time point. This can significantly slow things down, especially when using DPF. DPF is efficient at retrieving the data for all time points at once using a fields container, so it does not make sense to call the result operator once per time point.
This example shows how to use attributes to store and retrieve the DPF field container data.
Key points to note:
1. Attributes cannot store DPF data structures (fields, field_containers). We need to store the data as a list() or dict() (or scalar).
2. The DPF results are NOT extracted in the geometry scoping order; they are extracted in whatever order is most efficient, so the order may differ from the one requested. The collector from ACT wants the data back in the order it provided. We could loop through and use fc.GetDataById(), but that is unnecessarily slow.
The rescope operator is used to rescope the data to the same nodal scoping, and it returns the data in the order of that new scoping. Now we can use collector.SetAllValues(), which is much faster.
# Mechanical ACT DPF helper modules (available inside the Mechanical
# IronPython environment; not installable from PyPI).
import mech_dpf
import Ans.DataProcessing as dpf
# Hand the ACT application object to the DPF helper module so its
# operators can resolve the current Mechanical session.
mech_dpf.setExtAPI(ExtAPI)
#
def CreateRes(analysis):
    """Create the custom nodal result object for *analysis*.

    The created object is kept in the module-level ``myres`` so other
    callbacks in this extension can reach it.
    """
    global myres
    current_extension = ExtAPI.ExtensionManager.CurrentExtension
    myres = analysis.CreateResultObject("Gen_Nodal_Result", current_extension)
def ResEval(result, stepInfo, collector):
analysis = result.Analysis
step = stepInfo.Set
if step == 1:
rstFile=analysis.ResultFileName
dataSource = dpf.DataSources(rstFile)
model=dpf.Model(dataSource)
mesh = model.MeshProvider
# Set up Time Scoping
time_op = dpf.operators.metadata.time_freq_provider()
time_op.inputs.data_sources.Connect(dataSource)
time_freq_support = time_op.outputs.time_freq_support.GetData()
timeScop = dpf.Scoping()
timeScop.Ids = range(1,time_freq_support.NumberSets+1)
scopNodes = dpf.Scoping()
scopNodes.Location = 'Nodal'
scopNodes.Ids = collector.Ids
#ExtAPI.Log.WriteMessage(str(collector.Ids))
s_op=dpf.operators.result.stress()
s_op.inputs.data_sources.Connect(dataSource)
s_op.inputs.time_scoping.Connect(timeScop)
s_op.inputs.requested_location.Connect('Nodal')
s_op.inputs.mesh_scoping.Connect(scopNodes)
s_comp=s_op.outputs.fields_container.GetData()
# Obtain input from details
Stress_Com = result.Properties["Stress_Comp"].Value
# Define macro argument input from input
if Stress_Com == "SX":
comp = 0
elif Stress_Com == "SY":
comp = 1
elif Stress_Com == "SZ":
comp = 2
elif Stress_Com == "SXY":
comp = 3
elif Stress_Com == "SYZ":
comp = 4
elif Stress_Com == "SXZ":
comp = 5
component_selector_op = dpf.operators.logic.component_selector_fc()
component_selector_op.inputs.fields_container.Connect(s_comp)
component_selector_op.inputs.component_number.Connect(comp)
comp_s = component_selector_op.outputs.fields_container.GetData()
# We use rescope here to get the data back to the original mesh scoping order
# Rescope does not change the data, it just reorders it according to the new scoping
resc_op = dpf.operators.scoping.rescope_fc()
resc_op.inputs.fields_container.Connect(comp_s)
resc_op.inputs.mesh_scoping.Connect(scopNodes)
resc_op.inputs.default_value.Connect(0.0)
newcomp_s=resc_op.outputs.fields_container.GetData()
collector.SetAllValues(newcomp_s[step-1].Data)
# Cache the data into an attribute for subsequent steps
# Attributes cannot store DPF fields directly, so we store the data arrays instead
result.Attributes.SetValue("DPFData", [newcomp_s[i].Data for i in range(newcomp_s.FieldCount)])
else:
# For subsequent steps, just return the cached values.
comp_s = result.Attributes.GetValue("DPFData")
collector.SetAllValues(comp_s[step-1])
def resOnClearData(result):
    """Discard the cached DPF data so the next evaluation re-extracts it."""
    result.Attributes.SetValue("DPFData", None)