pyDPF - element nodal forces - Element with id x has higher number of elementary data...

total_obliteration · Member
edited May 13 in Structures

Hello,
I am trying to extract the contact reaction force in pyDPF. The code below returns:

ENF:495<-mapdl::rst::ENF:500<-Element with id 192983 has higher number of elementary data (32) than number of nodes (8). Nodal rotation to global coordinate system using Euler nodes angles can't be achieved, failed to read element nodal forces from rst file.

import glob
import pandas as pd
from ansys.dpf import core as dpf

rst_file = glob.glob('*.rst')[1]
model = dpf.Model(rst_file)
global_mesh = model.metadata.meshed_region

#results - element nodal forces
results = model.results
element_nodal_forces = results.element_nodal_forces().outputs.fields_container()
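
The element id from the error message can be looked up on the meshed region to confirm what it actually is; a minimal diagnostic sketch (untested), using the id 192983 reported above:

#diagnostic sketch (untested): inspect the element reported in the error message
elem = global_mesh.elements.element_by_id(192983)
print(elem.type)        #DPF element type (expected to be a contact element here)
print(len(elem.nodes))  #node count (8 for CONTA174, vs the 32 elementary ENF values in the error)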

My approach is as follows; I could not test it because of the error above. Any advice is appreciated.

#get NSs
get_NSs = model.metadata.available_named_selections
ns_prefix = 'CONTACT'
find_ns = [s for s in get_NSs if s.startswith(ns_prefix)]
ns = find_ns[2]
print(ns)

#select NS nodes
scoping_NS = model.metadata.named_selection(ns)
print(scoping_NS)

#filter mesh by NS scoping - result is only CONTA+TARGE elems
mesh174_op = dpf.operators.mesh.from_scoping(
    scoping=scoping_NS,
    inclusive=0,
    nodes_only=False,
    mesh=global_mesh)

mesh174 = mesh174_op.outputs.mesh()
print(mesh174)

#only CONTA+TARGE elems
etypes = mesh174.property_field("apdl_element_type")
etypes = pd.Series(etypes.data_as_list)
print(etypes.unique())

#extract and sum element nodal forces on the NS scoping
element_nodal_forces = model.results.element_nodal_forces.on_mesh_scoping(scoping_NS).eval()

element_nodal_forces_df = pd.DataFrame(element_nodal_forces[0].data, columns = ['fx', 'fy', 'fz'])
element_nodal_forces_sum = element_nodal_forces_df.sum()
print(element_nodal_forces_sum)
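
As a possible shortcut for that last step (untested, and assuming the Result helper accepts a named selection directly, the same way it accepts a mesh scoping):

#possible shortcut (untested): let DPF resolve the named selection itself
element_nodal_forces = model.results.element_nodal_forces.on_named_selection(ns).eval()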

I achieved the desired effect using pyMAPDL as shown below (forces are printed per element type for comparison, since I am trying to figure out how Mechanical calculates the contact reaction force):

#select NS and print fsum by type - CONT ONLY
etypes = ['154', '170', '174', '186', '187']
fsum_dict = {}

print('fsum - CONT ONLY')
for etype in etypes:
    mapdl.esel('all')
    mapdl.cmsel(type_='S', name=named_selection)  #named_selection: the contact NS component name, defined earlier
    mapdl.esel('r', 'ename', '', int(etype))
    ecount = mapdl.get(entity='elem', item1='count')
    
    print('etype: ' + str(int(etype)) + '; elem count: ' + str(int(ecount)))

    mapdl.fsum(item='CONT')
    f_x = round(mapdl.get(entity='FSUM', item1='ITEM', it1num='FX'), 2)
    f_y = round(mapdl.get(entity='FSUM', item1='ITEM', it1num='FY'), 2)
    f_z = round(mapdl.get(entity='FSUM', item1='ITEM', it1num='FZ'), 2)
    fsum_dict[etype] = [f_x, f_y, f_z]
print(fsum_dict)

Answers

  • Chris Harrold · Member, Administrator, Ansys Employee

    @Ramdane - can you assist? Thanks!

  • total_obliteration

    Thank you. In the meantime, I have had some success with NMISC records.
    Now I wonder what the best approach is in terms of computation time. In the example below, I read the model, the streams provider, and the time scoping, and then run a nested loop:

    for every named selection:
         read each time step
         extract NMISC data
         sum nodal forces to obtain the contact reaction force
    

    Perhaps a better approach is to read the model, streams provider, and time scoping as before, but reorder the nested loop like this:

    for every time step:
         read each named selection
         extract NMISC data
         sum nodal forces to obtain the contact reaction force
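
    A rough sketch of that reordered loop (untested; it reuses the extract_nmisc and sum_nodal_f helpers and the model, sc, ls_all and NS_list names from the script below, and collects the sums in a dictionary keyed by load step and named selection):

    #reordered loop sketch (untested): time step outer, named selection inner
    scopings_NS = {NS: model.metadata.named_selection(NS) for NS in NS_list}
    sums_by_ls = {}
    for ls in ls_all.ids:
        scoping_time = dpf.time_freq_scoping_factory.scoping_by_load_steps(load_steps=[ls])
        for NS in NS_list:
            fx, fy, fz = extract_nmisc(scoping_time, scopings_NS[NS], sc)
            _, nodal_f_sum = sum_nodal_f(fx, fy, fz)
            sums_by_ls[(ls, NS)] = nodal_f_sum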
    

    Or is it perhaps possible to FIRST extract all nodes from all the named selections I am interested in, iterate through all time steps once, and then "segregate" the nodes by named selection and sum them in post-processing?
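
    A rough sketch of that last idea (untested; it assumes the ids of the named-selection scopings can simply be merged into one scoping, and that the split back per named selection is done afterwards against each NS's own ids):

    #merged-scoping sketch (untested): one scoping covering all NSs, one nmisc pass over all time steps
    import numpy as np
    ns_scopings = {NS: model.metadata.named_selection(NS) for NS in NS_list}
    all_ids = np.unique(np.concatenate([s.ids for s in ns_scopings.values()]))
    merged_scoping = dpf.Scoping(ids=all_ids.tolist(),
                                 location=next(iter(ns_scopings.values())).location)
    fx, fy, fz = extract_nmisc(ls_all, merged_scoping, sc)
    #"post-proc" segregation: for each NS, keep only the entries whose ids are in ns_scopings[NS].ids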

    Any advice is appreciated. DPF seems to be a powerful tool, but almost 2 hours to get contact reaction forces for 3 named selections is not acceptable (granted, it is a massive 130 GB .rst file).

    import glob
    import time
    import pandas as pd
    from ansys.dpf import core as dpf

    def extract_nmisc(scoping_time, scoping_NS, sc):
        print('... reading CNFX ...')
        cont_r_f_x_op = dpf.operators.result.nmisc(
            time_scoping = scoping_time,
            mesh_scoping = scoping_NS,
            streams_container = sc,
            #data_sources = my_data_sources,
            item_index = 43) #NMISC=43 for CONTA174: CNFX
        nodal_f_x = cont_r_f_x_op.outputs.fields_container()
        
        print('... reading CNFY ...')
        cont_r_f_y_op = dpf.operators.result.nmisc(
            time_scoping = scoping_time,
            mesh_scoping = scoping_NS,
            streams_container = sc,
            #data_sources = my_data_sources,
            item_index = 44) #NMISC=44 for CONTA174: CNFY
        nodal_f_y = cont_r_f_y_op.outputs.fields_container()
        
        print('... reading CNFZ ...')
        cont_r_f_z_op = dpf.operators.result.nmisc(
            time_scoping = scoping_time,
            mesh_scoping = scoping_NS,
            streams_container = sc,
            #data_sources = my_data_sources,
            item_index = 45) #NMISC=45 for CONTA174: CNFZ
        nodal_f_z = cont_r_f_z_op.outputs.fields_container()
    
        return nodal_f_x, nodal_f_y, nodal_f_z
    
    def sum_nodal_f(nodal_f_x, nodal_f_y, nodal_f_z):
        print('... summing nodal forces ...')
        nodal_f_dict = {}
        nodal_f_sum_dict = {}
        for i in range(len(nodal_f_x)):
            nodal_f_dict['time_step_' + str(i+1)] = pd.DataFrame({'fx': nodal_f_x[i].data, 'fy': nodal_f_y[i].data, 'fz': nodal_f_z[i].data})
            nodal_f_sum_dict['time_step_' + str(i+1)] = nodal_f_dict['time_step_' + str(i+1)].sum().values
        
        nodal_f_sum = pd.DataFrame(nodal_f_sum_dict).T
        nodal_f_sum.columns = ['fx', 'fy', 'fz']
        
        return nodal_f_dict, nodal_f_sum
    
    #%%
    st = time.time()
    
    # .rst file locations
    rst_dir = "whatever"
    rst_file = glob.glob(rst_dir + '*')[0]
    
    # read model data
    print('... reading dpf.Model ...')
    model = dpf.Model(rst_file)
    print('... reading streams provider ...')
    sc = model.metadata.streams_provider.outputs.streams_container
    print('... reading time scoping ...')
    ls_all = dpf.time_freq_scoping_factory.scoping_on_all_time_freqs(model)
    #ls_all = dpf.time_freq_scoping_factory.scoping_by_load_steps(load_steps=[1, 5])
    print('... reading NSs ...')
    get_NSs = model.metadata.available_named_selections
    
    # select NS
    #NS = 'MB_TO_MB_UPPER_10_CAD_MODEL_'
    #find_NS = [s for s in get_NSs if s.startswith(NS)]
    #NS = find_NS[0]
    NS_list = ['MB_TO_MB_UPPER_10_CAD_MODEL_', 'CC_TO_MC_MAIN_CAD_MODEL_', 'CC_BEARING_6_CAD_MODEL_']
    
    print('... reading data ...')
    NS_cont_r_force_dist = {}
    for NS_i, NS in enumerate(NS_list, start = 1):
        scoping_NS = model.metadata.named_selection(NS)
        for ls in ls_all.ids:
            print('Processing NS: ' + str(NS) + ' (#' + str(NS_i) + ' out of #' + str(len(NS_list)) + ')')
            scoping_time = dpf.time_freq_scoping_factory.scoping_by_load_steps(load_steps=[ls])
            print('load step: #' + str(scoping_time.ids[0]) + ' (out of #' + str(len(ls_all)) + ')')
            
            nodal_f_x, nodal_f_y, nodal_f_z = extract_nmisc(scoping_time, scoping_NS, sc)
            nodal_f_dict, nodal_f_sum = sum_nodal_f(nodal_f_x, nodal_f_y, nodal_f_z)
            #store per NS and load step (otherwise each load step overwrites the previous result)
            NS_cont_r_force_dist.setdefault(NS, {})[ls] = nodal_f_sum
    
    et = time.time()
    et = et - st
    print('time elapsed: ' + str(et) + 's')