How to determine the element distribution per core for a distributed solution
This was a request for some training material.
The challenge is to create a method to get the element breakdown per core when doing a distributed solve.
Note that you need to solve with My Computer, Background or RSM and bring back all files (Mechanical - File - Options - Results - RSM File Manager - RSM Output Files Download - Show). The partial code below then uses the os.walk function to collect all the partial .rst files, reads each one, and puts its element ids in a dictionary that uses the number of the file (file1, file2...) as the key.
def define_dpf_workflow(analysis):
    import mech_dpf
    import Ans.DataProcessing as dpf
    import os

    rstFiles = []
    elemDict = {}

    wd = ExtAPI.DataModel.Project.Model.Analyses[0].WorkingDir
    # Collect the partial result files (file0.rst, file1.rst, ...),
    # skipping the combined file.rst
    for root, dirs, files in os.walk(wd):
        for file in files:
            if file.endswith('rst') and file != 'file.rst':
                ExtAPI.Log.WriteMessage(file)
                rstFiles.append(os.path.join(root, file))

    # Read each partial file and store its element ids, keyed by the
    # file number ('file1.rst' -> '1', 'file2.rst' -> '2', ...)
    for file in rstFiles:
        model = dpf.Model(dpf.DataSources(file))
        meshElems = model.Mesh.ElementIds
        ekey = file.split(os.sep)[-1].split('.')[0].split('e')[-1]
        elemDict[ekey] = meshElems
        #ExtAPI.Log.WriteMessage('my cpu --> ' + ekey)

    mech_dpf.setExtAPI(ExtAPI)
    dataSource = dpf.DataSources(analysis.ResultFileName)
    # ... here the field creation and plot will be added
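As a quick sanity check, here is a minimal sketch (not part of the original snippet; it assumes the elemDict and dataSource variables from the code above): in a distributed solve every element belongs to exactly one domain, so the element ids collected from the partial files should add up to the element count of the combined file.rst.

    # Sanity check (sketch): the partial files together should cover
    # the whole model. Assumes elemDict and dataSource from above.
    full_model = dpf.Model(dataSource)
    total_partial = sum(len(ids) for ids in elemDict.values())
    ExtAPI.Log.WriteMessage('elements in partial files: ' + str(total_partial))
    ExtAPI.Log.WriteMessage('elements in full model: ' + str(full_model.Mesh.ElementCount))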
Comments
def define_dpf_workflow(analysis):
    import mech_dpf, os
    import Ans.DataProcessing as dpf
    mech_dpf.setExtAPI(ExtAPI)

    analysis1 = analysis
    path = analysis1.Solution.ResultFileDirectory
    files = os.listdir(path)
    # The partial result files are the .rst files with a digit in the
    # name (file0.rst, file1.rst, ...)
    rstfiles = filter(lambda x: ".rst" in x and any(char.isdigit() for char in x), files)
    number_of_distributed_processor = len(rstfiles)

    # Result data
    result_file_path = analysis1.ResultFileName
    path, filename = os.path.split(result_file_path)

    my_fields = []
    for processor in range(0, number_of_distributed_processor):
        filename1 = filename.Split(".")[0] + "%s" % (processor) + "." + filename.Split(".")[1]
        distributed_file_path = os.path.join(path, filename1)
        dataSource = dpf.DataSources(distributed_file_path)
        # Model and mesh of this partition
        my_model = dpf.Model(distributed_file_path)
        my_mesh = my_model.Mesh
        # Create your own field: the processor number, assigned to
        # every element of this partition
        my_field = dpf.FieldsFactory.CreateScalarField(my_mesh.ElementCount)
        my_data = [float(processor)] * my_mesh.ElementCount
        my_field.Data = my_data
        my_field.ScopingIds = my_mesh.ElementIds
        my_field.Location = "Elemental"
        my_fields.append(my_field)

    # Merge the per-processor fields pairwise into a single field
    merged_field = my_fields[0]
    for field_num in range(1, len(my_fields)):
        merged_field = dpf.operators.utility.merge_fields(fields1=merged_field, fields2=my_fields[field_num])

    op = dpf.operators.math.scale(field=merged_field, ponderation=1.0)

    dpf_workflow = dpf.Workflow()
    dpf_workflow.Add(op)
    dpf_workflow.SetOutputContour(op)
    dpf_workflow.Record('wf_id', False)
    this.WorkflowId = dpf_workflow.GetRecordedId()
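A note on the last few operators: merge_fields is used pairwise here, hence the loop, and the scale by a ponderation of 1.0 does not change the data; it presumably serves only to expose the merged field through an operator output, since SetOutputContour is called with an operator. The resulting contour colors each element with the number (0 to N-1) of the process that owned it.

Both snippets are written as the define_dpf_workflow callback of a Python Result object in Mechanical (that is where this.WorkflowId comes from). For completeness, a sketch of the wiring, assuming the standard Python Result template:

def post_started(sender, analysis):
    # Called by Mechanical when post-processing starts; it hands the
    # analysis over to the workflow definition above
    define_dpf_workflow(analysis)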