How can I convert a file.rst to h5dpf format?

Ayush Kumar
Ayush Kumar Member, Moderator, Employee Posts: 434
250 Likes Solution Developer Community of Practice Member Ansys Employee First Anniversary
✭✭✭✭

How can I convert a file.rst to h5dpf format?

Tagged:

Comments

  • Ayush Kumar
    Ayush Kumar Member, Moderator, Employee Posts: 434
    250 Likes Solution Developer Community of Practice Member Ansys Employee First Anniversary
    ✭✭✭✭

    Please change the `ansys_path` argument in the `dpf.start_local_server(...)` call to your Ansys installation directory. The feature works from 2024R2 onwards.
    This example will write out only Nodal displacements for all time-steps. Refer to migrate_to_h5dpf operator for more information.

    """Convert an MAPDL result file (file.rst) to the compressed h5dpf format.

    Uses the DPF ``migrate_to_h5dpf`` operator (available from 2024R2 onwards)
    with a filtering workflow (export only nodal displacement "U") and a
    POD-based compression workflow. Writes ``file.h5`` next to ``file.rst``
    in the current working directory.
    """
    import os

    from ansys.dpf import core as dpf

    # The h5dpf migration feature requires the premium server context.
    dpf.set_default_server_context(dpf.AvailableServerContexts.premium)
    # Change this path to your Ansys installation folder (2024R2 or newer).
    dpf.start_local_server(ansys_path=r"C:\Program Files\ANSYS Inc\v242")

    cwd = os.getcwd()
    rst_file_path = os.path.join(cwd, "file.rst")
    h5_file = os.path.join(cwd, "file.h5")
    if not os.path.exists(rst_file_path):
        # A missing input file is a FileNotFoundError, not a NameError
        # (NameError is reserved for undefined Python identifiers).
        raise FileNotFoundError("No RST found in the working directory for conversion to h5dpf!")
    data_sources_rst = dpf.DataSources(rst_file_path)

    # Locate the server-side symbolic-workflow directory.
    # NOTE(review): subpath pin value 4 selects the workflows folder in this
    # example — confirm against the server_path operator documentation.
    server_op = dpf.operators.utility.server_path()
    server_op.inputs.subpath.connect(4)
    workflows_path = server_op.outputs.path()

    # --- Filtering workflow: forward only the requested result pins ---
    filtering_wf_op = dpf.operators.serialization.import_symbolic_workflow()
    filtering_wf_op.inputs.string_or_path.connect(workflows_path + r'\filtering\pass_input_pins.swf')
    filtering_wf_op.inputs.format.connect(1)  # presumably 1 = .swf text format — verify with operator docs
    pass_input_pins = filtering_wf_op.outputs.workflow()
    pass_input_pins.connect("bool_rotate_to_global", False)

    # Wrap the filtering workflow in a generic data container (GDC)
    # so it can be attached to the migration operator.
    filtering_gdc_op = dpf.Operator("forward_to_gdc")
    filtering_gdc_op.connect(0, "default")
    filtering_gdc_op.connect(1, pass_input_pins)

    # --- Compression workflow: POD (reduced-order) compression ---
    compression_wf_op = dpf.operators.serialization.import_symbolic_workflow()
    compression_wf_op.inputs.string_or_path.connect(workflows_path + r'\compression\pod_compression.swf')
    compression_wf_op.inputs.format.connect(1)
    rom_wf = compression_wf_op.outputs.workflow()
    rom_wf.connect("pod_threshold", 1e-5)  # POD truncation threshold

    # Wrap the compression workflow in a GDC as well.
    compression_gdc_op = dpf.Operator("forward_to_gdc")
    compression_gdc_op.connect(0, "default")
    compression_gdc_op.connect(1, rom_wf)

    # --- Migration: write the h5dpf file ---
    migrate_op = dpf.operators.result.migrate_to_h5dpf()
    migrate_op.connect(-2, 1)  # pin -2: GZIP compression level
    migrate_op.inputs.export_floats.connect(False)  # True would convert doubles to floats
    migrate_op.inputs.filename.connect(h5_file)
    migrate_op.connect(1, "U")  # pin 1: result(s) to export — "U" = nodal displacement
    migrate_op.inputs.all_time_sets.connect(True)  # export every time step
    migrate_op.inputs.data_sources.connect(data_sources_rst)
    migrate_op.connect(7, filtering_gdc_op, 0)   # pin 7: filtering workflow GDC
    migrate_op.connect(6, compression_gdc_op, 0)  # pin 6: compression workflow GDC
    migrate_op.run()