Open H6hh opened 7 months ago
1027295113@qq.com
You can use "array = np.load('fname.npy')" in python with numpy to unzip the data from the npy file into an ndarray. The array will be of size [15/16, 101, 101]. The first channel is the layer id of the laminates. The second and the third channels are the images of the damage variables on the nodes. You can further save the array to .csv files using 'np.savetxt('path_to_csv.csv', array[i, :, :], delimiter=',')', with 0 <= i < 16.
Guowener.Wang @.***
------------------ Original ------------------ From: "Zhenpeng @.>; Date: 2024年4月2日(星期二) 晚上7:39 To: @.>; Cc: @.***>; Subject: [GW-Wang-thu/VQ-SM] some questions (Issue #1)
Hello! Can you share with me how to get the data set npy file in your google drive? Is it converted from images to npy files? Can you share the code? Thank you!
— Reply to this email directly, view it on GitHub, or unsubscribe. You are receiving this because you are subscribed to this thread.Message ID: @.***>
You can use "array = np.load('fname.npy')" in python with numpy to unzip the data from npy file into ndarray. The array will be of size [15/16, 101, 101] . The first channel is the layer id of laminates. The second and the third channel are the image of the damage variables on the node. You can futher save the array to .csv files using 'np.savetxt('path_to_csv.csv', array[i, :, :], delimiter=',')', with 0 <= i < 16. Guowener.Wang @. … ------------------ Original ------------------ From: "Zhenpeng @.>; Date: 2024年4月2日(星期二) 晚上7:39 To: @.>; Cc: @.>; Subject: [GW-Wang-thu/VQ-SM] some questions (Issue #1) Hello! Can you share with me how to get the data set npy file in your google drive? Is it converted from images to npy files? Can you share the code? Thank you! — Reply to this email directly, view it on GitHub, or unsubscribe. You are receiving this because you are subscribed to this thread.Message ID: @.***>
Sorry, I was wondering how the damage image samples you got from ABAQUS were converted to .NPY files?
The code below, running in the Abaqus Python environment, converts the required odb results to .rpt files.
# Runs inside the Abaqus/CAE Python environment: for every .odb in odb_path,
# export the COORD/U/S/SDEG/SDV1-4 field results to a .rpt text report.
# NOTE(review): reconstructed from a markdown-mangled one-line paste — the
# star imports, '_' separators and '\\' escapes were eaten by formatting.
from abaqus import *
from abaqusConstants import *
from caeModules import *
from driverUtils import executeOnCaeStartup
import os

odb_path = r'F:\DATASET\LVI_SM\odb'
rpt_path = r'F:\DATASET\LVI_SM\rpt4'
all_files = os.listdir(odb_path)
all_odbs = [os.path.join(odb_path, f) for f in all_files if f.endswith('.odb')]

for fname in all_odbs:
    temp_fname = fname.split('\\')[-1].split('.')[0]
    # Job names ending in an "S<load>" token encode the frame to report;
    # others default to frame 20.  Abaqus ships Python 2, so "/" here is
    # integer division — TODO confirm against an actual job name.
    if temp_fname.split('_')[-1][0] == "S":
        n_step = int(temp_fname.split('_')[-1][1:]) / 25
    else:
        n_step = 20
    # Skip odbs that were already exported.
    if os.path.exists(rpt_path + '\\' + temp_fname + '.rpt'):
        continue
    o1 = session.openOdb(name=fname)
    session.viewports['Viewport: 1'].setValues(displayedObject=o1)
    session.viewports['Viewport: 1'].odbDisplay.setFrame(step=0, frame=-1)
    # Restrict the report to the cohesive + laminate element sets.
    leaf = dgo.LeafFromElementSets(elementSets=("LAMINATES-1.ALL_COHESIVE",
                                                "LAMINATES-1.ALL_LAMINATE", ))
    session.viewports['Viewport: 1'].odbDisplay.displayGroup.replace(leaf=leaf)
    session.writeFieldReport(
        fileName=rpt_path + '\\' + temp_fname + '.rpt', append=ON,
        sortItem='Node Label', odb=o1, step=0, frame=n_step,
        outputPosition=NODAL,
        variable=(('COORD', NODAL, ((COMPONENT, 'COOR1'), (COMPONENT, 'COOR2'),
                                    (COMPONENT, 'COOR3'), )),
                  ('U', NODAL, ((COMPONENT, 'U1'), (COMPONENT, 'U2'),
                                (COMPONENT, 'U3'), )),
                  ('S', INTEGRATION_POINT, ((INVARIANT, 'Mises'), )),
                  ('SDEG', INTEGRATION_POINT),
                  ('SDV1', INTEGRATION_POINT),
                  ('SDV2', INTEGRATION_POINT),
                  ('SDV3', INTEGRATION_POINT),
                  ('SDV4', INTEGRATION_POINT), ),
        stepFrame=SPECIFY)
    o1.close()
And the following python script converts the .rpt files to mat/npy arrays.
import numpy as np import matplotlib.pyplot as plt import os import re
plt.rcParams['font.sans-serif']=['SimHei'] # 指定默认字体 SimHei为黑体 plt.rcParams['axes.unicode_minus']=False # 用来正常显示负号
def rpt2npy(fname):
    """Parse an Abaqus .rpt field report into per-layer 101x101 damage maps.

    The report contains one table section per layer, delimited by a dashed
    header line and terminated by a blank line.  Laminate sections have 12
    numeric columns (COORD, U, Mises, SDV1-4); cohesive sections have fewer
    and their last column is SDEG.

    Returns five lists of 2-D numpy arrays:
    (arrays_sdv1, arrays_sdv2, arrays_sdv3, arrays_sdv4, arrays_sdeg).

    NOTE(review): reconstructed from a one-line markdown-mangled paste; the
    original number-matching regex was syntactically invalid after mangling
    and has been rebuilt — confirm against a real .rpt line.
    """
    flag = False          # True while inside a table section
    layer = 0             # section counter (informational only)
    arrays_sdv1 = []
    arrays_sdv2 = []
    arrays_sdv3 = []
    arrays_sdv4 = []
    arrays_sdeg = []
    with open(fname, 'r') as f:
        while True:
            tmp_line = f.readline()
            if tmp_line == '':
                break
            # Blank line ends the current section: rasterize what we collected.
            if tmp_line.startswith('\n') and flag:
                flag = False
                if len(kws) == 12:
                    # Laminate section -> four SDV damage maps.
                    array_1, array_2, array_3, array_4 = pt2array(tmp_array, mode=0)
                    arrays_sdv1.append(array_1)
                    arrays_sdv2.append(array_2)
                    arrays_sdv3.append(array_3)
                    arrays_sdv4.append(array_4)
                else:
                    # Cohesive section -> single SDEG map.
                    array = pt2array(tmp_array, mode=1)
                    arrays_sdeg.append(array)
            if flag:
                # Extract numeric tokens (plain or scientific notation);
                # drop truncated tokens that end in a bare 'E'.
                kws = [float(s)
                       for s in re.findall(r'-?\d+\.?\d*E?[-+]?\d?\d?', tmp_line)
                       if s[-1] != 'E']
                # Undeformed position = current coordinate minus displacement.
                x_init = kws[1] - kws[4]
                y_init = kws[2] - kws[5]
                z_init = kws[3] - kws[6]
                # Keep only nodes inside the sampled window that sit on a
                # 0.25-spaced ply plane (tolerance on y).
                if (x_init > 14.5) and (x_init < 85.5) and (z_init > -35.5) and \
                        (z_init < 35.5) and (round(y_init, 3) % 0.25 < 0.2):
                    if len(kws) == 12:
                        sdv_1 = kws[-4]
                        sdv_2 = kws[-3]
                        sdv_3 = kws[-2]
                        sdv_4 = kws[-1]
                        vect = [x_init, y_init, z_init, sdv_1, sdv_2, sdv_3, sdv_4]
                        tmp_array.append(vect)
                    else:
                        sdeg = kws[-1]
                        vect = [x_init, y_init, z_init, sdeg]
                        tmp_array.append(vect)
            # Dashed line marks the start of a new table section.
            if tmp_line.startswith('--------------------------------------'):
                layer += 1
                flag = True
                tmp_array = []
    return arrays_sdv1, arrays_sdv2, arrays_sdv3, arrays_sdv4, arrays_sdeg
def pt2array(tmp_array, step=0.7, array_shape=(101, 101), st_x=15, st_z=-35, mode=0):
    """Rasterize scattered node records onto a regular (x, z) grid.

    Each record's x (index 0) and z (index 2) coordinates are snapped to the
    nearest grid cell of spacing *step*, anchored at (st_x, st_z).

    mode == 0: records carry four trailing values (SDV1-4); returns four
    arrays of shape *array_shape*.
    mode != 0: records carry one trailing value (SDEG); returns one array.
    """
    def _cell(record):
        # Nearest grid indices for this record's (x, z) position.
        row = int(round((record[0] - st_x) / step, 0))
        col = int(round((record[2] - st_z) / step, 0))
        return row, col

    if mode == 0:
        planes = [np.zeros(shape=array_shape) for _ in range(4)]
        for record in tmp_array:
            row, col = _cell(record)
            # Last four entries map one-to-one onto the four planes.
            for plane, value in zip(planes, record[-4:]):
                plane[row, col] = value
        return planes[0], planes[1], planes[2], planes[3]
    else:
        plane = np.zeros(shape=array_shape)
        for record in tmp_array:
            row, col = _cell(record)
            plane[row, col] = record[-1]
        return plane
# Batch-convert every .rpt report under rpt_path into per-variable .npy
# stacks (one file each for SDEG and the four SDV damage variables).
if __name__ == '__main__':  # fixed: markdown had eaten the dunder underscores
    rpt_path = r'F:\DATASET\LVI_SM\rpt4'
    npy_path = r'F:\DATASET\LVI_SM\npy\all4'
    all_files = os.listdir(rpt_path)
    all_rpts = [os.path.join(rpt_path, f) for f in all_files if f.endswith('.rpt')]
    data_samples = []
    for fname in all_rpts:
        temp_fname = fname.split('\\')[-1].split('.')[0]
        # First three underscore tokens encode the load-case parameters;
        # presumably (velocity, mass, angle) scaled — TODO confirm naming.
        temp_kws = temp_fname.split('_')[0:3]
        data_samples.append([float(temp_kws[0][1:]) / 100,
                             float(temp_kws[1][1:]) / 1000,
                             float(temp_kws[2][1:]) / 100])
        # Skip samples already converted (last file written marks completion).
        if os.path.exists(npy_path + '\\' + temp_fname + '_MCDMG.npy'):
            continue
        sdv1, sdv2, sdv3, sdv4, sdeg = rpt2npy(fname=fname)
        # Prepend an all-zero interface layer so the SDEG stack lines up with
        # the per-ply SDV stacks (cohesive layers are one fewer than plies).
        zero_array = np.zeros_like(sdeg[0])
        sdeg = [zero_array, *sdeg]
        np.save(npy_path + '\\' + temp_fname + '_SDEG.npy', np.array(sdeg))
        np.save(npy_path + '\\' + temp_fname + '_FTDMG.npy', np.array(sdv1))
        np.save(npy_path + '\\' + temp_fname + '_FCDMG.npy', np.array(sdv2))
        np.save(npy_path + '\\' + temp_fname + '_MTDMG.npy', np.array(sdv3))
        np.save(npy_path + '\\' + temp_fname + '_MCDMG.npy', np.array(sdv4))
Hope that will help!
Guowener.Wang @.***
------------------ Original ------------------ From: "GW-Wang-thu/VQ-SM" @.>; Date: Tue, Apr 2, 2024 08:06 PM @.>; @.**@.>; Subject: Re: [GW-Wang-thu/VQ-SM] some questions (Issue #1)
You can use "array = np.load('fname.npy')" in python with numpy to unzip the data from npy file into ndarray. The array will be of size [15/16, 101, 101] . The first channel is the layer id of laminates. The second and the third channel are the image of the damage variables on the node. You can futher save the array to .csv files using 'np.savetxt('path_to_csv.csv', array[i, :, :], delimiter=',')', with 0 <= i < 16. Guowener.Wang @.*** … ------------------ Original ------------------ From: "Zhenpeng @.>; Date: 2024年4月2日(星期二) 晚上7:39 To: @.>; Cc: @.>; Subject: [GW-Wang-thu/VQ-SM] some questions (Issue #1) Hello! Can you share with me how to get the data set npy file in your google drive? Is it converted from images to npy files? Can you share the code? Thank you! — Reply to this email directly, view it on GitHub, or unsubscribe. You are receiving this because you are subscribed to this thread.Message ID: @.>
Sorry, I was wondering how the damage image samples you got from ABAQUS were converted to .NPY files?
— Reply to this email directly, view it on GitHub, or unsubscribe. You are receiving this because you commented.Message ID: @.***>
Hello! Can you share with me how to get the data set npy file in your google drive? Is it converted from images to npy files? Can you share the code? Thank you!