diff --git a/CHANGELOG b/CHANGELOG
index 73eb59cf171618bd8a472216893ee66588b6a749..f4f9c5c3c33d87ccb2d118be0c7a58b4a94f778b 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -6,6 +6,19 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+## [0.0.7] - 2020-07-03
+### Added
+- Unified properties output
+- Add universal 'if file exists' check
+- Debug main.py
+
+### Changed
+- None
+
+### Removed
+- None
+
+
 ## [0.0.6] - 2020-07-02
 ### Added
 - Initial support for gmx commands
diff --git a/main.py b/main.py
index 2ecb5f4ab62c850ce872918dcbe7993f1afa862c..315feaf1ba7a79ed588c4dbb653e1b4a0a50b8fb 100644
--- a/main.py
+++ b/main.py
@@ -14,7 +14,7 @@ NUM_FR=1
 TRAJ=SYSTEM_NAME+'/eq_traj.trr'
 GRO=SYSTEM_NAME+'/eq_final.gro'
 TPR=SYSTEM_NAME+'/eq_run.tpr'
-ITP_DIC={'NS':'CER_SOVOVA.itp','FFA':'FFA_CG.itp','CHOL':'CHOL_CG.itp'}
+#ITP_DIC={'NS':'CER_SOVOVA.itp','FFA':'FFA_CG.itp','CHOL':'CHOL_CG.itp'}
 
 ###################################################
 #            {NAME:[QUEUE OF PROCESSES]}          #
@@ -32,13 +32,13 @@ ITP_DIC={'NS':'CER_SOVOVA.itp','FFA':'FFA_CG.itp','CHOL':'CHOL_CG.itp'}
 # index: Creates unique code (md5) for every subdomain to use in data saving process
 # density: Detrmine density profile of x,y,z and save peaks of directions with the least number
 # gmx_ndx: Saves one ndx for every subdomain
-# [save, [type], save_name]: Save function and properties aka, type: pkl, json, Name
+# [save, [type], save_name]: Save result of previous function, type: pkl, json
 #
 ###################################################
 GROUPS={'ALL':['gmx_ndx','index',['save', ['pkl'],'index'],'density',['save', ['pkl'],'dens']],
        'HD_GROUP':['surf',['save', ['pkl', 'json'],'time_domain_c-normal-cg']],
        'TL_GROUP':['vector'],
-       'COMBINE':[['HD_GROUP','surf'],['TL_GROUP','vector'],['COMB','tilt']]
+       'COMBINE':[['HD_GROUP','surf'],['TL_GROUP','vector'],['COMB','tilt'],['save', ['pkl'],'tilt']]
        }
 ALL={'NS':['C6', 'Na', 'P4', 'P3', 'C7','C3', 'C4', 'C5', 'C8', 'C9', 'C10'], 'CHOL':['ROH','R1', 'R2', 'R3', 'R4', 'R5'], 'FFA':['AC','C1', 'C2', 'C3', 'C4']}
 HD_GROUP={'NS':['C6', 'Na', 'P4', 'P3', 'C7'], 'CHOL':['ROH'], 'FFA':['AC']}
@@ -55,6 +55,7 @@ print('================')
 _,data_num,_,res_num,res_type,atom_type,atom_num,_ = tbf.read_gro(GRO)
 print(' ')
 ###################################################
+#--------------------------------------------------
 #Read .itp files
 #weights={}
 #for MOL in ITP_DIC.keys():
@@ -63,6 +64,7 @@ print(' ')
 #    print(' ')
 #print(weights)
 ###################################################
+#--------------------------------------------------
 if os.path.isfile('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_data.pkl'):
     if LOG==1:
         print('WARNING: Preprocessing files exist.')
@@ -74,7 +76,40 @@ else:
     data_all=tbf.fr_export(trajfile=TRAJ,num_frames=NUM_FR)
     tbf.topickle(fl=data_all, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_data')
 ###################################################
+#--------------------------------------------------
+#Check whether save files already exist, so the functions that produced them can be skipped
+prev=0
+sv_index={}
 for i in GROUPS.keys():
+    for j in GROUPS[i]:
+        try:
+            sv_index[j]={}
+            sv_index[j]['status']='not exist'
+            sv_index[j]['name']='None'
+        except TypeError:
+            sv_index[str(j)]={}
+            sv_index[str(j)]['status']='not exist'
+            sv_index[str(j)]['name']='None'
+        if len(j)==3:
+            if j[0]=='save':
+                for k in j[1]:
+                    if k=='pkl':
+                        if os.path.isfile('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_'+j[2]+'.pkl'):
+                            sv_index[prev]['status']='exist'
+                            sv_index[prev]['name']='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_'+j[2]+'.pkl'
+                        else:
+                            sv_index[prev]['status']='not exist'
+                    if k=='json': #note: cached results are reloaded with tbf.frompickle, so the pkl copy is what matters
+                        if os.path.isfile('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_'+j[2]+'.pkl'):
+                            sv_index[prev]['status']='exist'
+                            sv_index[prev]['name']='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_'+j[2]+'.pkl'
+                        else:
+                            sv_index[prev]['status']='not exist'
+        prev=str(j)
+###################################################
+#--------------------------------------------------
+for i in GROUPS.keys():
+#non-COMBINE section
     if i!='COMBINE':
         if os.path.isfile('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_ndx.pkl'):
             if LOG==1:
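
The pre-scan above works by lookahead: a ['save', [types], name] entry marks the queue item just before it (tracked in prev) as cacheable, and sv_index records whether that item's save file is already on disk; both extension branches test the .pkl path, since cached results are reloaded with tbf.frompickle. A minimal standalone sketch of the same lookahead idea, with the hypothetical helper name prescan (not part of the patch):

    import os

    def prescan(groups, system_name):
        # Map str(entry) -> {'status', 'name'} for every queue entry;
        # lists are unhashable, so entries are keyed on their string form.
        sv_index, prev = {}, None
        for group, queue in groups.items():
            for entry in queue:
                key = str(entry)
                sv_index[key] = {'status': 'not exist', 'name': 'None'}
                if isinstance(entry, list) and len(entry) == 3 and entry[0] == 'save':
                    path = './{0}/{0}_{1}_{2}.pkl'.format(system_name, group, entry[2])
                    if prev is not None and os.path.isfile(path):
                        # The entry that produced this save file can be skipped.
                        sv_index[prev] = {'status': 'exist', 'name': path}
                prev = key
        return sv_index
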
@@ -86,7 +121,7 @@ for i in GROUPS.keys():
             #Find atom type index in lists created above
             group_ndx=tbf.atomid_data(res_num, res_type, atom_type, atom_num, group=locals()[i])
             tbf.topickle(fl=group_ndx, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_ndx')
-###################################################
+#--------------------------------------------------
         if os.path.isfile('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_box.pkl'):
             if LOG==1:
                 print('WARNING: Preprocessing files exist.')
@@ -107,25 +142,38 @@ for i in GROUPS.keys():
         _,coord_vector=tbf.sub_coord(box=box_res, data=data_all, res_num=res_num)
         for j in GROUPS[i]:
             if len(j) > 1:
-                if j=='surf':
+                if j=='surf' and sv_index[j]['status']=='not exist':
+                    if j not in locals():
+                        surf={}
+                    #Creates dictionary with c, normal per subdomain for each frame
+                    surf[i]=tbf.coord2norm2cg(coord_vector,img=False)
+                    sv_data=surf[i]
+                elif j=='surf' and sv_index[j]['status']=='exist':
                     if j not in locals():
                         surf={}
-                    #Creates dictionary with c, normal per subdomain for each frame
-                    surf[locals()[i]]=tbf.coord2norm2cg(coord_vector,img=False)
-                    sv_data=surf[locals()[i]]
-                if j=='vector':
-                    if vector not in locals():
+                    surf[i]=tbf.frompickle(sv_index[j]['name'])
+#--------------------------------------------------
+                if j=='vector' and sv_index[j]['status']=='not exist':
+                    if j not in locals():
+                        vector={}
+                    vector[i]=tbf.coord2vector(coord_vector)
+                    sv_data=vector[i]
+                elif j=='vector' and sv_index[j]['status']=='exist':
+                    if j not in locals():
                         vector={}
-                    vector[locals()[i]]=tbf.coord2vector(coord_vector)
-                    sv_data=vector[locals()[i]]
-                if j=='index':
+                    vector[i]=tbf.frompickle(sv_index[j]['name'])
+#--------------------------------------------------
+                if j=='index' and sv_index[j]['status']=='not exist':
                     uniq_id=tbgmx.ndx_index(SYSTEM_NAME)
                     sv_data=uniq_id
-                if j=='density':
+                elif j=='index' and sv_index[j]['status']=='exist':
+                    uniq_id=tbf.frompickle(sv_index[j]['name'])
+#--------------------------------------------------
+                if j=='density' and sv_index[j]['status']=='not exist':
                     dens_df={}
                     for iidd in uniq_id.keys():
                         dens_df[iidd]={}
-                        fl='./'+uniq_id[iidd]['system']+'/gmx_ndx/'+uniq_id[iidd]['domain']
+                        fl='./'+uniq_id[iidd]['system']+'/gmx_ndx/'+uniq_id[iidd]['ndx_file']
                         cnt=-1
                         for mol in locals()[i].keys():
                             cnt=cnt+1
@@ -140,11 +188,14 @@ for i in GROUPS.keys():
                                 tmp=peaks
                             dens_df[iidd][mol]=peaks
-                    print(dens_df)
                     sv_data=dens_df
+                elif j=='density' and sv_index[j]['status']=='exist':
+                    dens_df=tbf.frompickle(sv_index[j]['name'])
+#--------------------------------------------------
                 if j=='gmx_ndx':
                     tbf.togmxndx(box_res, fld='./'+SYSTEM_NAME, sv_name=SYSTEM_NAME+'_'+i)
-
+#--------------------------------------------------
+            # Save module
             if len(j)==3:
                 if j[0]=='save':
                     try:
@@ -158,15 +209,54 @@ for i in GROUPS.keys():
                             if k=='pkl':
                                 tbf.topickle(fl=sv_data, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_'+j[2])
                             if k=='json':
                                 tbf.tojson(fl=sv_data, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_'+j[2])
 ###################################################
+#COMBINE section
     else:
         for j in GROUPS[i]:
-            for jj in j:
-                if jj[0]!='COMB':
-                    if jj[1]=='surf':
-                        surf=surf[locals()[jj[0]]]
-                    elif jj[1]=='vector':
-                        vector=vector[locals()[jj[0]]]
-                if jj[0]=='COMB':
-                    if jj[1]=='tilt':
-                        tbf.SurfVector_angle(surf,vector)
+            #Input to COMBINE property
+            if j[0]!='COMB':
+                if j[1]=='surf':
+                    surf=surf[j[0]]
+                if j[1]=='vector':
+                    vector=vector[j[0]]
+            #Calculate COMBINE property
+            if j[0]=='COMB':
+                if j[1]=='tilt' and sv_index[str(j)]['status']=='not exist':
+                    tilt=tbf.SurfVector_angle(surf,vector)
+                    #Loop over timesteps and keep the average tilt of each step
+                    avg={}
+                    ss=[]
+                    for step in tilt.keys():
+                        for sub in tilt[step].keys():
+                            avgs=tilt[step][sub]['avg/frame']
+                            if sub not in ss:
+                                ss.append(sub)
+                                avg[sub]=[avgs]
+                            else:
+                                avg[sub].append(avgs)
+                    #Calculate total average
+                    tot_avg={}
+                    for sub in avg.keys():
+                        for key, value in uniq_id.items():
+                            if str(value['domain']).strip() == str(sub).strip():
+                                hsh=key
+                                break
+                        tot_avg[hsh]=sum(avg[sub])/len(avg[sub])
+                    sv_data=tot_avg
+                elif j[1]=='tilt' and sv_index[str(j)]['status']=='exist':
+                    tot_avg=tbf.frompickle(sv_index[str(j)]['name'])
+#--------------------------------------------------
+            # Save module
+            if len(j)==3:
+                if j[0]=='save':
+                    try:
+                        sv_data
+                    except NameError:
+                        pass
+                    else:
+                        for k in j[1]:
+                            if k=='pkl':
+                                tbf.topickle(fl=sv_data, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_'+j[2])
+                            if k=='json':
+                                tbf.tojson(fl=sv_data, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_'+j[2])
+###################################################
 ###################################################
\ No newline at end of file
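
Every property in main.py now follows the same compute-or-load shape: consult sv_index, recompute when no save file exists, otherwise restore the pickled result and move on. Reduced to its core, the pattern looks like this minimal sketch (compute_or_load is a hypothetical name, not part of the patch):

    import os
    import pickle

    def compute_or_load(cache_path, compute):
        # Return the previously pickled object if it exists, else compute it fresh.
        if os.path.isfile(cache_path):
            with open(cache_path, 'rb') as fl:
                return pickle.load(fl)
        return compute()

    # e.g. surf[i] = compute_or_load(cache, lambda: tbf.coord2norm2cg(coord_vector, img=False))
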
diff --git a/tooba_f.py b/tooba_f.py
index 77233ba9f186166a687cd0c82a6e837d3a3ca3c4..c7b8e4f85394a731619bd5c686dd41f0c3fa3954 100644
--- a/tooba_f.py
+++ b/tooba_f.py
@@ -677,11 +677,14 @@ def SurfVector_angle(surf,vector):
         bar = Bar('Step: '+step, max=len(surf[step].keys()))
         for sudomain in surf[step].keys():
             angle[step][sudomain]={}
+            tot=[]
             for resid in vector[step][sudomain].keys():
                 P1=tuple(surf[step][sudomain]['normal'])
                 P2=tuple(vector[step][sudomain][resid])
                 #print(tbf.angle_between3D(P1,P2))
                 angle[step][sudomain][resid]=angle_between3D(P1,P2)
+                tot.append(angle_between3D(P1,P2))
+            angle[step][sudomain]['avg/frame']=sum(tot)/len(tot)
             bar.next()
         bar.finish()
     return angle
@@ -693,7 +696,10 @@
     cnt=0
     fl_save=fld+'/gmx_ndx/'
     if not os.path.exists(fl_save):
-        os.makedirs(fl_save)
+        os.makedirs(fl_save)
+    else:
+        print('WARNING: .ndx files exist. Nothing to do!')
+        return
 
     ndx={}
     for step in box_res.keys():
diff --git a/tooba_gmx.py b/tooba_gmx.py
index 325f91cea2169876e40ff44f6d117bbbeeebd979..abd4d03dba9fb4f5ffbdfefbeb07b286cfba3061 100644
--- a/tooba_gmx.py
+++ b/tooba_gmx.py
@@ -23,8 +23,9 @@ def ndx_index(SYSTEM_NAME):
     for f in os.listdir(path):
         text=SYSTEM_NAME+'_'+f
         iidd=hashlib.md5(text.encode('utf-8')).hexdigest()
+        dom=f.split('_')[2].split('.')[0]
         uniq_id[iidd]={}
-        uniq_id[iidd]={'system':SYSTEM_NAME,'domain':f}
+        uniq_id[iidd]={'system':SYSTEM_NAME,'ndx_file':f,'domain':dom}
     return uniq_id
 
 def read_xvg(XVG):
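
For reference, ndx_index now derives a clean domain label from each .ndx file name and keeps the full name under 'ndx_file'; the COMBINE section relies on the 'domain' field to match uniq_id entries against subdomain keys. A small illustration of the parsing, assuming file names of the form <SYSTEM>_<TAG>_<DOMAIN>.ndx (the concrete values below are hypothetical):

    import hashlib

    f = 'bilayer_ALL_subdomain3.ndx'        # hypothetical directory entry
    text = 'bilayer_' + f
    iidd = hashlib.md5(text.encode('utf-8')).hexdigest()   # stable per-subdomain key
    dom = f.split('_')[2].split('.')[0]     # -> 'subdomain3'
    entry = {'system': 'bilayer', 'ndx_file': f, 'domain': dom}
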