Commit 332978a9 authored by Stelios Karozis

Dynamic I/O, ndx print for gmx

parent 1272fa88
@@ -6,17 +6,19 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [Unreleased]
-## [0.0.5] - 2020-05-20
+## [0.0.5] - 2020-05-21
 ### Added
 - Add progress bar per function
 - Save preprocessing arrays to pickles and read them if they exist, to speed up the whole process
 - New function to calculate surface per subdomain (different input)
+- Print groups in gmx ndx format (one .ndx file per subdomain) [future use: create a new .trr per subdomain with gmx trjconv]
 ### Changed
 - Debug atomid_data function
+- More dynamic I/O throughout the code
 ### Removed
-- None
+- Printing .png of surface, due to an unknown matplotlib error
 ## [0.0.4] - 2020-05-19
 ### Added
...
@@ -6,15 +6,32 @@ import tooba_f as tbf
 # in case of tail is the one closest to the head, hence
 # the code is a good approximation
 ###################################################
-TRAJ='system/eq_traj.trr'
+LOG=0
+SYSTEM_NAME='test'
+DISCET=[6, 6, 6]
 NUM_FR=1
-GRO='system/eq_final.gro'
-HEAD=True
+TRAJ=SYSTEM_NAME+'/eq_traj.trr'
+GRO=SYSTEM_NAME+'/eq_final.gro'
+###################################################
+# {NAME:[QUEUE OF PROCESSES]}
+#
+# NAME: It is user defined. A dictionary with the same name must follow.
+# The dict structure has to be: {res_type:[atom_types]}
+#
+# If NAME is COMBINE, it needs part or all of the info from the aforementioned
+# groups to execute a process. You cannot use COMBINE as the first group.
+#
+# QUEUE OF PROCESSES: surf, vector, [save, [type], save_name]
+# type: pkl, json, gmx_ndx (one ndx for every subdomain)
+#
+# If NAME is COMBINE: [tilt]
+###################################################
+GROUPS={'HD_GROUP':['surf',['save', ['pkl','gmx_ndx'],'time_domain_c-normal-cg']],
+        'TL_GROUP':['vector'],
+        'COMBINE':['tilt']
+        }
 HD_GROUP={'NS':['C6', 'Na', 'P4', 'P3', 'C7'], 'CHOL':['ROH'], 'FFA':['AC']}
-TAIL=False
 TL_GROUP={'NS':['C3', 'C4', 'C5', 'C8', 'C9', 'C10'], 'CHOL':['R1', 'R2', 'R3', 'R4', 'R5'], 'FFA':['C1', 'C2', 'C3', 'C4']}
-DISCET=[3, 3, 3]
-RES_NAME='time_domain_c-normal-cg'
 ###################################################
 ###################################################
 print(' ')
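The GROUPS dictionary above is the new entry point of the script: each key has to match the name of a group dictionary defined right below it (HD_GROUP, TL_GROUP), and its value is the queue of processes run for that group, while the reserved COMBINE entry consumes results of the other groups. Purely as a hedged illustration of this schema (not part of the commit), a configuration that only fits the head-group surfaces and dumps them as JSON, with no tail vectors and no tilt step, could look like:

    GROUPS={'HD_GROUP':['surf',['save', ['json'],'hd_surface']]}

Here 'hd_surface' is a hypothetical save_name; the type list inside the save item can combine 'pkl', 'json' and 'gmx_ndx' freely.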
@@ -27,57 +44,69 @@ print('================')
 _,data_num,_,res_num,res_type,atom_type,atom_num,_ = tbf.read_gro(GRO)
 print(' ')
 ###################################################
-if os.path.isfile('./data.pkl'):
-    print('WARNING: Preprocessing files exist.')
-    print('         Erase data.pkl if the system is new.')
-    print('--------------------------------------------')
-    data_all=tbf.frompickle('./data.pkl')
+if os.path.isfile('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_data.pkl'):
+    if LOG==1:
+        print('WARNING: Preprocessing files exist.')
+        print('         Erase data.pkl if the system is new.')
+        print('--------------------------------------------')
+    data_all=tbf.frompickle('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_data.pkl')
 else:
     #Read .trr file
     data_all=tbf.fr_export(trajfile=TRAJ,num_frames=NUM_FR)
-    tbf.topickle(fl=data_all, sv_name='data')
+    tbf.topickle(fl=data_all, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_data')
 ###################################################
-if HEAD==True:
-    if os.path.isfile('./ndx_HEAD.pkl'):
-        print('WARNING: Preprocessing files exist.')
-        print('         Erase ndx_HEAD.pkl if the system is new.')
-        print('--------------------------------------------')
-        group_ndx=tbf.frompickle('./ndx_HEAD.pkl')
-    else:
-        #Find atom type index in lists created above
-        group_ndx=tbf.atomid_data(res_num, res_type, atom_type, atom_num, group=HD_GROUP)
-        tbf.topickle(fl=group_ndx, sv_name='ndx_HEAD')
-    ###################################################
-    if os.path.isfile('./box.pkl'):
-        print('WARNING: Preprocessing files exist.')
-        print('         Erase box.pkl if the system is new')
-        print('         or new grid is applied !')
-        print('--------------------------------------------')
-        box_res=tbf.frompickle('./box.pkl')
-    else:
-        #Create subdomains coordinates
-        box_p=tbf.domain_decomposition(data=data_all,dx=DISCET[0],dy=DISCET[1],dz=DISCET[2])
-        #Assign desired atoms (from above function) to subdomains
-        ##result1: {step:{res:{atom_type:{atom_num:(subX,subYsubZ)}}}}
-        ##result2: {step:{res:{atom_type:{(subX,subYsubZ):[atom_num]}}}}
-        _,box_res=tbf.atom2grid(data_all,box_p, group_ndx)
-        tbf.topickle(fl=box_res, sv_name='box')
+for i in GROUPS.keys():
+    if i!='COMBINE':
+        if os.path.isfile('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_ndx.pkl'):
+            if LOG==1:
+                print('WARNING: Preprocessing files exist.')
+                print('         Erase ndx_HEAD.pkl if the system is new.')
+                print('--------------------------------------------')
+            group_ndx=tbf.frompickle('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_ndx.pkl')
+        else:
+            #Find atom type index in lists created above
+            group_ndx=tbf.atomid_data(res_num, res_type, atom_type, atom_num, group=HD_GROUP)
+            tbf.topickle(fl=group_ndx, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_ndx')
+        ###################################################
+        if os.path.isfile('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_box.pkl'):
+            if LOG==1:
+                print('WARNING: Preprocessing files exist.')
+                print('         Erase box.pkl if the system is new')
+                print('         or new grid is applied !')
+                print('--------------------------------------------')
+            box_res=tbf.frompickle('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_box.pkl')
+        else:
+            #Create subdomains coordinates
+            box_p=tbf.domain_decomposition(data=data_all,dx=DISCET[0],dy=DISCET[1],dz=DISCET[2])
+            #Assign desired atoms (from above function) to subdomains
+            ##result1: {step:{res:{atom_type:{atom_num:(subX,subYsubZ)}}}}
+            ##result2: {step:{res:{atom_type:{(subX,subYsubZ):[atom_num]}}}}
+            _,box_res=tbf.atom2grid(data_all,box_p, group_ndx)
+            tbf.topickle(fl=box_res, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_box')
+        ###################################################
 ###################################################
-if HEAD==True:
-    #Creates dictionary with coordinates per subdomain for each frame
-    coord_norm,coord_vector=tbf.sub_coord(box=box_res, data=data_all, res_num=res_num)
-    #Creates dictionary with c, normal per subdomain for each frame
-    surf=tbf.coord2norm2cg(coord_vector,img=True)
-    print(surf)
-    exit()
-    tbf.topickle(fl=surf, sv_name=RES_NAME)
-    tbf.tojson(fl=surf, sv_name=RES_NAME)
-###################################################
-if TAIL==True:
-    #Creates dictionary with coordinates per subdomain for each frame
-    _,coord_vector=tbf.sub_coord(box=box_res, data=data_all, res_num=res_num)
-    vector=tbf.coord2vector(coord_vector)
+        #Creates dictionary with coordinates per subdomain for each frame
+        _,coord_vector=tbf.sub_coord(box=box_res, data=data_all, res_num=res_num)
+        for j in GROUPS[i]:
+            if len(j) > 1:
+                if j=='surf':
+                    #Creates dictionary with c, normal per subdomain for each frame
+                    surf=tbf.coord2norm2cg(coord_vector,img=False)
+                    sv_data=surf
+                if j=='vector':
+                    vector=tbf.coord2vector(coord_vector)
+                    sv_data=vector
+                if len(j)==3:
+                    if j[0]=='save':
+                        for k in j[1]:
+                            if k=='pkl':
+                                tbf.topickle(fl=sv_data, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+j[2])
+                            if k=='json':
+                                tbf.tojson(fl=sv_data, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+j[2])
+                            if k=='gmx_ndx':
+                                tbf.togmxndx(box_res, fld='./'+SYSTEM_NAME, sv_name=SYSTEM_NAME)
 ###################################################
-if HEAD==True and TAIL==True:
-    tbf.SurfVector_angle(surf,vector)
+    else:
+        for j in GROUPS[i]:
+            if j=='tilt':
+                tbf.SurfVector_angle(surf,vector)
\ No newline at end of file
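With the settings above (SYSTEM_NAME='test'), the dynamic I/O introduced by this commit keeps every input, cache and result file under the system's own folder instead of the repository root. A run of the script should produce roughly the layout sketched below; the subdomain labels are hypothetical, since their exact form comes from atom2grid:

    test/
        eq_traj.trr                        # input trajectory (TRAJ)
        eq_final.gro                       # input structure (GRO)
        test_data.pkl                      # cached frames from fr_export
        test_HD_GROUP_ndx.pkl              # cached atom indices per group
        test_HD_GROUP_box.pkl              # cached subdomain assignment per group
        test_TL_GROUP_ndx.pkl
        test_TL_GROUP_box.pkl
        test_time_domain_c-normal-cg.pkl   # 'save' output of the HD_GROUP queue
        gmx_ndx/
            test_(0, 0, 0).ndx             # one index file per subdomain
            ...

On a second run the *_data.pkl, *_ndx.pkl and *_box.pkl caches are read back instead of being recomputed; the warnings about erasing them are only printed when LOG=1.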
 import os
 import numpy as np
-#from mpl_toolkits.mplot3d import Axes3D
-import matplotlib.pyplot as plt
 import scipy.optimize
 import pickle as pkl
 import json
 from progress.bar import Bar
+#from mpl_toolkits.mplot3d import Axes3D
+import matplotlib.pyplot as plt
 from pytrr import (
     read_trr_header,
@@ -52,6 +52,8 @@ def center_gravity(a):
     return cg
 def topickle(fl, sv_name):
+    print(' ')
+    print('Save to pickle |################################| 1/1')
     with open(sv_name+'.pkl', 'wb') as handle:
         pkl.dump(fl, handle, protocol=pkl.HIGHEST_PROTOCOL)
@@ -62,6 +64,8 @@ def frompickle(fl):
 def tojson(fl, sv_name):
+    print(' ')
+    print('Save to json |################################| 1/1')
     with open(sv_name+'.json', 'w') as file:
         file.write(json.dumps(str(fl)))
@@ -69,7 +73,6 @@ def plot_surf(data, normal, c, save_name):
     #Plot surface
     fig = plt.figure()
     ax = fig.gca(projection='3d')
     # plot fitted plane
     maxx = np.max(data[:,0])
     maxy = np.max(data[:,1])
@@ -92,8 +95,8 @@ def plot_surf(data, normal, c, save_name):
     ax.set_xlabel('x')
     ax.set_ylabel('y')
     ax.set_zlabel('z')
-    plt.savefig(save_name+'.png')
-    #plt.show()
+    #plt.savefig(save_name+'.png')
+    plt.show()
     plt.clf()
     plt.cla()
     plt.close()
@@ -493,10 +496,7 @@ def coord2norm(coord,img=True):
         bar = Bar('Step: '+step, max=len(coord[step].keys()))
         for subdomain in coord[step].keys():
             c,normal = fitPlaneLTSQ(np.array(coord[step][subdomain]))
-            cgx = center_gravity(coord[step][subdomain])
-            cgy = center_gravity(coord[step][subdomain])
-            cgz = center_gravity(coord[step][subdomain])
-            surf[step][subdomain]={'c':c, 'normal':normal, 'cg':[cgx,cgy,cgz]}
+            surf[step][subdomain]={'c':c, 'normal':normal}
             #Change save variable if you want to save .png elsewhere
             save='png/'+str(subdomain)
             if img==True:
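fitPlaneLTSQ, used here and in coord2norm2cg below, returns the constant c and the normal of a least-squares plane fitted through the subdomain coordinates. As a hedged sketch only (the repository's own implementation is not shown in this diff and may differ), such a fit typically solves z = a*x + b*y + c and converts the coefficients into a unit normal:

    import numpy as np

    def fit_plane_lstsq(points):
        # points: (N, 3) array of x, y, z coordinates of one subdomain
        A = np.c_[points[:, 0], points[:, 1], np.ones(len(points))]
        (a, b, c), *_ = np.linalg.lstsq(A, points[:, 2], rcond=None)
        normal = np.array([a, b, -1.0])
        return c, normal / np.linalg.norm(normal)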
@@ -545,12 +545,16 @@ def coord2norm2cg(coord_vector,img=True):
             c,normal = fitPlaneLTSQ(np.array(surf_points))
             surf[step][subdomain]={'c':c, 'normal':normal}
             surf[step][subdomain].update(res_cg)
-            save='png/'+str(subdomain)
             if img==True:
+                fl_save='png/'
+                save=fl_save+str(subdomain)
+                if not os.path.exists(fl_save):
+                    os.makedirs(fl_save)
                 try:
                     plot_surf(np.array(surf_points), normal, c, save)
-                except:
-                    print("ERROR: Folder png/ doesn't exist in root")
+                except Exception as e:
+                    print(e)
             bar.next()
         bar.finish()
     return surf
@@ -590,9 +594,13 @@ def SurfVector_angle(surf,vector):
     output: angle = {step:{subdomain:{resid:angle}}}
     """
+    print(' ')
+    print('Tilt progress:')
+    print('==============================')
     angle={}
     for step in surf.keys():
         angle[step]={}
+        bar = Bar('Step: '+step, max=len(surf[step].keys()))
         for sudomain in surf[step].keys():
             angle[step][sudomain]={}
             for resid in vector[step][sudomain].keys():
@@ -600,4 +608,50 @@ def SurfVector_angle(surf,vector):
                 P2=tuple(vector[step][sudomain][resid])
                 #print(tbf.angle_between3D(P1,P2))
                 angle[step][sudomain][resid]=angle_between3D(P1,P2)
+            bar.next()
+        bar.finish()
     return angle
+def togmxndx(box_res, fld, sv_name):
+    print(' ')
+    print('Save to gmx_ndx progress:')
+    print('==============================')
+    cnt=0
+    fl_save=fld+'/gmx_ndx/'
+    if not os.path.exists(fl_save):
+        os.makedirs(fl_save)
+    ndx={}
+    for step in box_res.keys():
+        if cnt==1:
+            break
+        for resid in box_res[step].keys():
+            for res in box_res[step][resid].keys():
+                for subdomain in box_res[step][resid][res].keys():
+                    ndx[subdomain]={}
+        rs=[]
+        bar = Bar('Create format: ', max=len(box_res[step].keys()))
+        for resid in box_res[step].keys():
+            for res in box_res[step][resid].keys():
+                for subdomain in box_res[step][resid][res]:
+                    if (res,subdomain) not in rs:
+                        rs.append((res,subdomain))
+                        ndx[subdomain][res]=[]
+                    for atomnum in box_res[step][resid][res][subdomain]:
+                        ndx[subdomain][res].append(atomnum)
+            bar.next()
+        bar.finish()
+        cnt=cnt+1
+    bar = Bar('Save file: ', max=len(ndx.keys()))
+    for subdomain in ndx.keys():
+        save=fl_save+sv_name+'_'+str(subdomain)+'.ndx'
+        with open(save, "a") as myfile:
+            for res in ndx[subdomain].keys():
+                myfile.write("["+res+"]\n")
+                for atomnum in ndx[subdomain][res]:
+                    myfile.write(atomnum+'\n')
+        bar.next()
+    bar.finish()
\ No newline at end of file
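togmxndx writes one index file per subdomain into fld/gmx_ndx/, with a [res] section per residue type listing the atom numbers assigned to that subdomain; only the first trajectory step is used (cnt breaks the loop). As a rough, hypothetical illustration (actual subdomain labels and atom numbers depend on atom2grid), a file produced with SYSTEM_NAME='test' might look like:

    test/gmx_ndx/test_(0, 0, 0).ndx:
    [NS]
    1
    5
    12
    [CHOL]
    341
    [FFA]
    502

As the changelog notes, such an index file is meant to be fed later to gmx trjconv (via its -n option) to cut a per-subdomain trajectory out of the full .trr, for example:

    gmx trjconv -f test/eq_traj.trr -s test/eq_final.gro -n 'test/gmx_ndx/test_(0, 0, 0).ndx' -o test_sub.trr

The output name test_sub.trr is only a placeholder, and gmx trjconv will still prompt for which group of the index file to write out.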