main.py 13.1 KB
Newer Older
Stelios Karozis's avatar
Stelios Karozis committed
1
import os
Stelios Karozis's avatar
Stelios Karozis committed
2
import tooba_f as tbf
3
import tooba_gmx as tbgmx
Stelios Karozis's avatar
Stelios Karozis committed
4 5 6 7 8 9
###################################################
#NOTICE:    resids of head in each subdomain may differ in tail case
#           keep all atoms of group in the first occurent subdomain
#           in case of tail is the one closest to the head, hence
#           the code is a good approximation 
###################################################
# Verbosity switch: 1 prints warnings when cached preprocessing files are reused.
LOG=0
# System name; also the directory that holds every input and output file.
SYSTEM_NAME='test'
# Subdomain (grid cell) edge lengths along x, y, z for the domain decomposition.
DISCET=[6, 6, 6]
# Number of frames to read from the trajectory file.
NUM_FR=1
# Input files, all located inside the SYSTEM_NAME directory:
# trajectory (.trr), final coordinates (.gro) and run input (.tpr).
TRAJ=SYSTEM_NAME+'/eq_traj.trr'
GRO=SYSTEM_NAME+'/eq_final.gro'
TPR=SYSTEM_NAME+'/eq_run.tpr'
#ITP_DIC={'NS':'CER_SOVOVA.itp','FFA':'FFA_CG.itp','CHOL':'CHOL_CG.itp'}
18 19 20 21 22 23
###################################################
# {NAME:[QUEUE OF PROCESSES]}
#
#   NAME: It is user defined. A dictionary must follows with the same name.
#         The dict structure has to be: {res_type:[atom_types]}
# 
#         if NAME is COMBINE then it needs part or all the info from aforementioned
#         groups to execute a process. You cannot use combination as first group.
#
#   QUEUE OF PROCESSES: surf, vector, tilt, index, density, gmx_ndx, [save, [type], save_name] 
#                                            
#                       surf:                       Determine surface from atoms (ex. Head of lipid)
#                       vector:                     Determine vector that fits atoms (ex. Tail of lipid)
#                       tilt:                       Use surf and vector result to calculate angle (if NAME is COMBINE)
#                       index:                      Creates unique code (md5) for every subdomain to use in data saving process 
#                       density:                    Detrmine density profile of x,y,z and save peaks of directions with the least number 
#                       gmx_ndx:                    Saves one ndx for every subdomain
#                       [save, [type], save_name]:  Save result of previous function, type: pkl, json
#
###################################################
# NOTE: queue order matters — e.g. 'density' relies on 'index' having run
# earlier in the same queue, and COMBINE consumes the groups defined before it.
GROUPS={'ALL':['gmx_ndx','index',['save', ['pkl'],'index'],'density',['save', ['pkl'],'dens']],
        'HD_GROUP':['surf',['save', ['pkl', 'json'],'time_domain_c-normal-cg']],
        'TL_GROUP':['vector'],
        'COMBINE':[['HD_GROUP','surf'],['TL_GROUP','vector'],['COMB','tilt'],['save', ['pkl'],'tilt']]
}
# Atom selections per residue type; each dict name must match a GROUPS key.
# ALL: every atom of each molecule type.
ALL={'NS':['C6', 'Na', 'P4', 'P3', 'C7','C3', 'C4', 'C5', 'C8', 'C9', 'C10'], 'CHOL':['ROH','R1', 'R2', 'R3', 'R4', 'R5'], 'FFA':['AC','C1', 'C2', 'C3', 'C4']}
# HD_GROUP: head-group atoms (input for the 'surf' process).
HD_GROUP={'NS':['C6', 'Na', 'P4', 'P3', 'C7'], 'CHOL':['ROH'], 'FFA':['AC']}
# TL_GROUP: tail atoms (input for the 'vector' process).
TL_GROUP={'NS':['C3', 'C4', 'C5', 'C8', 'C9', 'C10'], 'CHOL':['R1', 'R2', 'R3', 'R4', 'R5'], 'FFA':['C1', 'C2', 'C3', 'C4']}
Stelios Karozis's avatar
Stelios Karozis committed
46
###################################################
###################################################
print(' ')
print('================')
print('Starting process')
print('================')
###################################################
###################################################
#Read .gro file
# Keep only the bookkeeping columns (residue numbers/types, atom types/numbers)
# needed below to build atom-index groups; coordinates come from the trajectory.
_,data_num,_,res_num,res_type,atom_type,atom_num,_ = tbf.read_gro(GRO)
print(' ')
###################################################
#--------------------------------------------------
#Read .itp files
#weights={}
#for MOL in ITP_DIC.keys():
#    weights_tmp = tbf.read_itp(SYSTEM_NAME+'/'+ITP_DIC[MOL])
#    weights[MOL]=weights_tmp
#    print(' ')
#print(weights)
###################################################
67
#--------------------------------------------------
# Load the preprocessed trajectory if its pickle cache exists; otherwise read
# NUM_FR frames from the .trr file and cache them for subsequent runs.
if os.path.isfile('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_data.pkl'):
    if LOG==1:
        print('WARNING: Preprocessing files exist.')
        print('         Erase data.pkl if the system is new.')
        print('--------------------------------------------')
    data_all=tbf.frompickle('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_data.pkl')
else:
    #Read .trr file
    data_all=tbf.fr_export(trajfile=TRAJ,num_frames=NUM_FR)
    tbf.topickle(fl=data_all, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_data')
###################################################
79 80 81 82
#--------------------------------------------------
#Check save files if exist in order to skip functions
# Scan every group's process queue and record, per process, whether its result
# was saved by a previous run.  A ['save', [types], name] entry refers to the
# process immediately BEFORE it in the queue, so the file-existence check
# updates sv_index[prev] (the preceding process), not the save entry itself.
#
# sv_index maps str(process) -> {'status': 'exist'|'not exist',
#                                'name':   path of the cached .pkl, or 'None'}
prev=None
sv_index={}
for i in GROUPS.keys():
    for j in GROUPS[i]:
        # str(j) is a safe dict key for both plain strings and (unhashable)
        # list entries; for strings str(j)==j, so later lookups by process
        # name ('surf', 'vector', ...) still resolve to the same entry.
        sv_index[str(j)]={'status':'not exist', 'name':'None'}
        if len(j)==3 and j[0]=='save':
            # Cached results are always reloaded via tbf.frompickle, so the
            # existence check targets the .pkl path for every save type
            # requested (the json copy is export-only).
            fl='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_'+j[2]+'.pkl'
            for k in j[1]:
                # Guard prev: a 'save' entry placed first in a queue would
                # otherwise have no preceding process to annotate.
                if k in ('pkl','json') and prev is not None:
                    if os.path.isfile(fl):
                        sv_index[prev]['status']='exist'
                        sv_index[prev]['name']=fl
                    else:
                        sv_index[prev]['status']='not exist'
        prev=str(j)
###################################################
#--------------------------------------------------
# Main dispatch loop: run each group's queue of processes.  Non-COMBINE groups
# operate on their own atom selection; the COMBINE group consumes the results
# ('surf', 'vector') produced by the groups before it, so ordering matters.
for i in GROUPS.keys():
#not COMBINE section
    if i!='COMBINE': 
        # Atom indices of this group's selection (cached as <SYS>_<group>_ndx.pkl).
        if os.path.isfile('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_ndx.pkl'):
            if LOG==1: 
                print('WARNING: Preprocessing files exist.')
                print('         Erase ndx_HEAD.pkl if the system is new.')
                print('--------------------------------------------')
            group_ndx=tbf.frompickle('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_ndx.pkl')
        else:
            #Find atom type index in lists created above
            # locals()[i] resolves the selection dict whose variable name equals
            # the group name (ALL, HD_GROUP, TL_GROUP defined above).
            group_ndx=tbf.atomid_data(res_num, res_type, atom_type, atom_num, group=locals()[i])
            tbf.topickle(fl=group_ndx, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_ndx')
#--------------------------------------------------
        # Assignment of the group's atoms to grid subdomains (cached as ..._box.pkl).
        if os.path.isfile('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_box.pkl'):
            if LOG==1:
                print('WARNING: Preprocessing files exist.')
                print('         Erase box.pkl if the system is new')
                print('         or new grid is applied !')
                print('--------------------------------------------')
            box_res=tbf.frompickle('./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_box.pkl')
        else:
            #Create subdomains coordinates
            box_p=tbf.domain_decomposition(data=data_all,dx=DISCET[0],dy=DISCET[1],dz=DISCET[2])
            #Assign desired atoms (from above function) to subdomains
            ##result1: {step:{res:{atom_type:{atom_num:(subX,subYsubZ)}}}}
            ##result2: {step:{res:{atom_type:{(subX,subYsubZ):[atom_num]}}}}
            _,box_res=tbf.atom2grid(data_all,box_p, group_ndx)
            tbf.topickle(fl=box_res, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_box')
###################################################
        #Creates dictionary with coordinates per subdomain for each frame
        _,coord_vector=tbf.sub_coord(box=box_res, data=data_all, res_num=res_num)
        # Execute the queued processes.  Each producing branch also sets
        # sv_data so a following ['save', ...] entry can persist the result.
        # NOTE(review): sv_data is module-global; if a process is skipped
        # because its cache exists, a later 'save' may re-save a stale value
        # from an earlier group under the new name — confirm intended.
        for j in GROUPS[i]:
            # Every real process token here has len > 1 (shortest is 'surf');
            # this mainly skips degenerate entries.
            if len(j) > 1:
                if j=='surf' and sv_index[j]['status']=='not exist':
                    # BUGFIX: initialize the container on first use, mirroring
                    # the 'vector' branch below; without this a fresh run (no
                    # cached files) raised NameError on 'surf'.
                    if 'surf' not in locals():
                        surf={}
                    #Creates dictionary with c, normal per subdomain for each frame
                    surf[i]=tbf.coord2norm2cg(coord_vector,img=False)
                    sv_data=surf[i]
                elif j=='surf' and sv_index[j]['status']=='exist':
                    # j=='surf', so this checks whether the variable exists yet.
                    if j not in locals():
                        surf={}
                    surf[i]=tbf.frompickle(sv_index[j]['name'])
#--------------------------------------------------   
                if j=='vector' and sv_index[j]['status']=='not exist':
                    if j not in locals():
                        vector={}
                    vector[i]=tbf.coord2vector(coord_vector)
                    sv_data=vector[i]
                elif j=='vector' and sv_index[j]['status']=='exist':
                    if j not in locals():
                        vector={}
                    vector[i]=tbf.frompickle(sv_index[j]['name'])
#--------------------------------------------------
                if j=='index' and sv_index[j]['status']=='not exist':
                    # Unique md5 code per subdomain, used in file naming below.
                    uniq_id=tbgmx.ndx_index(SYSTEM_NAME)
                    sv_data=uniq_id
                elif j=='index' and sv_index[j]['status']=='exist':
                    uniq_id=tbf.frompickle(sv_index[j]['name'])
#--------------------------------------------------
                if j=='density' and sv_index[j]['status']=='not exist':
                    # Requires uniq_id from a preceding 'index' process.
                    dens_df={}
                    for iidd in uniq_id.keys():
                        dens_df[iidd]={}
                        fl='./'+uniq_id[iidd]['system']+'/gmx_ndx/'+uniq_id[iidd]['ndx_file']
                        cnt=-1
                        for mol in locals()[i].keys():
                            cnt=cnt+1
                            # Keep, across x/y/z, the peak list of the direction
                            # with the fewest peaks (see NOTICE in file header).
                            for d in ('x','y','z'):
                                peaks = tbgmx.density_picks(TRR=TRAJ,TPR=TPR,IND=fl,SLC=400,ST=1000,EN=-1,normal=d,fld='./'+uniq_id[iidd]['system'],arg=cnt,dist_pk=20)
                                if d=='x':
                                    tmp=peaks
                                else:
                                    print(len(tmp),len(peaks))
                                    if len(tmp)<len(peaks):
                                        peaks=tmp
                                    tmp=peaks

                            dens_df[iidd][mol]=peaks
                        sv_data=dens_df
                elif j=='density' and sv_index[j]['status']=='exist':
                    dens_df=tbf.frompickle(sv_index[j]['name'])
#--------------------------------------------------
                if j=='gmx_ndx':
                    # One GROMACS .ndx file per subdomain; not cached/saved here.
                    tbf.togmxndx(box_res, fld='./'+SYSTEM_NAME, sv_name=SYSTEM_NAME+'_'+i)
#--------------------------------------------------
            # Save module
            # Persist the most recent sv_data as pkl and/or json; silently
            # skipped if no process has produced a result yet.
            if len(j)==3:
                if j[0]=='save':
                    try:
                        sv_data
                    except NameError:
                        pass
                    else:
                        for k in j[1]:
                            if k=='pkl':
                                tbf.topickle(fl=sv_data, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_'+j[2])
                            if k=='json':    
                                tbf.tojson(fl=sv_data, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_'+j[2])
###################################################
#COMBINE section
    else:
        for j in GROUPS[i]:
            #Input to COMBINE property
            # NOTE(review): this rebinds surf/vector from {group: result} dicts
            # to the chosen group's result — destructive, so COMBINE can only
            # consume each input once; confirm that is acceptable.
            if j[0]!='COMB':
                if j[1]=='surf':
                    surf=surf[j[0]]
                if j[1]=='vector':
                    vector=vector[j[0]]
            #Calculate COMBINE property
            if j[0]=='COMB':
                if j[1]=='tilt' and sv_index[str(j)]['status']=='not exist':
                    tilt=tbf.SurfVector_angle(surf,vector)
                    #Loop over timesteps and keep avgs tilts for each step
                    avg={}
                    ss=[]
                    for step in tilt.keys():
                        for sub in tilt[step].keys():
                            avgs=tilt[step][sub]['avg/frame']
                            if sub not in ss:
                                ss.append(sub)
                                avg[sub]=avgs
                            else:
                                avg[sub].append(avgs)
                    #Calculate total average 
                    # Map each subdomain back to its unique md5 key from uniq_id.
                    tot_avg={}
                    for sub in avg.keys():
                        for key, value in uniq_id.items():
                            if str(value['domain']).strip() == str(sub).strip():
                                hsh=key
                                break
                        try:
                            tot_avg[hsh]=sum(avg[sub])/len(avg[sub])
                        except TypeError: #in case of one frame
                            tot_avg[hsh]=sum([avg[sub]])/len([avg[sub]])
                    sv_data=tot_avg
                elif j[1]=='tilt' and sv_index[str(j)]['status']=='exist':
                    tot_avg=tbf.frompickle(sv_index[str(j)]['name'])
#--------------------------------------------------
            # Save module (same semantics as in the non-COMBINE branch).
            if len(j)==3:
                if j[0]=='save':
                    try:
                        sv_data
                    except NameError:
                        pass
                    else:
                        for k in j[1]:
                            if k=='pkl':
                                tbf.topickle(fl=sv_data, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_'+j[2])
                            if k=='json':    
                                tbf.tojson(fl=sv_data, sv_name='./'+SYSTEM_NAME+'/'+SYSTEM_NAME+'_'+i+'_'+j[2])             
###################################################
263
###################################################