23 """Module for interfacing with Dasha."""
24
25
26 import dep_check
27
28
29 from math import pi
30 from os import F_OK, access, chdir, getcwd, sep
31 PIPE, Popen = None, None
32 if dep_check.subprocess_module:
33 from subprocess import PIPE, Popen
34 import sys
35
36
37 from generic_fns import angles, diffusion_tensor, pipes, relax_data, value
38 from generic_fns.interatomic import return_interatom_list
39 from generic_fns.mol_res_spin import exists_mol_res_spin_data, first_residue_num, last_residue_num, residue_loop, return_spin, spin_loop
from relax_errors import RelaxDirError, RelaxError, RelaxFileError, RelaxNoInteratomError, RelaxNoPdbError, RelaxNoSequenceError, RelaxNoTensorError
from relax_io import extract_data, mkdir_nofail, open_write_file, strip, test_binary
from specific_fns.setup import model_free_obj
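
# Typical use of this module (a sketch only -- the directory and binary names below are
# placeholders, not values required by relax):
#
#     create(algor='LM', dir='dasha', force=True)         # Write the 'dasha_script' file.
#     execute(dir='dasha', force=True, binary='dasha')    # Run Dasha on the script.
#     extract(dir='dasha')                                 # Load the *.out results back into relax.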


def create(algor='LM', dir=None, force=False):
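    """Create the Dasha script file 'dasha_script'.

    @keyword algor:     The optimisation algorithm to use.  This can be the Levenberg-Marquardt algorithm 'LM' or the Newton-Raphson algorithm 'NR'.
    @type algor:        str
    @keyword dir:       The optional directory to place the script file into.
    @type dir:          str or None
    @keyword force:     A flag which if True will cause any pre-existing files to be overwritten.
    @type force:        bool
    """

    # What follows is a minimal, hedged sketch of the set up step, pieced together from the
    # helpers imported above and the create_script() interface below -- in particular the model
    # type accessor is an assumption, not a documented call.

    # Sequence data is required before a script can be created.
    if not exists_mol_res_spin_data():
        raise RelaxNoSequenceError

    # The target directory defaults to the data pipe name.
    if dir == None:
        dir = pipes.cdp_name()
    mkdir_nofail(dir)

    # The model-free model type ('mf' or 'local_tm'); the accessor name is assumed.
    model_type = model_free_obj._determine_model_type()

    # Open the 'dasha_script' file, write the script, and close it.
    script = open_write_file(file_name='dasha_script', dir=dir, force=force)
    create_script(script, model_type, algor)
    script.close()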
128 """Create the Dasha script file.
129
130 @param file: The opened file descriptor.
131 @type file: file object
132 @param model_type: The model-free model type.
133 @type model_type: str
134 @param algor: The optimisation algorithm to use. This can be the Levenberg-Marquardt algorithm 'LM' or the Newton-Raphson algorithm 'NR'.
135 @type algor: str
136 """

    # Delete all data.
    file.write("# Delete all data.\n")
    file.write("del 1 10000\n")

    # Nucleus type.
    file.write("\n# Nucleus type.\n")
    nucleus = None
    for spin in spin_loop():
        # Skip protons.
        if spin.isotope == '1H':
            continue

        # Only a single nucleus type can be handled.
        if nucleus and spin.isotope != nucleus:
            raise RelaxError("The nuclei '%s' and '%s' do not match, relax can only handle one nucleus type in Dasha." % (nucleus, spin.isotope))

        # Set the nucleus type.
        if not nucleus:
            nucleus = spin.isotope

    # Convert the nucleus name into the Dasha notation and write it out.
    if nucleus == '15N':
        nucleus = 'N15'
    elif nucleus == '13C':
        nucleus = 'C13'
    else:
        raise RelaxError("Cannot handle the nucleus type '%s' within Dasha." % nucleus)
    file.write("set nucl %s\n" % nucleus)

    # Number of frequencies.
    file.write("\n# Number of frequencies.\n")
    file.write("set n_freq %s\n" % relax_data.num_frq())

    # Frequency values (converted to MHz).
    file.write("\n# Frequency values.\n")
    count = 1
    for frq in relax_data.frq_loop():
        file.write("set H1_freq %s %s\n" % (frq / 1e6, count))
        count += 1

    # Set the diffusion tensor.
    file.write("\n# Set the diffusion tensor.\n")
    if model_type != 'local_tm':
        # Sphere.
        if cdp.diff_tensor.type == 'sphere':
            file.write("set tr %s\n" % (cdp.diff_tensor.tm / 1e-9))

        # Spheroid.
        elif cdp.diff_tensor.type == 'spheroid':
            file.write('set tr %s\n' % (cdp.diff_tensor.tm / 1e-9))

        # Ellipsoid.
        elif cdp.diff_tensor.type == 'ellipsoid':
            # Get the eigenvalues.
            Dx, Dy, Dz = diffusion_tensor.return_eigenvalues()

            # Geometric parameters.
            file.write("set tr %s\n" % (cdp.diff_tensor.tm / 1e-9))
            file.write("set D1/D3 %s\n" % (Dx / Dz))
            file.write("set D2/D3 %s\n" % (Dy / Dz))

            # Orientational parameters (Euler angles converted from radians to degrees).
            file.write("set alfa %s\n" % (cdp.diff_tensor.alpha / (2.0 * pi) * 360.0))
            file.write("set betta %s\n" % (cdp.diff_tensor.beta / (2.0 * pi) * 360.0))
            file.write("set gamma %s\n" % (cdp.diff_tensor.gamma / (2.0 * pi) * 360.0))

    # Reading the relaxation data.
    file.write("\n# Reading the relaxation data.\n")
    file.write("echo Reading the relaxation data.\n")
    noe_index = 1
    r1_index = 1
    r2_index = 1
    for ri_id in cdp.ri_ids:
        # NOE data.
        if cdp.ri_type[ri_id] == 'NOE':
            # Data set number.
            number = noe_index

            # Data type.
            data_type = 'noe'

            # Increment the data set number.
            noe_index = noe_index + 1

        # R1 data.
        elif cdp.ri_type[ri_id] == 'R1':
            # Data set number.
            number = r1_index

            # Data type.
            data_type = '1/T1'

            # Increment the data set number.
            r1_index = r1_index + 1

        # R2 data.
        elif cdp.ri_type[ri_id] == 'R2':
            # Data set number.
            number = r2_index

            # Data type.
            data_type = '1/T2'

            # Increment the data set number.
            r2_index = r2_index + 1

        # The 'read' command.
        if number == 1:
            file.write("\nread < %s\n" % data_type)
        else:
            file.write("\nread < %s %s\n" % (data_type, number))

        # Data and errors.
        for residue in residue_loop():
            # Alias the first spin of the residue.
            spin = residue.spin[0]

            # Skip deselected spins.
            if not spin.select:
                continue

            # Deselect spins with missing relaxation data.
            if len(spin.ri_data) != len(cdp.ri_ids) or spin.ri_data[ri_id] == None:
                spin.select = False
                continue

            # Write out the data line.
            file.write("%s %s %s\n" % (residue.num, spin.ri_data[ri_id], spin.ri_data_err[ri_id]))

        # Terminate the reading.
        file.write("exit\n")

    # Individual residue optimisation.
    if model_type == 'mf':
        # Loop over the residues.
        for residue in residue_loop():
            # Alias the first spin of the residue.
            spin = residue.spin[0]

            # Skip deselected spins.
            if not spin.select:
                continue

            # Get the interatomic data container (only a single dipole-dipole interaction per spin is supported).
            interatoms = return_interatom_list(spin._spin_ids[0])
            if len(interatoms) == 0:
                raise RelaxNoInteratomError
            elif len(interatoms) > 1:
                raise RelaxError("Only one interatomic data container, hence dipole-dipole interaction, is supported per spin.")

            # Residue header comment.
            file.write("\n\n\n# Residue %s\n\n" % residue.num)

            # Echo the optimisation.
            file.write("echo Optimisation of residue %s\n" % residue.num)

            # Select the residue.
            file.write("\n# Select the residue.\n")
            file.write("set cres %s\n" % residue.num)

            # The angle alpha of the XH vector in the spheroid diffusion frame.
            if cdp.diff_tensor.type == 'spheroid':
                file.write("set teta %s\n" % spin.alpha)

            # The angles theta and phi of the XH vector in the ellipsoid diffusion frame.
            elif cdp.diff_tensor.type == 'ellipsoid':
                file.write("\n# Setting the spherical angles of the XH vector in the ellipsoid diffusion frame.\n")
                file.write("set teta %s\n" % spin.theta)
                file.write("set fi %s\n" % spin.phi)

            # The spectral density function mode (jmode).
            if 'ts' in spin.params:
                jmode = 3
            elif 'te' in spin.params:
                jmode = 2
            elif 's2' in spin.params:
                jmode = 1

            # Chemical exchange.
            if 'rex' in spin.params:
                exch = True
            else:
                exch = False

            # Anisotropic diffusion.
            if cdp.diff_tensor.type == 'sphere':
                anis = False
            else:
                anis = True

            # Axial symmetry.
            if cdp.diff_tensor.type == 'spheroid':
                sym = True
            else:
                sym = False

            # Set the jmode.
            file.write("\n# Set the jmode.\n")
            file.write("set def jmode %s" % jmode)
            if exch:
                file.write(" exch")
            if anis:
                file.write(" anis")
            if sym:
                file.write(" sym")
            file.write("\n")

            # Parameter default values.
            file.write("\n# Parameter default values.\n")
            file.write("reset jmode %s\n" % residue.num)

            # Bond length (converted to Angstrom).
            file.write("\n# Bond length.\n")
            file.write("set r_hx %s\n" % (interatoms[0].r / 1e-10))

            # CSA value (converted to ppm).
            file.write("\n# CSA value.\n")
            file.write("set csa %s\n" % (spin.csa / 1e-6))

            # Fix the tf parameter if it is not part of the model.
            if not 'tf' in spin.params and jmode == 3:
                file.write("\n# Fix the tf parameter.\n")
                file.write("fix tf 0\n")

        # Optimisation of all residues.
        file.write("\n\n\n# Optimisation of all residues.\n")
        if algor == 'LM':
            file.write("lmin %s %s" % (first_residue_num(), last_residue_num()))
        elif algor == 'NR':
            file.write("min %s %s" % (first_residue_num(), last_residue_num()))

        # Show the results.
        file.write("\n# Show the results.\n")
        file.write("echo\n")
        file.write("show all\n")

        # Write the results.
        file.write("\n# Write the results.\n")
        file.write("write s2.out S\n")
        file.write("write s2f.out Sf\n")
        file.write("write s2s.out Ss\n")
        file.write("write te.out te\n")
        file.write("write tf.out tf\n")
        file.write("write ts.out ts\n")
        file.write("write rex.out rex\n")
        file.write("write chi2.out F\n")

    # Unsupported model types.
    else:
        raise RelaxError("Optimisation of the parameter set '%s' currently not supported." % model_type)
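
# For orientation, the script text produced by create_script() for a 15N, sphere diffusion
# data set starts along the following lines (hypothetical values, not real output):
#
#     # Delete all data.
#     del 1 10000
#
#     # Nucleus type.
#     set nucl N15
#
#     # Number of frequencies.
#     set n_freq 2
#
#     # Frequency values.
#     set H1_freq 600.0 1
#     set H1_freq 500.0 2
#
#     # Set the diffusion tensor.
#     set tr 8.5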
390 """Execute Dasha.
391
392 @param dir: The optional directory where the script is located.
393 @type dir: str or None
394 @param force: A flag which if True will cause any pre-existing files to be overwritten by Dasha.
395 @type force: bool
396 @param binary: The name of the Dasha binary file. This can include the path to the binary.
397 @type binary: str
398 """

    # Test that the binary string corresponds to a valid executable.
    test_binary(binary)

    # The current directory.
    orig_dir = getcwd()

    # The directory defaults to the data pipe name and must exist.
    if dir == None:
        dir = pipes.cdp_name()
    if not access(dir, F_OK):
        raise RelaxDirError('Dasha', dir)

    # Change to this directory.
    chdir(dir)

    # Catch failures and return to the original directory.
    try:
        # Test if the 'dasha_script' script file exists.
        if not access('dasha_script', F_OK):
            raise RelaxFileError('dasha script', 'dasha_script')

        # The subprocess module is required to pipe the script into Dasha.
        if Popen == None:
            raise RelaxError("The subprocess module is not available in this version of Python.")

        # Execute Dasha.
        pipe = Popen(binary, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=False)

        # Get the contents of the script and pump it into Dasha.
        script = open('dasha_script')
        lines = script.readlines()
        script.close()
        for line in lines:
            # Encode the line for the byte pipes of Python 3.
            if hasattr(line, 'encode'):
                line = line.encode()

            # Write the line to Dasha's stdin.
            pipe.stdin.write(line)

        # Close the pipe.
        pipe.stdin.close()

        # Relay Dasha's stdout.
        for line in pipe.stdout.readlines():
            # Decode Python 3 byte arrays.
            if hasattr(line, 'decode'):
                line = line.decode()

            # Write.
            sys.stdout.write(line)

        # Relay Dasha's stderr.
        for line in pipe.stderr.readlines():
            # Decode Python 3 byte arrays.
            if hasattr(line, 'decode'):
                line = line.decode()

            # Write.
            sys.stderr.write(line)

    # Failure.
    except:
        # Change back to the original directory.
        chdir(orig_dir)

        # Reraise the error.
        raise

    # Change back to the original directory.
    chdir(orig_dir)

    # Print out.
    sys.stdout.write("\n\n")
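
# A note on the I/O pattern in execute() above:  writing the entire script to stdin and then
# draining stdout followed by stderr can deadlock if Dasha fills one pipe buffer while the
# other end is not being read.  A sketch of an alternative using the standard library (this is
# not what the function above does, just an option, with script_text standing for the full
# script string):
#
#     stdout, stderr = pipe.communicate(input=script_text)
#     sys.stdout.write(stdout.decode())
#     sys.stderr.write(stderr.decode())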
477 """Extract the data from the Dasha results files.
478
479 @param dir: The optional directory where the results file is located.
480 @type dir: str or None
481 """

    # Test if sequence data is loaded.
    if not exists_mol_res_spin_data():
        raise RelaxNoSequenceError

    # The directory defaults to the data pipe name and must exist.
    if dir == None:
        dir = pipes.cdp_name()
    if not access(dir, F_OK):
        raise RelaxDirError('Dasha', dir)

    # Loop over the model-free parameters.
    for param in ['s2', 's2f', 's2s', 'te', 'tf', 'ts', 'rex']:
        # The results file name.
        file_name = dir + sep + param + '.out'

        # Test if the file exists.
        if not access(file_name, F_OK):
            raise RelaxFileError('Dasha', file_name)

        # Scaling factors (correlation times in ns, Rex converted to the field strength independent form using the frequency of the first relaxation data set).
        if param in ['te', 'tf', 'ts']:
            scaling = 1e-9
        elif param == 'rex':
            scaling = 1.0 / (2.0 * pi * cdp.frq[cdp.ri_ids[0]]) ** 2
        else:
            scaling = 1.0

        # Read the values.
        data = read_results(file=file_name, scaling=scaling)

        # Set the values.
        for i in range(len(data)):
            value.set(val=data[i][1], error=data[i][2], param=param, spin_id=data[i][0])

        # Clear the parameter for spins where it is not part of the model.
        for spin in spin_loop():
            # Skip spins with the parameter in their model.
            if param in spin.params:
                continue

            # Set the parameter to None.
            setattr(spin, param.lower(), None)

    # The chi-squared file.
    file_name = dir + sep + 'chi2.out'

    # Test if the file exists.
    if not access(file_name, F_OK):
        raise RelaxFileError('Dasha', file_name)

    # Read the values.
    data = read_results(file=file_name)

    # Set the values.
    for i in range(len(data)):
        spin = return_spin(data[i][0])
        spin.chi2 = data[i][1]
543 """Extract the data from the Dasha results file.
544
545 @keyword file: The name of the file to open.
546 @type file: str
547 @keyword dir: The directory containing the file (defaults to the current directory if None).
548 @type dir: str or None
549 @keyword scaling: The parameter scaling factor.
550 @type scaling: float
551 """

    # Extract the data.
    data = extract_data(file=file, dir=dir)

    # Strip out comment and empty lines.
    data = strip(data)

    # Repackage the data as [spin_id, value, error] lists, scaling the values and errors.
    new_data = []
    for i in range(len(data)):
        spin_id = ':%s@N' % data[i][0]
        value = float(data[i][1]) * scaling
        error = float(data[i][2]) * scaling
        new_data.append([spin_id, value, error])

    # Return the data.
    return new_data
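
# read_results() consumes whitespace separated columns of residue number, value and error, for
# example (hypothetical numbers):
#
#     1   0.812   0.011
#     2   0.840   0.009
#
# which, with scaling=1.0, would be returned as [[':1@N', 0.812, 0.011], [':2@N', 0.84, 0.009]].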