1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23 """Module for interfacing with Dasha."""
24
25
# Dependency checks.
import dep_check

# Python module imports.
from math import pi
from os import F_OK, access, chdir, getcwd, sep
import sys

# The subprocess module is optional in very old Python versions.
PIPE, Popen = None, None
if dep_check.subprocess_module:
    from subprocess import PIPE, Popen

# relax module imports.
from lib.errors import RelaxDirError, RelaxError, RelaxFileError, RelaxNoInteratomError, RelaxNoPdbError, RelaxNoSequenceError, RelaxNoTensorError
from lib.io import extract_data, mkdir_nofail, open_write_file, strip, test_binary
from pipe_control import angles, diffusion_tensor, pipes, value
from pipe_control.interatomic import return_interatom_list
from pipe_control.mol_res_spin import exists_mol_res_spin_data, first_residue_num, last_residue_num, residue_loop, return_spin, spin_loop
from pipe_control.pipes import check_pipe
from pipe_control.spectrometer import loop_frequencies
from specific_analyses.model_free.model import determine_model_type
45
46
72
73
74 -def create(algor='LM', dir=None, force=False):
127
128
def create_script(file, model_type, algor):
    """Create the Dasha script file.

    @param file:        The opened file descriptor.
    @type file:         file object
    @param model_type:  The model-free model type.
    @type model_type:   str
    @param algor:       The optimisation algorithm to use.  This can be the Levenberg-Marquardt algorithm 'LM' or the Newton-Raphson algorithm 'NR'.
    @type algor:        str
    @raise RelaxError:  If more than one non-proton nucleus type is present, if the nucleus type is not handled by Dasha, or if the model type is unsupported.
    """

    # Delete all data.
    file.write("# Delete all data.\n")
    file.write("del 1 10000\n")

    # Nucleus type - Dasha can only handle a single heteronucleus type.
    file.write("\n# Nucleus type.\n")
    nucleus = None
    for spin in spin_loop():
        # Skip protons.
        if spin.isotope == '1H':
            continue

        # Mismatching nuclei are fatal.
        if nucleus and spin.isotope != nucleus:
            raise RelaxError("The nuclei '%s' and '%s' do not match, relax can only handle one nucleus type in Dasha." % (nucleus, spin.isotope))

        # First heteronucleus found.
        if not nucleus:
            nucleus = spin.isotope

    # Convert the isotope name into Dasha notation and write it out.
    if nucleus == '15N':
        nucleus = 'N15'
    elif nucleus == '13C':
        nucleus = 'C13'
    else:
        raise RelaxError("Cannot handle the nucleus type '%s' within Dasha." % nucleus)
    file.write("set nucl %s\n" % nucleus)

    # Number of frequencies.
    file.write("\n# Number of frequencies.\n")
    file.write("set n_freq %s\n" % cdp.spectrometer_frq_count)

    # Frequency values, converted from Hz to MHz.
    file.write("\n# Frequency values.\n")
    count = 1
    for frq in loop_frequencies():
        file.write("set H1_freq %s %s\n" % (frq / 1e6, count))
        count += 1

    # Set the diffusion tensor (not required for the local tm models).
    file.write("\n# Set the diffusion tensor.\n")
    if model_type != 'local_tm':
        # Sphere - only the correlation time (converted from s to ns).
        if cdp.diff_tensor.type == 'sphere':
            file.write("set tr %s\n" % (cdp.diff_tensor.tm / 1e-9))

        # Spheroid.
        elif cdp.diff_tensor.type == 'spheroid':
            file.write('set tr %s\n' % (cdp.diff_tensor.tm / 1e-9))

        # Ellipsoid.
        elif cdp.diff_tensor.type == 'ellipsoid':
            # Get the eigenvalues.
            Dx, Dy, Dz = diffusion_tensor.return_eigenvalues()

            # Geometric parameters.
            file.write("set tr %s\n" % (cdp.diff_tensor.tm / 1e-9))
            file.write("set D1/D3 %s\n" % (Dx / Dz))
            file.write("set D2/D3 %s\n" % (Dy / Dz))

            # Orientational parameters, converted from radians to degrees.
            file.write("set alfa %s\n" % (cdp.diff_tensor.alpha / (2.0 * pi) * 360.0))
            file.write("set betta %s\n" % (cdp.diff_tensor.beta / (2.0 * pi) * 360.0))
            file.write("set gamma %s\n" % (cdp.diff_tensor.gamma / (2.0 * pi) * 360.0))

    # Reading the relaxation data.
    file.write("\n# Reading the relaxation data.\n")
    file.write("echo Reading the relaxation data.\n")
    noe_index = 1
    r1_index = 1
    r2_index = 1
    for ri_id in cdp.ri_ids:
        # NOE data set.
        if cdp.ri_type[ri_id] == 'NOE':
            # The data set number and Dasha data type.
            number = noe_index
            data_type = 'noe'

            # Increment the data set index.
            noe_index = noe_index + 1

        # R1 data set.
        elif cdp.ri_type[ri_id] == 'R1':
            number = r1_index
            data_type = '1/T1'
            r1_index = r1_index + 1

        # R2 data set.
        elif cdp.ri_type[ri_id] == 'R2':
            number = r2_index
            data_type = '1/T2'
            r2_index = r2_index + 1

        # Set the data type - the set number is only needed from the second set onwards.
        if number == 1:
            file.write("\nread < %s\n" % data_type)
        else:
            file.write("\nread < %s %s\n" % (data_type, number))

        # The relaxation data, one line per residue.
        for residue in residue_loop():
            # Alias the spin (assumes one relevant spin per residue).
            spin = residue.spin[0]

            # Skip deselected spins.
            if not spin.select:
                continue

            # Deselect spins with missing data so they are excluded from the analysis.
            if len(spin.ri_data) != len(cdp.ri_ids) or spin.ri_data[ri_id] is None:
                spin.select = False
                continue

            # Write the residue number, value and error.
            file.write("%s %s %s\n" % (residue.num, spin.ri_data[ri_id], spin.ri_data_err[ri_id]))

        # Terminate the reading of this data set.
        file.write("exit\n")

    # Individual residue optimisation.
    if model_type == 'mf':
        # Loop over the residues.
        for residue in residue_loop():
            # Alias the spin.
            spin = residue.spin[0]

            # Skip deselected spins.
            if not spin.select:
                continue

            # Exactly one dipole-dipole interaction is required per spin.
            interatoms = return_interatom_list(spin_hash=spin._hash)
            if len(interatoms) == 0:
                raise RelaxNoInteratomError
            elif len(interatoms) > 1:
                raise RelaxError("Only one interatomic data container, hence dipole-dipole interaction, is supported per spin.")

            # Comment and echo for this residue.
            file.write("\n\n\n# Residue %s\n\n" % residue.num)
            file.write("echo Optimisation of residue %s\n" % residue.num)

            # Select the residue.
            file.write("\n# Select the residue.\n")
            file.write("set cres %s\n" % residue.num)

            # The angle of the XH vector in the spheroid diffusion frame.
            if cdp.diff_tensor.type == 'spheroid':
                file.write("set teta %s\n" % spin.alpha)

            # The spherical angles of the XH vector in the ellipsoid diffusion frame.
            elif cdp.diff_tensor.type == 'ellipsoid':
                file.write("\n# Setting the spherical angles of the XH vector in the ellipsoid diffusion frame.\n")
                file.write("set teta %s\n" % spin.theta)
                file.write("set fi %s\n" % spin.phi)

            # The Dasha spectral density mode, derived from the model-free parameters.
            if 'ts' in spin.params:
                jmode = 3
            elif 'te' in spin.params:
                jmode = 2
            elif 's2' in spin.params:
                jmode = 1

            # Chemical exchange flag.
            if 'rex' in spin.params:
                exch = True
            else:
                exch = False

            # Anisotropic diffusion flag.
            if cdp.diff_tensor.type == 'sphere':
                anis = False
            else:
                anis = True

            # Axial symmetry flag.
            if cdp.diff_tensor.type == 'spheroid':
                sym = True
            else:
                sym = False

            # Set the jmode, appending the optional flags to the same line.
            file.write("\n# Set the jmode.\n")
            file.write("set def jmode %s" % jmode)
            if exch:
                file.write(" exch")
            if anis:
                file.write(" anis")
            if sym:
                file.write(" sym")
            file.write("\n")

            # Parameter default values.
            file.write("\n# Parameter default values.\n")
            file.write("reset jmode %s\n" % residue.num)

            # Bond length, converted from m to Angstrom.
            file.write("\n# Bond length.\n")
            file.write("set r_hx %s\n" % (interatoms[0].r / 1e-10))

            # CSA value, converted to ppm.
            file.write("\n# CSA value.\n")
            file.write("set csa %s\n" % (spin.csa / 1e-6))

            # Fix the tf parameter if it isn't part of the model.
            if 'tf' not in spin.params and jmode == 3:
                file.write("\n# Fix the tf parameter.\n")
                file.write("fix tf 0\n")

        # Optimisation of all residues with the chosen algorithm.
        file.write("\n\n\n# Optimisation of all residues.\n")
        if algor == 'LM':
            file.write("lmin %s %s" % (first_residue_num(), last_residue_num()))
        elif algor == 'NR':
            file.write("min %s %s" % (first_residue_num(), last_residue_num()))

        # Show the results.
        file.write("\n# Show the results.\n")
        file.write("echo\n")
        file.write("show all\n")

        # Write the results to the per-parameter output files.
        file.write("\n# Write the results.\n")
        file.write("write s2.out S\n")
        file.write("write s2f.out Sf\n")
        file.write("write s2s.out Ss\n")
        file.write("write te.out te\n")
        file.write("write tf.out tf\n")
        file.write("write ts.out ts\n")
        file.write("write rex.out rex\n")
        file.write("write chi2.out F\n")

    # Any other model type is unsupported.
    else:
        raise RelaxError("Optimisation of the parameter set '%s' currently not supported." % model_type)
389
390
def execute(dir, force, binary):
    """Execute Dasha.

    @param dir:     The optional directory where the script is located.
    @type dir:      str or None
    @param force:   A flag which if True will cause any pre-existing files to be overwritten by Dasha.
    @type force:    bool
    @param binary:  The name of the Dasha binary file.  This can include the path to the binary.
    @type binary:   str
    """

    # Test that the binary exists and is executable.
    test_binary(binary)

    # Remember the current directory so it can be restored afterwards.
    orig_dir = getcwd()

    # Default to the data pipe name as the directory, and check it exists.
    if dir is None:
        dir = pipes.cdp_name()
    if not access(dir, F_OK):
        raise RelaxDirError('Dasha', dir)

    # Change to the Dasha directory.
    chdir(dir)

    # Always restore the original directory, even on failure.
    try:
        # The 'dasha_script' file must have been created beforehand.
        if not access('dasha_script', F_OK):
            raise RelaxFileError('dasha script', 'dasha_script')

        # The subprocess module is optional (see the conditional import at the top of the module).
        if Popen is None:
            raise RelaxError("The subprocess module is not available in this version of Python.")

        # Execute Dasha, communicating through pipes.
        pipe = Popen(binary, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=False)

        # Pump the script contents into Dasha's stdin.
        with open('dasha_script') as script:
            lines = script.readlines()
        for line in lines:
            # Encode to a byte string for Python 3 pipes.
            if hasattr(line, 'encode'):
                line = line.encode()
            pipe.stdin.write(line)

        # Close the pipe to signal the end of input.
        pipe.stdin.close()

        # Relay Dasha's stdout, decoding byte strings under Python 3.
        for line in pipe.stdout.readlines():
            if hasattr(line, 'decode'):
                line = line.decode()
            sys.stdout.write(line)

        # Relay Dasha's stderr.
        for line in pipe.stderr.readlines():
            if hasattr(line, 'decode'):
                line = line.decode()
            sys.stderr.write(line)

    finally:
        # Change back to the original directory.
        chdir(orig_dir)

    # Final spacing for the output.
    sys.stdout.write("\n\n")
477
def extract(dir):
    """Extract the data from the Dasha results files.

    @param dir:  The optional directory where the results file is located.
    @type dir:   str or None
    """

    # Sequence data must be present before any values can be set.
    if not exists_mol_res_spin_data():
        raise RelaxNoSequenceError

    # Default to the data pipe name as the directory, and check it exists.
    if dir is None:
        dir = pipes.cdp_name()
    if not access(dir, F_OK):
        raise RelaxDirError('Dasha', dir)

    # Loop over the model-free parameter result files.
    for param in ['s2', 's2f', 's2s', 'te', 'tf', 'ts', 'rex']:
        # The results file for this parameter.
        file_name = dir + sep + param + '.out'

        # The file must exist.
        if not access(file_name, F_OK):
            raise RelaxFileError('Dasha', file_name)

        # Scaling back to SI units:  correlation times from ns to s, Rex from
        # field-dependent units to the first-frequency value; S2 is unitless.
        if param in ['te', 'tf', 'ts']:
            scaling = 1e-9
        elif param == 'rex':
            scaling = 1.0 / (2.0 * pi * cdp.spectrometer_frq[cdp.ri_ids[0]]) ** 2
        else:
            scaling = 1.0

        # Read the [spin_id, value, error] data.
        data = read_results(file=file_name, scaling=scaling)

        # Store the values and errors.
        for i in range(len(data)):
            value.set(val=data[i][1], param=param, spin_id=data[i][0])
            # Bug fix:  the error is the third column data[i][2], not the spin ID data[i][0].
            value.set(val=data[i][2], param=param, spin_id=data[i][0], error=True)

        # Clear the parameter for all spins whose model does not contain it.
        for spin in spin_loop():
            # Skip the spins with the parameter in their model.
            if param in spin.params:
                continue

            # Blank the value.
            setattr(spin, param.lower(), None)

    # The chi-squared results file.
    file_name = dir + sep + 'chi2.out'

    # The file must exist.
    if not access(file_name, F_OK):
        raise RelaxFileError('Dasha', file_name)

    # Read the unscaled chi-squared values.
    data = read_results(file=file_name)

    # Store the chi-squared value on each spin.
    for i in range(len(data)):
        spin = return_spin(spin_id=data[i][0])
        spin.chi2 = data[i][1]
544
def read_results(file=None, dir=None, scaling=1.0):
    """Extract the data from the Dasha results file.

    @keyword file:     The name of the file to open.
    @type file:        str
    @keyword dir:      The directory containing the file (defaults to the current directory if None).
    @type dir:         str or None
    @keyword scaling:  The parameter scaling factor.
    @type scaling:     float
    @return:           The list of [spin_id, value, error] data extracted from the file, with the value and error multiplied by the scaling factor.
    @rtype:            list of [str, float, float]
    """

    # Extract the raw data from the file.
    data = extract_data(file=file, dir=dir)

    # Remove comments.
    data = strip(data)

    # Repackage each line as a [spin_id, value, error] triple.
    new_data = []
    for i in range(len(data)):
        # Build the spin ID from the residue number (assumes the @N spin of each residue - TODO confirm for non-15N data).
        spin_id = ':%s@N' % data[i][0]

        # Scale the value and error.
        value = float(data[i][1]) * scaling
        error = float(data[i][2]) * scaling

        # Append the repackaged data.
        new_data.append([spin_id, value, error])

    # Return the data.
    return new_data
572