#!/usr/bin/env python

# fsl_mrs_proc - script for individual MRS preprocessing stages
#
# Author:   Will Clarke <william.clarke@ndcn.ox.ac.uk>
#           Saad Jbabdi <saad@fmrib.ox.ac.uk>
#
# Copyright (C) 2020 University of Oxford
# SHBASECOPYRIGHT

# Imports
from fsl_mrs.auxiliary import configargparse
from fsl_mrs import __version__
from fsl_mrs.utils.splash import splash
from os import makedirs
from shutil import rmtree
import os.path as op
from fsl_mrs.utils.preproc import nifti_mrs_proc as preproc
from fsl_mrs.core import NIFTI_MRS, is_nifti_mrs
from dataclasses import dataclass


class InappropriateDataError(Exception):
    pass


class ArgumentError(Exception):
    pass


@dataclass
class datacontainer:
    '''Class for keeping track of data and reference data together.'''
    data: NIFTI_MRS
    datafilename: str
    reference: NIFTI_MRS = None
    reffilename: str = None
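
# Example invocations (illustrative file names only):
#   fsl_mrs_proc coilcombine --file raw_mrs.nii.gz --reference wref.nii.gz --output out_dir -r
#   fsl_mrs_proc average --file out_dir/raw_mrs.nii.gz --dim DIM_DYN --output out_dir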


def main():
    # Parse command-line arguments
    p = configargparse.ArgParser(
        add_config_file_help=False,
        description="FSL Magnetic Resonance Spectroscopy - Preprocessing")

    p.add_argument('-v', '--version', action='version', version=__version__)
    p.add('--config',
          required=False,
          is_config_file=True,
          help='configuration file')
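    # (configargparse reads simple 'key = value' lines from the config file;
    #  values given on the command line take precedence over the config file.)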

    sp = p.add_subparsers(title='subcommands',
                          description='Preprocessing subcommands',
                          required=True,
                          dest='subcommand')

    # Coil combination subcommand
    ccparser = sp.add_parser('coilcombine',
                             help='Combine coils.',
                             add_help=False)
    cc_group = ccparser.add_argument_group('coilcombine arguments')
    cc_group.add_argument('--file', type=str, required=True,
                          help='Uncombined coil data file(s)')
    cc_group.add_argument('--reference', type=str, required=False,
                          help='Water unsuppressed reference data')
    cc_group.add_argument('--no_prewhiten', action="store_false",
                          help="Don't prewhiten data before coil combination")
    ccparser.set_defaults(func=coilcombine)
    add_common_args(ccparser)
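
    # Each remaining subcommand follows the same pattern: create a subparser,
    # add its argument group, bind the handler with set_defaults(func=...),
    # and append the shared options with add_common_args().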

    # Average subcommand
    avgparser = sp.add_parser('average', help='Average FIDs.', add_help=False)
    avg_group = avgparser.add_argument_group('average arguments')
    avg_group.add_argument('--file', type=str, required=True,
                           help='MRS file(s)')
    avg_group.add_argument('--dim', type=str,
                           help='Select dimension to average across')
    avgparser.set_defaults(func=average)
    add_common_args(avgparser)

    # Align subcommand - frequency/phase alignment
    alignparser = sp.add_parser('align', help='Align FIDs.', add_help=False)
    align_group = alignparser.add_argument_group('Align arguments')
    align_group.add_argument('--file', type=str, required=True,
                             help='List of files to align')
    align_group.add_argument('--dim', type=str, default='DIM_DYN',
                             help='NIFTI-MRS dimension tag to align across,'
                                  ' or "all" to align across all spectra in'
                                  ' higher dimensions. Default = DIM_DYN.')
    align_group.add_argument('--ppm', type=float, nargs=2,
                             metavar=('<lower-limit>', '<upper-limit>'),
                             default=(0.2, 4.2),
                             help='ppm limits of alignment window'
                                  ' (default=0.2->4.2)')
    align_group.add_argument('--reference', type=str, required=False,
                             help='Align to this reference data.')
    align_group.add_argument('--apod', type=float, default=10,
                             help='Apodise data to reduce noise (Hz).')
    alignparser.set_defaults(func=align)
    add_common_args(alignparser)

    # Align difference spectra subcommand - frequency/phase alignment
    alignDparser = sp.add_parser('align-diff', add_help=False,
                                 help='Align subspectra for differencing.')
    alignD_group = alignDparser.add_argument_group('Align subspec arguments')
    alignD_group.add_argument('--file', type=str, required=True,
                              help='Subspectra 1 - List of files to align')
    alignD_group.add_argument('--dim', type=str, default='DIM_DYN',
                              help='NIFTI-MRS dimension tag to align across')
    alignD_group.add_argument('--dim_diff', type=str, default='DIM_EDIT',
                              help='NIFTI-MRS dimension tag to difference across')
    alignD_group.add_argument('--ppm', type=float, nargs=2,
                              metavar='<lower-limit upper-limit>',
                              default=(0.2, 4.2),
                              help='ppm limits of alignment window'
                                   ' (default=0.2->4.2)')
    alignD_group.add_argument('--diff_type', type=str, required=False,
                              default='add',
                              help='add (default) or subtract.')
    alignDparser.set_defaults(func=aligndiff)
    add_common_args(alignDparser)

    # ECC subcommand - eddy current correction
    eccparser = sp.add_parser('ecc', add_help=False,
                              help='Eddy current correction')
    ecc_group = eccparser.add_argument_group('ECC arguments')
    ecc_group.add_argument('--file', type=str, required=True,
                           help='Data file(s) to correct')
    ecc_group.add_argument('--reference', type=str, required=True,
                           help='Phase reference data file(s)')
    eccparser.set_defaults(func=ecc)
    add_common_args(eccparser)

    # remove subcommand - remove peak using HLSVD
    hlsvdparser = sp.add_parser('remove', add_help=False,
                                help='Remove peak (default water) with HLSVD.')
    hlsvd_group = hlsvdparser.add_argument_group('HLSVD arguments')
    hlsvd_group.add_argument('--file', type=str, required=True,
                             help='Data file(s)')
    hlsvd_group.add_argument('--ppm', type=float, nargs=2,
                             metavar='<lower-limit upper-limit>',
                             default=[4.5, 4.8],
                             help='ppm limits of removal window')
    hlsvdparser.set_defaults(func=remove)
    add_common_args(hlsvdparser)

    # model subcommand - model peaks using HLSVD
    modelparser = sp.add_parser('model', add_help=False,
                                help='Model peaks with HLSVD.')
    model_group = modelparser.add_argument_group('HLSVD modelling arguments')
    model_group.add_argument('--file', type=str, required=True,
                             help='Data file(s)')
    model_group.add_argument('--ppm', type=float, nargs=2,
                             metavar='<lower-limit upper-limit>',
                             default=[4.5, 4.8],
                             help='ppm limits of modelling window')
    model_group.add_argument('--components', type=int,
                             default=5,
                             help='Number of components to model.')
    modelparser.set_defaults(func=model)
    add_common_args(modelparser)

    # tshift subcommand - shift/resample in timedomain
    tshiftparser = sp.add_parser('tshift', add_help=False,
                                 help='shift/resample in timedomain.')
    tshift_group = tshiftparser.add_argument_group('Time shift arguments')
    tshift_group.add_argument('--file', type=str, required=True,
                              help='Data file(s) to shift')
    tshift_group.add_argument('--tshiftStart', type=float, default=0.0,
                              help='Time shift at start (ms),'
                                   ' negative pads with zeros,'
                                   ' positive truncates')
    tshift_group.add_argument('--tshiftEnd', type=float, default=0.0,
                              help='Time shift at end (ms),'
                                   ' negative truncates,'
                                   ' positive pads with zeros')
    tshift_group.add_argument('--samples', type=int,
                              help='Resample to N points in FID.')
    tshiftparser.set_defaults(func=tshift)
    add_common_args(tshiftparser)

    # truncate
    truncateparser = sp.add_parser('truncate', add_help=False,
                                   help='truncate or pad by integer'
                                        ' points in timedomain.')
    truncate_group = truncateparser.add_argument_group(
        'Truncate/pad arguments')
    truncate_group.add_argument('--file', type=str, required=True,
                                help='Data file(s) to truncate or pad')
    truncate_group.add_argument('--points', type=int, default=0,
                                help='Points to add/remove (+/-)')
    truncate_group.add_argument('--pos', type=str, default='last',
                                help=" first' or 'last' (default)")
    truncateparser.set_defaults(func=truncate)
    add_common_args(truncateparser)

    # apodize
    apodparser = sp.add_parser('apodize', help='Apodize FID.', add_help=False)
    apod_group = apodparser.add_argument_group('Apodize arguments')
    apod_group.add_argument('--file', type=str, required=True,
                            help='Data file(s) to apodize')
    apod_group.add_argument('--filter', type=str, default='exp',
                            help="Filter choice."
                                 "Either 'exp' (default) or 'l2g'.")
    apod_group.add_argument('--amount', type=float, nargs='+',
                            help='Amount of broadening.'
                                 ' In Hz for exp mode.'
                                 ' Use space separated list for l2g.')
    apodparser.set_defaults(func=apodize)
    add_common_args(apodparser)

    # fshift subcommand - shift in frequency domain
    fshiftparser = sp.add_parser('fshift', add_help=False,
                                 help='shift in frequency domain.')
    fshift_group = fshiftparser.add_argument_group('Frequency shift arguments')
    fshift_group.add_argument('--file', type=str, required=True,
                              help='Data file(s) to shift')
    fshift_group.add_argument('--shiftppm', type=float,
                              help='Apply fixed shift (ppm scale)')
    fshift_group.add_argument('--shifthz', type=float,
                              help='Apply fixed shift (Hz scale)')
    fshift_group.add_argument('--shiftRef', action="store_true",
                              help='Shift to reference (default = Cr)')
    fshift_group.add_argument('--ppm', type=float, nargs=2,
                              metavar='<lower-limit upper-limit>',
                              default=(2.8, 3.2),
                              help='Shift maximum point in this range'
                                   ' to target (must specify --target).')
    fshift_group.add_argument('--target', type=float, default=3.027,
                              help='Target position (must be used with ppm).'
                                   ' Default = 3.027')
    fshiftparser.set_defaults(func=fshift)
    add_common_args(fshiftparser)

    # unlike subcommand - find FIDs that are unlike the rest
    unlikeparser = sp.add_parser('unlike', add_help=False,
                                 help='Identify unlike FIDs.')
    unlike_group = unlikeparser.add_argument_group('unlike arguments')
    unlike_group.add_argument('--file', type=str, required=True,
                              help='Data file(s) to check')
    unlike_group.add_argument('--sd', type=float, default=1.96,
                              help='Exclusion limit'
                                   ' (# of SD from mean, default=1.96)')
    unlike_group.add_argument('--iter', type=int, default=2,
                              help='Iterations of algorithm.')
    unlike_group.add_argument('--ppm', type=float, nargs=2,
                              metavar='<lower-limit upper-limit>',
                              default=None,
                              help='ppm limits of comparison window')
    unlike_group.add_argument('--outputbad', action="store_true",
                              help='Output failed FIDs')
    unlikeparser.set_defaults(func=unlike)
    add_common_args(unlikeparser)

    # Phasing - based on maximum point in range
    phaseparser = sp.add_parser('phase', add_help=False,
                                help='Phase spectrum based on'
                                     ' maximum point in range')
    phase_group = phaseparser.add_argument_group('Phase arguments')
    phase_group.add_argument('--file', type=str, required=True,
                             help='Data file(s) to phase')
    phase_group.add_argument('--ppm', type=float, nargs=2,
                             metavar='<lower-limit upper-limit>',
                             default=(2.8, 3.2),
                             help='ppm limits of the search window')
    phase_group.add_argument('--hlsvd', action="store_true",
                             help='Remove peaks outside the search area')
    phaseparser.set_defaults(func=phase)
    add_common_args(phaseparser)

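    # fixed_phase subcommand - apply a fixed 0th/1st order phase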
    fixphaseparser = sp.add_parser('fixed_phase', add_help=False,
                                   help='Apply fixed phase to spectrum')
    fphase_group = fixphaseparser.add_argument_group('Phase arguments')
    fphase_group.add_argument('--file', type=str, required=True,
                              help='Data file(s) to phase')
    fphase_group.add_argument('--p0', type=float,
                              metavar='<degrees>',
                              help='Zero order phase (degrees)')
    fphase_group.add_argument('--p1', type=float,
                              default=0.0,
                              metavar='<seconds>',
                              help='First order phase (seconds)')
    fixphaseparser.set_defaults(func=fixed_phase)
    add_common_args(fixphaseparser)

    # subtraction - subtraction of FIDs
    subtractparser = sp.add_parser('subtract', add_help=False,
                                   help='Subtract two FID files or across a dimension')
    subtract_group = subtractparser.add_argument_group('Subtraction arguments')
    subtract_group.add_argument('--file', type=str, required=True,
                                help='File to subtract from')
    subtract_group.add_argument('--reference', type=str,
                                help='File to subtract from --file'
                                     ' (output is file - reference)')
    subtract_group.add_argument('--dim', type=str,
                                help='NIFTI-MRS dimension tag to subtract across')
    subtractparser.set_defaults(func=subtract)
    add_common_args(subtractparser)

    # add - addition of FIDs
    addparser = sp.add_parser('add', add_help=False, help='Add two FIDs or across a dimension')
    add_group = addparser.add_argument_group('Addition arguments')
    add_group.add_argument('--file', type=str, required=True,
                           help='File to add to.')
    add_group.add_argument('--reference', type=str,
                           help='File to add to --file')
    add_group.add_argument('--dim', type=str,
                           help='NIFTI-MRS dimension tag to add across')
    addparser.set_defaults(func=add)
    add_common_args(addparser)

    # conj - conjugation
    conjparser = sp.add_parser('conj', add_help=False, help='Conjugate fids')
    conj_group = conjparser.add_argument_group('Conjugation arguments')
    conj_group.add_argument('--file', type=str, required=True,
                            help='Data file(s) to conjugate')
    conjparser.set_defaults(func=conj)
    add_common_args(conjparser)

    # Parse command-line arguments
    args = p.parse_args()

    # Output kickass splash screen
    if args.verbose:
        splash(logo='mrs')

    # Parse file arguments
    datafiles, reffiles = parsefilearguments(args)

    # Handle data loading
    dataList = loadData(datafiles,
                        refdatafile=reffiles)

    # Create output folder if required
    if not op.isdir(args.output):
        makedirs(args.output)
    elif op.isdir(args.output) and args.overwrite:
        rmtree(args.output)
        makedirs(args.output)

    # Handle report generation output location.
    # Bit of a hack, but I messed up the type expected by the
    # nifti mrs proc functions.
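    # (They expect a directory path or None for their report argument,
    #  not a bool.)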
    if args.generateReports:
        args.generateReports = args.output
    else:
        args.generateReports = None

    # Call function - pass dict like view of args
    #  for compatibility with other modules
    dataout = args.func(dataList, vars(args))
    if isinstance(dataout, tuple):
        additionalOutputs = dataout[1:]
        dataout = dataout[0]
    else:
        additionalOutputs = None

    # Write data
    writeData(dataout, args)

    # Print any additional outputs
    if additionalOutputs is not None:
        print(additionalOutputs)


def add_common_args(p):
    """Add any arguments which are common between the sub commands."""
    # This is so the arguments can appear after the subcommand.

    # Arguments not associated with subcommands
    required = p.add_argument_group('required arguments')
    optional = p.add_argument_group('additional options')

    # REQUIRED ARGUMENTS
    required.add_argument('--output',
                          required=True, type=str, metavar='<str>',
                          help='output folder')

    # ADDITIONAL OPTIONAL ARGUMENTS
    optional.add_argument('--overwrite', action="store_true",
                          help='overwrite existing output folder')
    optional.add_argument('-r', '--generateReports', action="store_true",
                          help='Generate HTML reports.')
    # optional.add_argument('-i', '--reportIndicies',
    #                       type=int,
    #                       nargs='+',
    #                       default=[0],
    #                       help='Generate reports for selected inputs where'
    #                            ' multiple input files exist.'
    #                            ' Defaults to first (0).'
    #                            ' Specify as indices counting from 0.')
    optional.add_argument('--allreports', action="store_true",
                          help='Generate reports for all inputs.'
                               ' Overrides arguments to reportIndicies.')
    # optional.add_argument('--conjugate', action="store_true",
    #                       help='apply conjugate to FID')
    optional.add_argument('--filename', type=str, metavar='<str>',
                          help='Override output file name.')
    optional.add_argument('--verbose', action="store_true",
                          help='spit out verbose info')
    optional.add_argument('-h', '--help', action='help',
                          help='show this help message and exit')


def parsefilearguments(args):
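    """Return the --file path and, if the subcommand defines it, the --reference path."""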
    # print(args.file)
    datafiles = args.file
    if 'reference' in args:
        # print(args.reference)
        reffiles = args.reference
    else:
        reffiles = None

    return datafiles, reffiles


# Data I/O functions
def loadData(datafile, refdatafile=None):
    """ Load data from path.

    The data must be of NIFTI MRS format.
    Optionally loads a reference file.
    """

    # Check the data file passed: it must be in NIFTI-MRS format.
    if not is_nifti_mrs(datafile):
        raise ValueError('Preprocessing routines only handle NIFTI MRS'
                         ' format data. Please convert your data using'
                         ' spec2nii.')

    if refdatafile and not is_nifti_mrs(refdatafile):
        raise ValueError('Preprocessing routines only handle NIFTI MRS'
                         ' format data. Please convert your data using'
                         ' spec2nii.')

    if refdatafile:
        loaded_data = datacontainer(NIFTI_MRS(datafile),
                                    op.basename(datafile),
                                    NIFTI_MRS(refdatafile),
                                    op.basename(refdatafile))
    else:
        loaded_data = datacontainer(NIFTI_MRS(datafile),
                                    op.basename(datafile))

    return loaded_data


def writeData(dataobj, args):
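    """Save the processed data into the output folder.

    The input file name is reused unless --filename is given, in which case
    '.nii.gz' is appended to the supplied name.
    """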

    if args.filename is None:
        fileout = op.join(args.output, dataobj.datafilename)
    else:
        fileout = op.join(args.output, args.filename + '.nii.gz')

    dataobj.data.save(fileout)


# Option functions
# Functions below here should be associated with a
# subcommand method specified above.
# They should call a method in nifti_mrs_proc.py.
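# Each takes the loaded datacontainer plus the dict view of the parsed
# arguments and returns a new datacontainer, e.g. (illustrative template,
# 'some_method' is a placeholder, not a real subcommand):
#
#     def example(dataobj, args):
#         processed = preproc.some_method(dataobj.data,
#                                         report=args['generateReports'],
#                                         report_all=args['allreports'])
#         return datacontainer(processed, dataobj.datafilename)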

# Preprocessing functions
def coilcombine(dataobj, args):

    if 'DIM_COIL' not in dataobj.data.dim_tags:
        raise InappropriateDataError(f'Data ({dataobj.datafilename}) has no coil dimension.'
                                     f' Dimensions are {dataobj.data.dim_tags}.')

    combined = preproc.coilcombine(dataobj.data,
                                   reference=dataobj.reference,
                                   no_prewhiten=not args['no_prewhiten'],
                                   report=args['generateReports'],
                                   report_all=args['allreports'])

    return datacontainer(combined, dataobj.datafilename)


def average(dataobj, args):
    if args['dim'] not in dataobj.data.dim_tags:
        raise InappropriateDataError(f'Data ({dataobj.datafilename}) has no {args["dim"]} dimension.'
                                     f' Dimensions are {dataobj.data.dim_tags}.')

    averaged = preproc.average(dataobj.data,
                               args["dim"],
                               report=args['generateReports'],
                               report_all=args['allreports'])

    return datacontainer(averaged, dataobj.datafilename)


def align(dataobj, args):
    if args['dim'].lower() == 'all':
        pass
    elif args['dim'] not in dataobj.data.dim_tags:
        raise InappropriateDataError(f'Data ({dataobj.datafilename}) has no {args["dim"]} dimension.'
                                     f' Dimensions are {dataobj.data.dim_tags}.')

    aligned = preproc.align(dataobj.data,
                            args['dim'],
                            ppmlim=args['ppm'],
                            apodize=args['apod'],
                            report=args['generateReports'],
                            report_all=args['allreports'])

    return datacontainer(aligned, dataobj.datafilename)


def aligndiff(dataobj, args):
    if args['dim'] not in dataobj.data.dim_tags:
        raise InappropriateDataError(f'Data ({dataobj.datafilename}) has no {args["dim"]} dimension.'
                                     f' Dimensions are {dataobj.data.dim_tags}.')

    aligned = preproc.aligndiff(dataobj.data,
                                args['dim'],
                                args['dim_diff'],
                                args['diff_type'],
                                ppmlim=args['ppm'],
                                report=args['generateReports'],
                                report_all=args['allreports'])

    return datacontainer(aligned, dataobj.datafilename)


def ecc(dataobj, args):
    corrected = preproc.ecc(dataobj.data,
                            dataobj.reference,
                            report=args['generateReports'],
                            report_all=args['allreports'])

    return datacontainer(corrected, dataobj.datafilename)


def remove(dataobj, args):
    corrected = preproc.remove_peaks(dataobj.data,
                                     limits=args['ppm'],
                                     report=args['generateReports'],
                                     report_all=args['allreports'])

    return datacontainer(corrected, dataobj.datafilename)


def model(dataobj, args):
    modelled = preproc.hlsvd_model_peaks(dataobj.data,
                                         limits=args['ppm'],
                                         components=args['components'],
                                         report=args['generateReports'],
                                         report_all=args['allreports'])

    return datacontainer(modelled, dataobj.datafilename)


def tshift(dataobj, args):
    shifted = preproc.tshift(dataobj.data,
                             tshiftStart=args['tshiftStart'],
                             tshiftEnd=args['tshiftEnd'],
                             samples=args['samples'],
                             report=args['generateReports'],
                             report_all=args['allreports'])

    return datacontainer(shifted, dataobj.datafilename)


def truncate(dataobj, args):
    truncated = preproc.truncate_or_pad(dataobj.data,
                                        args['points'],
                                        args['pos'],
                                        report=args['generateReports'],
                                        report_all=args['allreports'])

    return datacontainer(truncated, dataobj.datafilename)


def apodize(dataobj, args):
    apodized = preproc.apodize(dataobj.data,
                               args['amount'],
                               filter=args['filter'],
                               report=args['generateReports'],
                               report_all=args['allreports'])

    return datacontainer(apodized, dataobj.datafilename)


def fshift(dataobj, args):
    if args['shiftppm'] is not None:
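        # SpectrometerFrequency is stored in MHz, so ppm * MHz gives Hz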
        shift = args['shiftppm'] * dataobj.data.spectrometer_frequency[0]
        callMode = 'fixed'
    elif args['shifthz'] is not None:
        shift = args['shifthz']
        callMode = 'fixed'
    elif args['shiftRef']:
        callMode = 'ref'
    else:
        raise ArgumentError('Specify --shiftppm, --shifthz or --shiftRef.')

    if callMode == 'fixed':
        shifted = preproc.fshift(dataobj.data,
                                 shift,
                                 report=args['generateReports'],
                                 report_all=args['allreports'])

    elif callMode == 'ref':
        shifted = preproc.shift_to_reference(dataobj.data,
                                             args['target'],
                                             args['ppm'],
                                             report=args['generateReports'],
                                             report_all=args['allreports'])

    return datacontainer(shifted, dataobj.datafilename)


def unlike(dataobj, args):
    if dataobj.data.shape[:3] != (1, 1, 1):
        raise InappropriateDataError('unlike subcommand only works on single voxel data.'
                                     ' It is unclear what should happen with MRSI data.')

    good, bad = preproc.remove_unlike(dataobj.data,
                                      ppmlim=args['ppm'],
                                      sdlimit=args['sd'],
                                      niter=args['iter'],
                                      report=args['generateReports'])

    if args['outputbad']:
        # Save bad results here - bit of a hack!
        bad.save(op.join(args['output'], dataobj.datafilename + '_FAIL'))

    return datacontainer(good, dataobj.datafilename)


def phase(dataobj, args):
    phased = preproc.phase_correct(dataobj.data,
                                   args['ppm'],
                                   hlsvd=args['hlsvd'],
                                   report=args['generateReports'],
                                   report_all=args['allreports'])

    return datacontainer(phased, dataobj.datafilename)


def fixed_phase(dataobj, args):
    phased = preproc.apply_fixed_phase(dataobj.data,
                                       args['p0'],
                                       p1=args['p1'],
                                       report=args['generateReports'],
                                       report_all=args['allreports'])

    return datacontainer(phased, dataobj.datafilename)


def subtract(dataobj, args):
    if dataobj.reference is not None:
        subtracted = preproc.subtract(dataobj.data,
                                      data1=dataobj.reference,
                                      report=args['generateReports'],
                                      report_all=args['allreports'])
    elif args['dim'] is not None:
        subtracted = preproc.subtract(dataobj.data,
                                      dim=args['dim'],
                                      report=args['generateReports'],
                                      report_all=args['allreports'])
    else:
        raise ArgumentError('Specify --reference or --dim.')

    return datacontainer(subtracted, dataobj.datafilename)


def add(dataobj, args):
    if dataobj.reference is not None:
        added = preproc.add(dataobj.data,
                            data1=dataobj.reference,
                            report=args['generateReports'],
                            report_all=args['allreports'])
    elif args['dim'] is not None:
        added = preproc.add(dataobj.data,
                            dim=args['dim'],
                            report=args['generateReports'],
                            report_all=args['allreports'])
    else:
        raise ArgumentError('Specify --reference or --dim.')

    return datacontainer(added, dataobj.datafilename)


def conj(dataobj, args):
    conjugated = preproc.conjugate(dataobj.data,
                                   report=args['generateReports'],
                                   report_all=args['allreports'])

    return datacontainer(conjugated, dataobj.datafilename)


if __name__ == '__main__':
    main()