Commit 74e7fea0 authored by Paul McCarthy 🚵

RF: Default processing/rules are no more. They will become part of the fmrib configuration. Default behaviour is now equivalent to --pass_through
parent ce59a54c
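
A hedged sketch (not part of this commit) of what the change means for a caller. The module path funpack.config, the '-vf'/'-pf' short flags, and the positional output/input file arguments are assumptions; the point is simply that the table-file options no longer default to the shipped tables.

from funpack import config  # assumed module name; parseArgs is defined in the file diffed below

# With no table files named, the options default to None, so no variable /
# processing rules are applied - behaviour equivalent to --pass_through.
args, _ = config.parseArgs(['out.tsv', 'in.csv'])
print(args.variable_file, args.processing_file)    # None None

# Rule-driven behaviour now requires naming the tables explicitly
# (file names here are placeholders, not shipped defaults):
args, _ = config.parseArgs(['-vf', 'my_variables.tsv',
                            '-pf', 'my_processing.tsv',
                            'out.tsv', 'in.csv'])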
@@ -14,7 +14,6 @@ import functools as ft
import itertools as it
import multiprocessing as mp
import sys
import glob
import shlex
import logging
import argparse
@@ -35,13 +34,6 @@ log = logging.getLogger(__name__)
VERSION = funpack.__version__
FUNPACKDIR = op.dirname(__file__)
DEFAULT_TFILE = op.join(FUNPACKDIR, 'data', 'types.tsv')
DEFAULT_PFILE = op.join(FUNPACKDIR, 'data', 'processing.tsv')
DEFAULT_CFILE = op.join(FUNPACKDIR, 'data', 'categories.tsv')
DEFAULT_VFILES = op.join(FUNPACKDIR, 'data', 'variables_*.tsv')
DEFAULT_DFILES = op.join(FUNPACKDIR, 'data', 'datacodings_*.tsv')
DEFAULT_VFILES = list(glob.glob(DEFAULT_VFILES))
DEFAULT_DFILES = list(glob.glob(DEFAULT_DFILES))
DEFAULT_MERGE_AXIS = importing.MERGE_AXIS
DEFAULT_MERGE_STRATEGY = importing.MERGE_STRATEGY
DEFAULT_EXPORT_FORMAT = exporting.EXPORT_FORMAT
@@ -69,13 +61,11 @@ CLI_ARGUMENTS = collections.OrderedDict((
(('ms', 'merge_strategy'), {'choices' : AVAILABLE_MERGE_STRATEGIES,
'default' : DEFAULT_MERGE_STRATEGY}),
(('cfg', 'config_file'), {}),
(('vf', 'variable_file'), {'action' : 'append',
'default' : DEFAULT_VFILES}),
(('df', 'datacoding_file'), {'action' : 'append',
'default' : DEFAULT_DFILES}),
(('tf', 'type_file'), {'default' : DEFAULT_TFILE}),
(('pf', 'processing_file'), {'default' : DEFAULT_PFILE}),
(('cf', 'category_file'), {'default' : DEFAULT_CFILE})]),
(('vf', 'variable_file'), {'action' : 'append'}),
(('df', 'datacoding_file'), {'action' : 'append'}),
(('tf', 'type_file'), {}),
(('pf', 'processing_file'), {}),
(('cf', 'category_file'), {})]),
('Import options', [
(('ia', 'import_all'), {'action' : 'store_true'}),
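
For context, a standalone argparse snippet (plain Python, not funpack code) showing why the 'default' entries can simply be dropped: an 'append' option with no default leaves its attribute as None when never given, which is what the "is not None" checks later in parseArgs rely on.

import argparse

p = argparse.ArgumentParser()
p.add_argument('-vf', '--variable_file', action='append')

print(p.parse_args([]).variable_file)                                # None
print(p.parse_args(['-vf', 'a.tsv', '-vf', 'b.tsv']).variable_file)  # ['a.tsv', 'b.tsv']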
@@ -229,24 +219,19 @@ CLI_ARGUMENT_HELP = {
'File containing default command line arguments.',
'variable_file' :
'File(s) containing rules for handling variables '
'(default: {}).'.format(DEFAULT_VFILES),
'File(s) containing rules for handling variables',
'datacoding_file' :
'File(s) containing rules for handling data codings '
'(default: {}).'.format(DEFAULT_DFILES),
'File(s) containing rules for handling data codings.',
'type_file' :
'File containing rules for handling types '
'(default: {}).'.format(DEFAULT_TFILE),
'File containing rules for handling types.',
'processing_file' :
'File containing variable processing rules '
'(default: {}).'.format(DEFAULT_PFILE),
'File containing variable processing rules.',
'category_file' :
'File containing variable categories '
'(default: {}).'.format(DEFAULT_CFILE),
'File containing variable categories.',
# Import options
'import_all' :
@@ -394,8 +379,7 @@ CLI_ARGUMENT_HELP = {
# Miscellaneous options
'version' : 'Print version and exit.',
'dry_run' : 'Print a summary of what would happen and exit.',
'no_builtins' : 'Do not use the built in variable, data coding, type, '
'category or processing tables.',
'no_builtins' : 'Do not use the built in variable or data coding tables.',
'low_memory' : 'Store intermediate results on disk, rather than in RAM. '
'Use this flag on systems which cannot store the full '
'data set in RAM. ',
@@ -596,13 +580,6 @@ def parseArgs(argv=None, namespace=None):
args.skip_recoding = True
args.skip_processing = True
if args.no_builtins:
if args.variable_file == DEFAULT_VFILES: args.variable_file = None
if args.datacoding_file == DEFAULT_DFILES: args.datacoding_file = None
if args.type_file == DEFAULT_TFILE: args.type_file = None
if args.processing_file == DEFAULT_PFILE: args.processing_file = None
if args.category_file == DEFAULT_CFILE: args.category_file = None
# the importing.loadData function accepts
# either a single encoding, or one encoding
# for each data file.
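
The comment above states the convention without showing it; a minimal generic illustration (plain Python, not funpack's actual loadData implementation) of broadcasting one encoding across all data files:

def normalise_encodings(datafiles, encodings):
    # A single encoding applies to every file; a list must match
    # the data files one-to-one.
    if isinstance(encodings, str):
        encodings = [encodings] * len(datafiles)
    if len(encodings) != len(datafiles):
        raise ValueError('Need one encoding per data file')
    return list(encodings)

print(normalise_encodings(['a.csv', 'b.csv'], 'latin1'))  # ['latin1', 'latin1']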
@@ -790,6 +767,33 @@
    if args.type_clean is not None:
        args.type_clean = {util.CTYPES[t] : e for t, e in args.type_clean}

    # The variable_file, datacoding_file, type_file,
    # processing_file, and category_file options
    # can be specified relative to FUNPACKDIR
    def fixPath(f):

        if f is None:
            return f

        if not op.exists(f):
            fixed = op.join(FUNPACKDIR, f)
            if op.exists(fixed):
                return fixed

        # if the fixed version does not
        # exist, allow processing to
        # continue - it will fail later on.
        return f

    if args.variable_file is not None:
        args.variable_file = [fixPath(f) for f in args.variable_file]
    if args.datacoding_file is not None:
        args.datacoding_file = [fixPath(f) for f in args.datacoding_file]

    args.type_file = fixPath(args.type_file)
    args.processing_file = fixPath(args.processing_file)
    args.category_file = fixPath(args.category_file)

    return args, argv
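
A hedged usage sketch of the new path handling (module name and file path are placeholders, not shipped files): a table file that does not exist relative to the working directory is looked up relative to FUNPACKDIR instead.

from funpack import config  # assumed module name

# If data/my_types.tsv does not exist in the current directory, fixPath
# falls back to <FUNPACKDIR>/data/my_types.tsv (if that exists).
args, _ = config.parseArgs(['-tf', 'data/my_types.tsv', 'out.tsv', 'in.csv'])
print(args.type_file)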
......
ID NAValues
\ No newline at end of file
ID RawLevels NewLevels
\ No newline at end of file