Commit acee7348 authored by Paul McCarthy's avatar Paul McCarthy 🚵
Browse files

TEST: remove/fix tests using removed options

parent ba2cd039
......@@ -38,75 +38,6 @@ from . import (gen_DataTable,
gen_DataTableFromDataFrame)
def test_genColumnNames():
    """Test ``exporting.genColumnNames`` with default, pattern-based, and
    mapping-based column naming, on both a generated data table and a
    DataFrame-backed one.

    NOTE(review): leading indentation was destroyed in this source; the
    block structure (everything inside the ``patch_base_tables()`` context)
    was reconstructed from the statement sequence — confirm against
    version control.
    """

    testdata = np.random.randint(1, 10, (10, 10))

    with patch_base_tables():
        dtable = gen_DataTable(testdata)

        # Give variable 6 a description, so the {description}
        # field is non-empty for its column below.
        dtable.vartable.loc[6, 'Description'] = 'abcde'

        # Default: column names are passed through unchanged.
        exp   = {c.name : c.name for c in dtable.dataColumns}
        names = exporting.genColumnNames(dtable)
        assert exp == names

        # Pattern + mapping: mapped columns take the mapped name,
        # all other columns are formatted with the pattern.
        colpat = '{variable}|{name}|{description}|{visit}|{instance}'
        colmap = {'1-0.0' : 'variable_one', '2-0.0' : 'variable_two'}
        names  = exporting.genColumnNames(dtable, colpat, colmap)
        exp    = ['variable_one',
                  'variable_two',
                  '3|3-0.0||0|0',
                  '4|4-0.0||0|0',
                  '5|5-0.0||0|0',
                  '6|6-0.0|abcde|0|0',
                  '7|7-0.0||0|0',
                  '8|8-0.0||0|0',
                  '9|9-0.0||0|0',
                  '10|10-0.0||0|0']
        exp    = {c.name : n for c, n in zip(dtable.dataColumns, exp)}
        assert exp == names

        # Mapping only (no pattern): unmapped columns keep their names.
        exp   = ['var1', 'var2'] + ['{}-0.0'.format(v) for v in range(3, 11)]
        exp   = {c.name : n for c, n in zip(dtable.dataColumns, exp)}
        names = exporting.genColumnNames(dtable, None, {'1-0.0' : 'var1',
                                                        '2-0.0' : 'var2'})
        assert exp == names

        # Pattern only.
        exp   = ['{}##0'.format(v) for v in range(1, 11)]
        exp   = {c.name : n for c, n in zip(dtable.dataColumns, exp)}
        names = exporting.genColumnNames(dtable, '{variable}##{visit}')
        assert exp == names

        # DataFrame-backed table with non-UKB-style column names.
        df = pd.DataFrame({'col1' : [1, 2, 3],
                           'col2' : [4, 5, 6],
                           'id'   : [1, 2, 3]}).set_index('id')
        dtable = gen_DataTableFromDataFrame(df)

        # Default pass-through.
        exp   = {c.name : c.name for c in dtable.dataColumns}
        names = exporting.genColumnNames(dtable)
        assert exp == names

        # Mapping only.
        exp   = ['col1', 'cc2']
        exp   = {c.name : e for c, e in zip(dtable.dataColumns, exp)}
        names = exporting.genColumnNames(dtable, None, {'col2' : 'cc2'})
        assert exp == names

        # Pattern + mapping - mapping takes precedence for col2.
        exp   = ['00col1', 'cc2']
        exp   = {c.name : e for c, e in zip(dtable.dataColumns, exp)}
        names = exporting.genColumnNames(dtable, '{visit}{instance}{name}',
                                         {'col2' : 'cc2'})
        assert exp == names

        # Pattern only.
        exp   = ['00col1', '00col2']
        exp   = {c.name : e for c, e in zip(dtable.dataColumns, exp)}
        names = exporting.genColumnNames(dtable, '{visit}{instance}{name}')
        assert exp == names
def test_exportData():
custom.registerBuiltIns()
......@@ -121,9 +52,9 @@ def test_exportData():
with tempdir():
dtable = gen_DataTable(testdata)
exporting.exportData(dtable, 'data.tsv', colpat='{variable}')
exporting.exportData(dtable, 'data.tsv')
exp = ['\t'.join(['eid'] +
['{}'.format(i) for i in range(1, 11)])] + \
['{}-0.0'.format(i) for i in range(1, 11)])] + \
['\t'.join([str(i + 1)] + [str(c) for c in r])
for i, r in enumerate(testdata.T)]
exp = '\n'.join(exp)
......@@ -134,13 +65,10 @@ def test_exportData():
dtable = gen_DataTable(td)
exporting.exportData(dtable,
'data.tsv',
colpat='{variable}',
colmap={'1-0.0' : 'var1'},
idcol='sub',
sep='*',
missingValues='boo')
exp = ['*'.join(['sub', 'var1'] +
['{}'.format(i) for i in range(2, 11)])] + \
exp = ['*'.join(['eid'] +
['{}-0.0'.format(i) for i in range(1, 11)])] + \
['*'.join([str(i + 1)] +
['boo' if np.isnan(c) else str(c) for c in r])
for i, r in enumerate(td.T)]
......@@ -148,7 +76,6 @@ def test_exportData():
assert check('data.tsv', exp)
def test_exportData_subjid():
custom.registerBuiltIns()
......@@ -357,12 +284,6 @@ def test_exportHDF5():
with tempdir():
gen_test_data(5, 10, 'data.tsv', ctypes={1 : 'date', 2 : 'datetime'})
colnames = OrderedDict((('1-0.0', 'one'),
('2-0.0', 'two'),
('3-0.0', 'three'),
('4-0.0', 'four'),
('5-0.0', 'five')))
data = pd.read_csv('data.tsv',
delimiter='\t',
parse_dates=['1-0.0', '2-0.0'],
......@@ -372,18 +293,14 @@ def test_exportHDF5():
exporting_hdf5.exportHDF5(dt,
'out_funpack.h5',
'eid',
colnames,
key='h5key',
style='funpack')
exporting_hdf5.exportHDF5(dt,
'out_pandas.h5',
'eid',
colnames,
key='h5key',
style='pandas')
colnames = list(colnames.values())
colnames = [c.name for c in dt.dataColumns]
exp = dt[:, :]
gotpd = pd.read_hdf('out_pandas.h5')
......@@ -509,18 +426,9 @@ def test_exporting_id_column():
exporting.exportData(dt,
'out.txt',
fileFormat='tsv')
got = pd.read_csv('out.txt', delimiter='\t', index_col=0)
assert got.index.name == 'my_id'
exporting.exportData(dt,
'out.txt',
fileFormat='tsv',
idcol='my_id_renamed')
got = pd.read_csv('out.txt', delimiter='\t', index_col=0)
assert got.index.name == 'my_id_renamed'
def test_exporting_no_data():
......@@ -639,15 +547,11 @@ def test_exportTSV_parallel():
# in one go, single process
exporting_tsv.exportTSV(dt,
'out1.tsv',
'eid',
{},
numRows=100000)
# chunked, single process
exporting_tsv.exportTSV(dt,
'out2.tsv',
'eid',
{},
numRows=8767)
# chunked, multiprocess
......@@ -655,8 +559,6 @@ def test_exportTSV_parallel():
with mock.patch.object(dt, 'pool', return_value=pool):
exporting_tsv.exportTSV(dt,
'out3.tsv',
'eid',
{},
numRows=5675)
exp = dt[:, :]
......
......@@ -601,19 +601,6 @@ def test_main_column_names():
with tempdir():
gen_test_data(10, 100, 'data.tsv')
main.main(shlex.split('-nb -oi subby -cp "{variable}-woo" out.tsv data.tsv'))
got = pd.read_csv('out.tsv', delimiter='\t', index_col=0)
exp = ['{}-woo'.format(i) for i in range(1, 11)]
assert np.all(got.columns == exp)
assert got.index.name == 'subby'
main.main(shlex.split(
'-nb -ow -rc 1-0.0 woopy -cp "{name}" out.tsv data.tsv'))
got = pd.read_csv('out.tsv', delimiter='\t', index_col=0)
exp = ['{}-0.0'.format(i) for i in range(1, 11)]
exp[0] = 'woopy'
assert np.all(got.columns == exp)
# non-standard input column names
names = ['{}-0.0'.format(i) for i in range(1, 10)] + ['woopy']
gen_test_data(10, 100, 'data.tsv', names=names)
......@@ -621,12 +608,6 @@ def test_main_column_names():
got = pd.read_csv('out.tsv', delimiter='\t', index_col=0)
assert sorted(got.columns) == sorted(names)
gen_test_data(10, 100, 'data.tsv', names=names)
main.main(shlex.split('-nb -ow -cp "{name}-{visit}" out.tsv data.tsv'))
exp = ['{}-0.0-0'.format(i) for i in range(1, 10)] + ['woopy-0']
got = pd.read_csv('out.tsv', delimiter='\t', index_col=0)
assert sorted(got.columns) == sorted(exp)
@patch_logging
@clear_plugins
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.