#!/usr/bin/env python
# -*- coding: latin-1 -*-

#
# long_stats_slopes
#
# script to fit within-subject slopes into longitudinal data
#
# Original Author: Martin Reuter
# CVS Revision Info:
#    $Author: mreuter $
#    $Date: 2012/05/30 22:57:26 $
#    $Revision: 1.20 $
#
# Copyright © 2011 The General Hospital Corporation (Boston, MA) "MGH"
#
# Terms and conditions for use, reproduction, distribution and contribution
# are found in the 'FreeSurfer Software License Agreement' contained
# in the file 'LICENSE' found in the FreeSurfer distribution, and here:
#
# https://surfer.nmr.mgh.harvard.edu/fswiki/FreeSurferSoftwareLicense
#
# Reporting: freesurfer@nmr.mgh.harvard.edu
#
#

import warnings
warnings.filterwarnings('ignore', '.*negative int.*')
import os
import sys
import shlex
import optparse
import logging
import subprocess
import tempfile
import shutil
from subject_info import *
from LongQdecTable import *

# logging: create logger with a console handler
ch = logging.StreamHandler()
slopelogger = logging.getLogger("long_stats_slopes")
slopelogger.setLevel(logging.INFO)
slopelogger.addHandler(ch)


HELPTEXT = """

SUMMARY

Computes slopes of stats in a longitudinal study.
The slope is computed within subject from the longitudinally processed
results (taken from the <tpNid>.long.<template> directories) and the
output is written into the subject's <template>/stats directory for further
processing (e.g. group analysis).
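
For example, with '--stats aseg.stats --meas volume --do-rate' the rate for
each subject is estimated from its <tpNid>.long.<template>/stats/aseg.stats
files and written to <template>/stats/long.aseg.stats.volume-rate.dat.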


REQUIRED ARGUMENTS

--qdec <name>     qdec.table.dat file with first columns: fsid  fsid-base

--stats <name>    Stats file w/o path: e.g. aseg.stats or lh.aparc.stats

--meas <name>     Stats measure, e.g. volume, thickness, mean, std

--sd <path>       Full path to the FreeSurfer subjects directory (SUBJECTS_DIR)


One or more of the following:

--do-avg           Compute and output the temporal average (recommended)

--do-rate          Compute and output the rate (recommended)

--do-pc1fit        Compute and output the pct. change (w.r.t. tp1 computed from the linear fit)

--do-pc1           Compute and output the pct. change (w.r.t. tp1 directly, more noisy)

--do-spc           Compute and output the sym. pct. change (w.r.t. temp. average) (recommended)



OPTIONAL ARGUMENTS

--do-stack        Compute and output a table showing the time series (one row per time point)

--time <name>     Variable name of time variable (e.g. age)


To manually specify the within-subject output in <template>/stats/<name>:

--out-avg <name>     Output filename for temporal average (default: long.<stats>.<meas>-avg.dat)

--out-rate <name>    Output filename for the rate (diff per time) (default: long.<stats>.<meas>-rate.dat)

--out-pc1fit <name>  Output filename for percent change (w.r.t. time 1 fit) (default: long.<stats>.<meas>-pc1fit.dat)

--out-pc1 <name>     Output filename for percent change (w.r.t. time 1) (default: long.<stats>.<meas>-pc1.dat)

--out-spc <name>     Output filename for sym. pct. change (w.r.t. average) (default: long.<stats>.<meas>-spc.dat)

--out-stack <name>   Store time series file <template>/stats/<name> (default: long.<stats>.<meas>-stack.dat)


To manually specify stacked tables (across all input subjects):

--stack-avg <name>    Full output filename to stack temporal average tables (default no stacking)

--stack-rate <name>   Full output filename to stack rate tables (default no stacking)

--stack-pc1fit <name> Full output filename to stack pct. change to tp1fit tables (default no stacking)

--stack-pc1 <name>    Full output filename to stack pct. change to tp1 tables (default no stacking)

--stack-spc <name>    Full output filename to stack sym. pct. tables (default no stacking)



DETAILS
=======
QDEC.TABLE
Pass a qdec table file, where the first 2 columns need to be 'fsid  fsid-base'.
fsid is the id of the individual time point and 'fsid-base' is the template/base
id (grouping the time points that belong to the same subject). By default the
third column is taken as the time variable, but this can be overridden with
--time <name>.

QDEC.TABLE-EXAMPLE:
fsid    fsid-base  age   weight   IQ
Elmo_1   Elmo       3      10    1000        
#Elmo_2  Elmo       3.5    15    1100
Elmo_3   Elmo       4      20    1300 
Snuffy_1 Snuffy    20      40    1100
Snuffy_2 Snuffy    21      45    1200
Bert_1   Bert       8      25    2000
Bert_2   Bert       9      30    2500
Bert_3   Bert       9.9    34    2400


OUTPUT
======
The within-subject output will be written into the template's stats directory:
<template>/stats/<name>

For the output choose one or more of the following options:
*  'out-rate' for the rate: the slope of a linear fit within each subject
   (see the formula summary after this list). Depending on the time variable
   and the stats file, this yields e.g. the volume loss in mm^3/time or the
   thinning in mm/time for each region (if the time variable is measured in
   years, such as age: mm/year).
*  'out-pc1' for the percent change: the rate normalized by the measure at
   the first time point, times 100, e.g. percent thinning per year.
*  'out-pc1fit' for the percent change with respect to the value at tp1,
   evaluated from the linear fit: the rate normalized by the estimated
   measure at the first time point, times 100, e.g. percent thinning per
   year. Using the linear fit instead of the measure at time point 1
   directly reduces noise, as the tp1 data is noisier than the fit; we
   therefore recommend pc1fit over pc1.
*  'out-spc' for symmetrized percent change. Here we normalize by the temporal
   average instead of taking it from the first time point. The average is
   computed from the linear fit at the middle of the time interval.
   This is a symmetric 'percent thinning per year' and more robust, as TP1
   can be an outlier. Note, however, that the temporal average will change
   when adding new time points!
*  'out-avg' for output of the temporal average (linear fit at mid time).
*  'out-stack' for a table with the time series (each row a time point).
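
In terms of the within-subject linear fit, with intercept b0 (the fit at the
first time point, since the time axis is shifted to start at tp1) and slope b1,
and with avg denoting the fit at the mean time, the outputs roughly correspond
to:

   rate   = b1
   avg    = b0 + (mean_time - time_tp1) * b1
   pc1    = 100 * b1 / measure(tp1)
   pc1fit = 100 * b1 / b0
   spc    = 100 * b1 / avg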


It is also possible to store final stacked tables of the results, where each
row corresponds to the computed measure of one subject. For this, specify
--stack-* <name> with the full path and filename of the specific table, as in
the example below.
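
For example (paths and file names below are placeholders), a typical call
could look like:

  long_stats_slopes --qdec ./qdec.table.dat --sd /path/to/subjects \
      --stats aseg.stats --meas volume \
      --do-avg --do-rate --do-spc \
      --stack-rate ./long.all.aseg.stats.volume-rate.dat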


REFERENCES
==========

Highly Accurate Inverse Consistent Registration: A Robust Approach,
  M. Reuter, H.D. Rosas, B. Fischl. NeuroImage 53(4), 1181-1196, 2010.
  http://dx.doi.org/10.1016/j.neuroimage.2010.07.020
  http://reuter.mit.edu/papers/reuter-robreg10.pdf 

Avoiding Asymmetry-Induced Bias in Longitudinal Image Processing,
  M. Reuter, B. Fischl. NeuroImage 57(1), 19-21, 2011.
  http://dx.doi.org/10.1016/j.neuroimage.2011.02.076
  http://reuter.mit.edu/papers/reuter-bias11.pdf 

Within-Subject Template Estimation for Unbiased Longitudinal Image Analysis.
  M. Reuter, N.J. Schmansky, H.D. Rosas, B. Fischl.
  NeuroImage 61(4), 1402-1418, 2012.
  http://dx.doi.org/10.1016/j.neuroimage.2012.02.084
  http://reuter.mit.edu/papers/reuter-long12.pdf

"""

def options_parse():
    """
    Command Line Options Parser for long_stats_slopes
    initiate the option parser and return the parsed object
    """
    parser = optparse.OptionParser(version='$Id: long_stats_slopes,v 1.20 2012/05/30 22:57:26 mreuter Exp $', usage=HELPTEXT)
    
    # help text
    h_qdec      = '(REQUIRED) qdec table file specifying the subjects and time points'
    h_stats     = '(REQUIRED) the stats file, e.g. aseg.stats or lh.aparc.stats'
    h_meas      = '(REQUIRED) the stats measure (e.g. volume, thickness, mean, std)'
    h_sd        = '(REQUIRED) full path to FreeSurfer subjects dir'

    h_doavg     = 'compute and output the temporal average'
    h_dorate    = 'compute and output the rate'
    h_dopc1     = 'compute and output the pct. change (w.r.t. tp1)'
    h_dopc1fit  = 'compute and output the pct. change (w.r.t. tp1 from linear fit)'
    h_dospc     = 'compute and output the sym. pct. change (w.r.t. temp. average)'
    h_dostack   = 'save the stacked within subject time point data (time series)'
    h_resid     = 'residual tp (pass 1 for tp1) to export (default no export)'

    h_out_avg   = 'filename to store temporal average in <template>/stats/<OUT_AVG> (default: long.<stats>.<meas>-avg.dat)'
    h_out_rate  = 'filename to store rate in <template>/stats/<OUT_RATE> (default: long.<stats>.<meas>-rate.dat)'
    h_out_pc1   = 'filename to store pct. change (to tp1) in <template>/stats/<OUT_PC1> (default: long.<stats>.<meas>-pc1.dat)'
    h_out_pc1fit= 'filename to store pct. change (to tp1fit) in <template>/stats/<OUT_PC1FIT> (default: long.<stats>.<meas>-pc1fit.dat)'
    h_out_spc   = 'filename to store sym. pct. change in <template>/stats/<OUT_SPC> (default: long.<stats>.<meas>-spc.dat)'
    h_out_resid = 'filename to store residual in <template>/stats/<OUT_RESID> (default: long.<stats>.<meas>-resid<resid>.dat), requires --resid <int> '
    h_outstack  = 'filename to store stacked measure file <template>/stats/<OUT_STACK> (default: long.<stats>.<meas>-stack.dat)'

    h_time      = 'variable name for time column variable (e.g. age) in qdec table'    
    h_generictime = 'time points are ordered in qdec file, assume time=1,2,3...'    
    
    h_stack_avg = 'full filename to stack temporal average tables (default no stacking)'
    h_stack_rate= 'full filename to stack rate tables (default no stacking)'
    h_stack_pc1 = 'full filename to stack pct. change to tp1 tables (default no stacking)'
    h_stack_pc1fit = 'full filename to stack pct. change to tp1fit tables (default no stacking)'
    h_stack_spc = 'full filename to stack sym. pct. tables (default no stacking)'
    h_stack_resid = 'full filename to stack residual tables (default no stacking)'
    
    h_cross     = 'use cross sectional results (for testing only)'
    # Add options 

    # Specify inputs
    group = optparse.OptionGroup(parser, "Required Arguments")
    group.add_option('--qdec', dest='qdec', help=h_qdec)
    group.add_option('--stats', dest='stats', help=h_stats)
    group.add_option('--meas', dest='meas', help=h_meas)
    group.add_option('--sd',   dest='sd'  , help=h_sd)
    parser.add_option_group(group)

    # do computations:
    group = optparse.OptionGroup(parser, "Computations", "Select one or more of the following options:")
    group.add_option('--do-avg'  , action='store_true', dest='do_avg'  , help=h_doavg  , default=False)
    group.add_option('--do-rate' , action='store_true', dest='do_rate' , help=h_dorate , default=False)
    group.add_option('--do-pc1fit'  , action='store_true', dest='do_pc1fit'  , help=h_dopc1fit  , default=False)
    group.add_option('--do-pc1'  , action='store_true', dest='do_pc1'  , help=h_dopc1  , default=False)
    group.add_option('--do-spc'  , action='store_true', dest='do_spc'  , help=h_dospc  , default=False)
    group.add_option('--do-stack', action='store_true', dest='do_stack', help=h_dostack, default=False)
    group.add_option('--resid', dest='resid', type="int", help=h_resid, default=-1)
    parser.add_option_group(group)
    
    # parameters:
    group = optparse.OptionGroup(parser, "Parameters")
    group.add_option('--time', dest='time', help=h_time)
    group.add_option('--generic-time', action='store_true', dest='generic_time', help=h_generictime, default=False)
    group.add_option('--cross', action='store_true', dest='cross', help=h_cross, default=False)
    parser.add_option_group(group)
    
    # overwrite default output names:
    group = optparse.OptionGroup(parser, "Within-Subject Output","Select these to overwrite the default names:")
    group.add_option('--out-avg'  , dest='out_avg'  , help=h_out_avg)
    group.add_option('--out-rate' , dest='out_rate' , help=h_out_rate)
    group.add_option('--out-pc1fit'  , dest='out_pc1fit'  , help=h_out_pc1fit)
    group.add_option('--out-pc1'  , dest='out_pc1'  , help=h_out_pc1)
    group.add_option('--out-spc'  , dest='out_spc'  , help=h_out_spc)
    group.add_option('--out-resid'  , dest='out_resid'  , help=h_out_resid)
    group.add_option('--out-stack', dest='out_stack', help=h_outstack)
    parser.add_option_group(group)
    
    group = optparse.OptionGroup(parser, "Stacked Tables","To output tables with results from all subjects")
    group.add_option('--stack-avg'  , dest='stack_avg'  , help=h_stack_avg)
    group.add_option('--stack-rate' , dest='stack_rate' , help=h_stack_rate)
    group.add_option('--stack-pc1fit'  , dest='stack_pc1fit'  , help=h_stack_pc1fit)
    group.add_option('--stack-pc1'  , dest='stack_pc1'  , help=h_stack_pc1)
    group.add_option('--stack-spc'  , dest='stack_spc'  , help=h_stack_spc)
    group.add_option('--stack-resid'  , dest='stack_resid'  , help=h_stack_resid)
    parser.add_option_group(group)

                      
    (options, args) = parser.parse_args()
    
    # extensive error checks
    if options.qdec is None:
        parser.print_help()
        print '\nERROR: Specify --qdec (and other required arguments)\n'
        sys.exit(1)

        
    if options.stats is None:
        print 'ERROR: Specify --stats (e.g. \'aseg.stats\')\n'
        sys.exit(1)

    if options.meas is None:
        print 'ERROR: Specify --meas (e.g. \'volume\')\n'
        sys.exit(1)

    if options.sd is None:
        print 'ERROR: Specify the subject dir with --sd <fullpath>\n'
        sys.exit(1)   
    
 
    # default output names: [long|cross].<stats>.<meas>-<type>.dat; note that
    # passing an --out-* name explicitly also enables the corresponding computation
    crosslong = 'long.'
    if options.cross:
        crosslong = 'cross.'
 
    if options.out_avg is None:
        options.out_avg = crosslong+options.stats+'.'+options.meas+'-avg.dat'
    else: 
        options.do_avg = True
    if options.out_rate is None:
        options.out_rate = crosslong+options.stats+'.'+options.meas+'-rate.dat'
    else: 
        options.do_rate = True
    if options.out_pc1fit is None:
        options.out_pc1fit = crosslong+options.stats+'.'+options.meas+'-pc1fit.dat'
    else:
        options.do_pc1fit = True
    if options.out_pc1 is None:
        options.out_pc1 = crosslong+options.stats+'.'+options.meas+'-pc1.dat'
    else:
        options.do_pc1 = True
    if options.out_spc is None:
        options.out_spc = crosslong+options.stats+'.'+options.meas+'-spc.dat'
    else:
        options.do_spc = True
    if options.out_resid is None:
        options.out_resid = crosslong+options.stats+'.'+options.meas+'-resid'+str(options.resid)+'.dat'
    else:
        if options.resid < 1:
            print 'ERROR: Please also specify the tp number to export residuals (starting at 1) with --resid !\n'
            sys.exit(1)
                
    if options.out_stack is None:
        options.out_stack = crosslong+options.stats+'.'+options.meas+'-stack.dat'
    else:
        options.do_stack = True
    
    do_something = options.do_avg or options.do_rate or options.do_pc1fit or options.do_pc1 or options.do_spc or (options.resid > 0)
    if not do_something:
        print 'ERROR: Analysis type should be specified, use one or more of --do-avg, --do-rate, --do-pc1fit, --do-pc1, --do-spc, --resid <tpid>\n'
        sys.exit(1)
                
    return options


def run_cmd(cmd,err_msg):
    """
    execute the command
    """
    print cmd+'\n'
    args = shlex.split(cmd)
    retcode = subprocess.call(args)
    if retcode != 0 :
        print 'ERROR: '+err_msg
        sys.exit(1)
    print '\n'

def create_template_table(intable,measure,rows,outtable):
    """
    read columns from intable and switch measure and row headers
    write to outtable
    assumes white space for separator
    """
    if not os.path.exists(intable):
        print 'ERROR: '+str(intable)+' not found!'
        sys.exit(1)

    file = open(intable,'r')
    first = file.readline()    
    file.close() 
    cols = first.split(" ",1)
    if len(cols) != 2:
        print 'ERROR: table columns cannot be split into measure and headers?'
        sys.exit(1)
        
    cols[0] = 'Measure:'+measure
    file = open(outtable,'w')
    file.write(cols[0]+' '+cols[1])
    for r in rows:
        file.write(r+'\n')
    file.close()
    

if __name__=="__main__":
    # Command Line options and error checking done here
    options = options_parse()
    slopelogger.debug('-- The options you entered --')
    slopelogger.debug(options) 

    subjectsdir = ''
    # Parse the stats files 
    print 'Parsing the qdec table: '+options.qdec
    try:
        slopelogger.debug('Processing file ' + options.qdec)
        qdectable = LongQdecTable(options.qdec)
        #subjects_tp_map, variables, subjectdir = qdecparse.parse()
    except BadFileError, e:
        print 'ERROR: qdec table '+str(e)+' not found!'
        sys.exit(1)
        
    # make sure we have a long table containing the bases
    if qdectable.cross:
        print '\nERROR: qdec table '+options.qdec+' is cross sectional\n       (2nd column not \'fsid-base\')!\n'
        sys.exit(1)
        
    # by default use the first variable column (3rd qdec column) as the time variable
    varidx = 1
    
    # get other variables:
    variables=qdectable.variables
    if len(variables) < 1:
        varidx = -1
        if not options.generic_time:
            print '\nERROR: qdec table '+options.qdec+' needs 3rd column with time value,'
            print '       e.g. age or time since baseline scan ...!\n'
            print '       Or pass --generic-time assuming tps are ordered 1,2,3...\n'
            sys.exit(1)        
        
    # if time variable is passed on command line, make sure it is part of variables
    if options.time is not None:
        defaultvar = options.time
        # compute correct index (starting with 1, 0 is the tpID)
        foundidx = False
        for index in (i for i in xrange(len(variables)) if variables[i].upper()==defaultvar.upper()):
            varidx = index
            foundidx = True
            #print 'found: '+str(varidx)+' '+variables[varidx]
            break
        if not foundidx:
            print '\nERROR: DefaultVariable \''+str(defaultvar)+'\' not found in variables: '+str(variables)+'\n'
            sys.exit(1)
        varidx = varidx + 1
        
    # maybe later check if time column is really a float?

    # subjects dir: --sd takes precedence, then the SUBJECTS_DIR environment variable, then the qdec file
    subjectsdir = options.sd
    if subjectsdir is None:
        subjectsdir = os.getenv('SUBJECTS_DIR')
    if subjectsdir is None:
        subjectsdir = qdectable.subjectsdir
    if subjectsdir is None:
        print '\nERROR: no subjects dir specified, use --sd <fullpath>'
        sys.exit(1)
    print '\nWorking in SUBJECTS_DIR: '+subjectsdir+'\n'
    os.environ['SUBJECTS_DIR'] =  subjectsdir
    
    # process
    retcode = 0
    allavg = []
    allspc = []
    allpc1 = []
    allpc1fit = []
    allrate= []
    allresid= []
    for subjectid, tplist in qdectable.subjects_tp_map.items():
        print '\nSubject-Template: '+subjectid
        
        # check if basedir exists
        basedir = os.path.join(subjectsdir,subjectid)
        if not os.path.exists(basedir):
            print 'ERROR: Template dir '+str(basedir)+' does not exist!'
            sys.exit(1)

        basestatsdir=os.path.join(basedir,'stats')            
        
        # check if 2 or more time points
        if len(tplist) < 2 :
            print 'ERROR: '+str(basedir)+' must have at least 2 time points!'
            sys.exit(1)
        
        # create tmpdir:
        prefix = './tmp-'+subjectid+'_'+options.stats+'_'+options.meas+'_'
        dirname = tempfile.mkdtemp('',prefix,'')
        if not os.path.exists(dirname):
            print 'ERROR: tmp dir '+str(dirname)+' cannot be created (write permissions?)!'
            sys.exit(1)
        
        # extract ids and age data:
        if options.cross:
            tpids = [entry[0] for entry in tplist]
        else:
            tpids = [entry[0]+'.long.'+subjectid for entry in tplist]
        if varidx > 0:
            times = [float(entry[varidx]) for entry in tplist]
        else:
            times = [float(x) for x in range(1,len(tplist)+1)]
        num   = len(times)
        meant = sum(times) / num
        print '\n\nINFO: '+str(num)+' TPs in '+subjectid+' , mean age: '+str(meant)+'\n'
        
        # list of subjects:
        all = " ".join(tpids)
        
        # create time series stats table
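        # (asegstats2table is used by default; for 'lh.*' / 'rh.*' stats files the
        #  corresponding aparcstats2table call is built instead. As a sketch with
        #  hypothetical subject names, '--stats lh.aparc.stats --meas thickness'
        #  would run something like:
        #    aparcstats2table --common-parcs --hemi lh --parc aparc \
        #      --subjects tp1.long.base tp2.long.base --meas thickness \
        #      --tablefile <tmpdir>/long.lh.aparc.stats.thickness-stack.dat -d space)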
        meas_target = os.path.join(dirname,options.out_stack)
        if options.do_stack:
            meas_target = os.path.join(basestatsdir,options.out_stack)
        stats = options.stats
        prog  = 'asegstats2table --common-segs --stats '+stats
        if options.stats[0:3] == 'lh.' or options.stats[0:3]=='rh.':
            stats = options.stats[3:]
            if stats[-6:] == '.stats':
               stats=stats[0:-6]
            prog = 'aparcstats2table --common-parcs --hemi '+options.stats[0:2]+' --parc '+stats
        cmd = prog+' --subjects '+all+' --meas '+options.meas+' --tablefile '+ meas_target+' -d space'
        run_cmd(cmd,prog+' stacking did not work?')        

        #write X-matrix (times):
        x_target    = os.path.join(dirname,'X-long.mat')
        print 'Writing '+x_target+' ...\n'
        if os.path.exists(x_target):
            os.remove(x_target)
        xfp = open(x_target, 'w')
        for time in times:
            xfp.write('1 '+str(time-times[0])+'\n')
#            xfp.write('1 '+str(time-meant)+'\n')
        xfp.close()    
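        # Example: for a subject with time values [3, 4] this writes
        #   1 0
        #   1 1
        # i.e. an intercept column plus the time shifted to start at 0, so that
        # beta0 (frame 0 of beta.mgh) is the fit at the first time point and
        # beta1 (frame 1) is the rate per unit time.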

        # run glm in tmp dir:
        zerodof=""
        if num==2:
            zerodof=" --allow-zero-dof "
        saveeres=""
        if options.resid > 0:
            saveeres=" --eres-save "

        glmdir=os.path.join(dirname,'glm')
        cmd = 'mri_glmfit --table '+meas_target+' --X '+x_target+zerodof+saveeres+' --no-contrasts-ok --glmdir '+glmdir
        run_cmd(cmd,'mri_glmfit did not work?')

        # harvest results (in beta.mgh)
        betafn = os.path.join(glmdir,'beta.mgh' )  
        beta0  = os.path.join(dirname,'beta0.mgh')  
        beta1  = os.path.join(dirname,'beta1.mgh') 
        betam  = os.path.join(dirname,'betam.mgh') 
        if not os.path.exists(betafn):
            print 'ERROR: GLM results '+str(betafn)+' does not exist!'
            sys.exit(1)
            
        # split beta
        cmd = 'mri_convert --frame 0 '+betafn+' '+beta0 
        run_cmd(cmd,'mri_convert split frame 0 did not work?')
        
        cmd = 'mri_convert --frame 1 '+betafn+' '+beta1 
        run_cmd(cmd,'mri_convert split frame 1 did not work?')
        
        # evaluate fit at the mean time: betam = beta0 + (mean_time - base_time) * beta1
        cmd = 'mris_calc -o '+betam+' '+beta1+' mul '+str(meant-times[0])
        run_cmd(cmd,'mris_calc compute betam (mul) problem?')    
        cmd = 'mris_calc -o '+betam+' '+betam+' add '+beta0
        run_cmd(cmd,'mris_calc compute betam (add) problem?')    
                
        # create output (depending on type)
        outex=''
        
        if options.resid > 0:
            # output residual in specific time point
            eres =  os.path.join(glmdir,'eres.mgh' )
            outtmp = os.path.join(dirname,options.out_resid+'.mgh')
            cmd = 'mri_convert --frame '+str(options.resid - 1)+' '+eres+' '+outtmp 
            run_cmd(cmd,'mri_convert split residual frames '+str(options.resid)+' did not work?')      
            outname = os.path.join(basestatsdir,options.out_resid)
            outex = outname
            allresid.append(outname)
            template = os.path.join(dirname,'template.'+options.out_resid+'.dat')
            create_template_table(meas_target,options.meas+'-resid'+str(options.resid),[ subjectid ], template)
            cmd ='mri_convert --out_stats_table --like '+template+' '+outtmp+' '+outname
            run_cmd(cmd,'converting resid '+str(options.resid)+' to statstable failed?') 
                
        if options.do_pc1fit:
            # compute pct. change w.r.t. the fit evaluated at baseline (tp1): 100 * beta1 / beta0
            outtmp = os.path.join(dirname,options.out_pc1fit+'.mgh')
            cmd = 'mris_calc -o '+outtmp+' '+beta1+' div '+beta0
            run_cmd(cmd,'mris_calc compute pct. change (pc1fit) problem (div)?')    
            cmd = 'mris_calc -o '+outtmp+' '+outtmp+' mul 100'
            run_cmd(cmd,'mris_calc compute pct. change (pc1fit) problem (mul)?')    
            outname = os.path.join(basestatsdir,options.out_pc1fit)
            outex = outname
            allpc1fit.append(outname)
            template = os.path.join(dirname,'template.'+options.out_pc1fit+'.dat')
            create_template_table(meas_target,options.meas+'-pc1fit',[ subjectid ], template)
            cmd ='mri_convert --out_stats_table --like '+template+' '+outtmp+' '+outname
            run_cmd(cmd,'converting pc1fit to statstable failed?') 

        if options.do_spc:
            # compute symmetrized pct. change: 100 * beta1 / betam
            outtmp = os.path.join(dirname,options.out_spc+'.mgh')
            cmd = 'mris_calc -o '+outtmp+' '+beta1+' div '+betam
            run_cmd(cmd,'mris_calc compute sym. pct. change (spc) problem (div)?')    
            cmd = 'mris_calc -o '+outtmp+' '+outtmp+' mul 100'
            run_cmd(cmd,'mris_calc compute sym. pct. change (spc) problem (mul)?')    
            outname = os.path.join(basestatsdir,options.out_spc)
            outex = outname
            allspc.append(outname)
            template = os.path.join(dirname,'template.'+options.out_spc+'.dat')
            create_template_table(meas_target,options.meas+'-spc',[ subjectid ], template)
            cmd ='mri_convert --out_stats_table --like '+template+' '+outtmp+' '+outname
            run_cmd(cmd,'converting spc to statstable failed?') 
                
        if options.do_pc1:
            # create tp1 table and mgh file
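            # (the stacked table written above starts with one header line followed
            #  by one row per time point, so 'head -n 2' keeps the header and the
            #  tp1 row only)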
            tp1tab = os.path.join(dirname,'tp1.'+options.stats+'.'+options.meas+'.dat')
            cmd = 'head -n 2 '+meas_target
            print cmd
            args = shlex.split(cmd)
            output_f = open(tp1tab, 'w')
            p = subprocess.Popen(args,stdout=output_f)
            retcode = p.wait()
            if retcode != 0 :
                print '\nERROR: head on first 2 lines of time series table did not work? ('+str(retcode)+')\n'
                sys.exit(1)
            output_f.close()
            tp1 = os.path.join(dirname,'tp1.'+options.stats+'.'+options.meas+'.mgh')
            cmd = 'mri_convert --in_stats_table '+tp1tab+' '+tp1
            run_cmd(cmd,'mri_convert tp1 from stats-table to mgh did not work?')
            # compute pct. change w.r.t. tp1: 100 * beta1 / tp1
            outtmp = os.path.join(dirname,options.out_pc1+'.mgh')
            cmd = 'mris_calc -o '+outtmp+' '+beta1+' div '+tp1
            run_cmd(cmd,'mris_calc compute percent change (pc1) problem (div)?')
            cmd = 'mris_calc -o '+outtmp+' '+outtmp+' mul 100'
            run_cmd(cmd,'mris_calc compute percent change (pc1) problem (mul)?')
            outname = os.path.join(basestatsdir,options.out_pc1)
            outex = outname
            allpc1.append(outname)
            template = os.path.join(dirname,'template.'+options.out_pc1+'.dat')
            create_template_table(meas_target,options.meas+'-pc1',[ subjectid ], template)
            cmd ='mri_convert --out_stats_table --like '+template+' '+outtmp+' '+outname
            run_cmd(cmd,'converting pc1 to statstable failed?')
        
        if options.do_rate: 
            outname = os.path.join(basestatsdir,options.out_rate)
            outex = outname
            allrate.append(outname)
            template = os.path.join(dirname,'template.'+options.out_rate+'.dat')
            create_template_table(meas_target,options.meas+'-rate',[ subjectid ], template)
            cmd ='mri_convert --out_stats_table --like '+template+' '+beta1+' '+outname
            run_cmd(cmd,'converting rate to statstable failed?') 
        
        if  options.do_avg:
            outname = os.path.join(basestatsdir,options.out_avg)
            outex = outname
            allavg.append(outname)
            template = os.path.join(dirname,'template.'+options.out_avg+'.dat')
            create_template_table(meas_target,options.meas+'-avg',[ subjectid ], template)
            cmd ='mri_convert --out_stats_table --like '+template+' '+betam+' '+outname
            run_cmd(cmd,'converting avg to statstable failed?') 
    
           
        # cleanup tmp dir:    
        shutil.rmtree(dirname)
        
               
        print 'You can look at the result with, e.g. (specify "Space" for separation and "Merge delimiters"):'
        print '  ooffice -calc '+outex
        print
          
    
    if options.stack_avg is not None:
        # out table: long.all.'+options.stats+'.'+options.meas+'-avg.dat
        cmd = 'merge_stats_tables --inputs '+" ".join(allavg)+' -t '+options.stack_avg+' --meas '+options.meas+'-avg'+" --all-segs"
        run_cmd(cmd,'merge_stats_tables failed (avg)?') 

    if options.stack_rate is not None:
        cmd = 'merge_stats_tables --inputs '+" ".join(allrate)+' -t '+options.stack_rate+' --meas '+options.meas+'-rate'+" --all-segs"
        run_cmd(cmd,'merge_stats_tables failed (rate)?') 

    if options.stack_pc1 is not None:
        cmd = 'merge_stats_tables --inputs '+" ".join(allpc1)+' -t '+options.stack_pc1+' --meas '+options.meas+'-pc1'+" --all-segs"
        run_cmd(cmd,'merge_stats_tables failed (pc1)?') 

    if options.stack_pc1fit is not None:
        cmd = 'merge_stats_tables --inputs '+" ".join(allpc1fit)+' -t '+options.stack_pc1fit+' --meas '+options.meas+'-pc1fit'+" --all-segs"
        run_cmd(cmd,'merge_stats_tables failed (pc1fit)?') 

    if options.stack_spc is not None:
        cmd = 'merge_stats_tables --inputs '+" ".join(allspc)+' -t '+options.stack_spc+' --meas '+options.meas+'-spc'+" --all-segs"
        run_cmd(cmd,'merge_stats_tables failed (spc)?') 

    if options.stack_resid is not None:
        cmd = 'merge_stats_tables --inputs '+" ".join(allresid)+' -t '+options.stack_resid+' --meas '+options.meas+'-resid'+str(options.resid)+" --all-segs"
        run_cmd(cmd,'merge_stats_tables failed (resid)?') 
    
    #print 'merge_stats_tables --inputs '+" ".join(allavg)+' -t long.all.'+options.stats+'.'+options.meas+'-avg.dat --meas '+options.meas+'-avg'
       
    # always exit with 0 exit code
    sys.exit(0)