@@ -330,7 +330,7 @@ class CompCorOutputSpec(TraitedSpec):
                            desc='text file containing the noise components')

 class CompCor(BaseInterface):
-    '''
+    """
     Interface with core CompCor computation, used in aCompCor and tCompCor

     Example
@@ -342,7 +342,8 @@ class CompCor(BaseInterface):
     >>> ccinterface.inputs.num_components = 1
     >>> ccinterface.inputs.use_regress_poly = True
     >>> ccinterface.inputs.regress_poly_degree = 2
-    '''
+
+    """
     input_spec = CompCorInputSpec
     output_spec = CompCorOutputSpec
     references_ = [{'entry': BibTeX("@article{compcor_2007,"
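The docstring example above only sets inputs; a minimal sketch of actually running the interface might look as follows. The realigned_file and mask_file trait names, the file names, and the components_file output are assumptions taken from the surrounding module, not shown in this hunk:

    >>> ccinterface = CompCor()
    >>> ccinterface.inputs.realigned_file = 'functional.nii'  # hypothetical input file
    >>> ccinterface.inputs.mask_file = 'mask.nii'             # hypothetical noise-ROI mask
    >>> ccinterface.inputs.num_components = 1
    >>> ccinterface.inputs.use_regress_poly = True
    >>> ccinterface.inputs.regress_poly_degree = 2
    >>> res = ccinterface.run()                               # doctest: +SKIP
    >>> res.outputs.components_file                           # doctest: +SKIP

ACompCor (below) follows the same pattern with an anatomically defined noise mask; only the header written to the components file differs.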
@@ -465,8 +466,11 @@ def _make_headers(self, num_col):


 class ACompCor(CompCor):
-    ''' Anatomical compcor; for input/output, see CompCor.
-    If the mask provided is an anatomical mask, CompCor == ACompCor '''
+    """
+    Anatomical compcor: for inputs and outputs, see CompCor.
+    When the mask provided is an anatomical mask, then CompCor
+    is equivalent to ACompCor.
+    """

     def __init__(self, *args, **kwargs):
         ''' exactly the same as compcor except the header '''
@@ -492,7 +496,7 @@ class TCompCorOutputSpec(CompCorInputSpec):
                              desc="voxels exceeding the variance threshold"))

 class TCompCor(CompCor):
-    '''
+    """
     Interface for tCompCor. Computes a ROI mask based on variance of voxels.

     Example
@@ -505,7 +509,8 @@ class TCompCor(CompCor):
     >>> ccinterface.inputs.use_regress_poly = True
     >>> ccinterface.inputs.regress_poly_degree = 2
     >>> ccinterface.inputs.percentile_threshold = .03
-    '''
+
+    """

     input_spec = TCompCorInputSpec
     output_spec = TCompCorOutputSpec
@@ -634,7 +639,8 @@ class TSNROutputSpec(TraitedSpec):


 class TSNR(BaseInterface):
-    """Computes the time-course SNR for a time series
+    """
+    Computes the time-course SNR for a time series

     Typically you want to run this on a realigned time-series.

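The time-course SNR is the voxelwise mean over time divided by the voxelwise standard deviation. A hedged usage sketch on a realigned series; the in_file and regress_poly trait names and the file name are assumptions drawn from the wider module, not from this hunk:

    >>> tsnr = TSNR()
    >>> tsnr.inputs.in_file = 'realigned_functional.nii'  # hypothetical realigned 4D series
    >>> tsnr.inputs.regress_poly = 2                       # optional detrending before the stats
    >>> res = tsnr.run()                                   # doctest: +SKIP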
@@ -719,80 +725,6 @@ def _run_interface(self, runtime):
     def _list_outputs(self):
         return self._results

-def is_outlier(points, thresh=3.5):
-    """
-    Returns a boolean array with True if points are outliers and False
-    otherwise.
-
-    Parameters:
-    -----------
-    points : An numobservations by numdimensions array of observations
-    thresh : The modified z-score to use as a threshold. Observations with
-        a modified z-score (based on the median absolute deviation) greater
-        than this value will be classified as outliers.
-
-    Returns:
-    --------
-    mask : A numobservations-length boolean array.
-
-    References:
-    ----------
-    Boris Iglewicz and David Hoaglin (1993), "Volume 16: How to Detect and
-    Handle Outliers", The ASQC Basic References in Quality Control:
-    Statistical Techniques, Edward F. Mykytka, Ph.D., Editor.
-    """
-    if len(points.shape) == 1:
-        points = points[:, None]
-    median = np.median(points, axis=0)
-    diff = np.sum((points - median) ** 2, axis=-1)
-    diff = np.sqrt(diff)
-    med_abs_deviation = np.median(diff)
-
-    modified_z_score = 0.6745 * diff / med_abs_deviation
-
-    timepoints_to_discard = 0
-    for i in range(len(modified_z_score)):
-        if modified_z_score[i] <= thresh:
-            break
-        else:
-            timepoints_to_discard += 1
-
-    return timepoints_to_discard
-
-
-def regress_poly(degree, data, remove_mean=True, axis=-1):
-    ''' returns data with degree polynomial regressed out.
-    Be default it is calculated along the last axis (usu. time).
-    If remove_mean is True (default), the data is demeaned (i.e. degree 0).
-    If remove_mean is false, the data is not.
-    '''
-    IFLOG.debug('Performing polynomial regression on data of shape ' + str(data.shape))
-
-    datashape = data.shape
-    timepoints = datashape[axis]
-
-    # Rearrange all voxel-wise time-series in rows
-    data = data.reshape((-1, timepoints))
-
-    # Generate design matrix
-    X = np.ones((timepoints, 1))  # quick way to calc degree 0
-    for i in range(degree):
-        polynomial_func = Legendre.basis(i + 1)
-        value_array = np.linspace(-1, 1, timepoints)
-        X = np.hstack((X, polynomial_func(value_array)[:, np.newaxis]))
-
-    # Calculate coefficients
-    betas = np.linalg.pinv(X).dot(data.T)
-
-    # Estimation
-    if remove_mean:
-        datahat = X.dot(betas).T
-    else:  # disregard the first layer of X, which is degree 0
-        datahat = X[:, 1:].dot(betas[1:, ...]).T
-    regressed_data = data - datahat
-
-    # Back to original shape
-    return regressed_data.reshape(datashape)

 def compute_dvars(in_file, in_mask, remove_zerovariance=False,
                   intensity_normalization=1000):
@@ -921,3 +853,78 @@ def plot_confound(tseries, figsize, name, units=None,
     ax.set_ylim(ylim)
     ax.set_yticklabels([])
     return fig
+
+def is_outlier(points, thresh=3.5):
+    """
+    Returns the number of leading timepoints to discard: points are counted
+    from the start until the first observation that is not an outlier.
+
+    :param nparray points: a numobservations by numdimensions numpy array of observations
+    :param float thresh: the modified z-score to use as a threshold. Observations with
+        a modified z-score (based on the median absolute deviation) greater
+        than this value will be classified as outliers.
+
+    :return: the number of leading observations classified as outliers.
+
+    .. note:: References
+
+        Boris Iglewicz and David Hoaglin (1993), "Volume 16: How to Detect and
+        Handle Outliers", The ASQC Basic References in Quality Control:
+        Statistical Techniques, Edward F. Mykytka, Ph.D., Editor.
+
+    """
+    if len(points.shape) == 1:
+        points = points[:, None]
+    median = np.median(points, axis=0)
+    diff = np.sum((points - median) ** 2, axis=-1)
+    diff = np.sqrt(diff)
+    med_abs_deviation = np.median(diff)
+
+    modified_z_score = 0.6745 * diff / med_abs_deviation
+
+    timepoints_to_discard = 0
+    for i in range(len(modified_z_score)):
+        if modified_z_score[i] <= thresh:
+            break
+        else:
+            timepoints_to_discard += 1
+
+    return timepoints_to_discard
+
+
+def regress_poly(degree, data, remove_mean=True, axis=-1):
+    """
+    Returns data with degree polynomial regressed out.
+
+    :param bool remove_mean: whether or not to demean data (i.e. degree 0)
+    :param int axis: numpy array axis along which regression is performed
+
+    """
+    IFLOG.debug('Performing polynomial regression on data of shape ' + str(data.shape))
+
+    datashape = data.shape
+    timepoints = datashape[axis]
+
+    # Rearrange all voxel-wise time-series in rows
+    data = data.reshape((-1, timepoints))
+
+    # Generate design matrix
+    X = np.ones((timepoints, 1))  # quick way to calc degree 0
+    for i in range(degree):
+        polynomial_func = Legendre.basis(i + 1)
+        value_array = np.linspace(-1, 1, timepoints)
+        X = np.hstack((X, polynomial_func(value_array)[:, np.newaxis]))
+
+    # Calculate coefficients
+    betas = np.linalg.pinv(X).dot(data.T)
+
+    # Estimation
+    if remove_mean:
+        datahat = X.dot(betas).T
+    else:  # disregard the first layer of X, which is degree 0
+        datahat = X[:, 1:].dot(betas[1:, ...]).T
+    regressed_data = data - datahat
+
+    # Back to original shape
+    return regressed_data.reshape(datashape)
+
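Since the two relocated helpers are plain module-level functions, they can be exercised directly. A minimal sketch, assuming the module is importable as nipype.algorithms.confounds and that numpy is available:

    >>> import numpy as np
    >>> from nipype.algorithms.confounds import regress_poly, is_outlier  # assumed import path
    >>> data = np.random.RandomState(0).randn(10, 100) * 0.1 + np.linspace(-5, 5, 100)
    >>> detrended = regress_poly(2, data)  # removes the mean plus degree-1 and degree-2 Legendre terms
    >>> detrended.shape
    (10, 100)
    >>> bool(np.allclose(detrended.mean(axis=-1), 0))
    True
    >>> series = np.array([12.0, 10.0, 0.1, -0.2, 0.0, 0.3, -0.1, 0.2, 0.1, -0.3])
    >>> is_outlier(series)  # number of leading points whose modified z-score exceeds 3.5
    2

regress_poly builds its design matrix from Legendre polynomials rather than raw monomials, presumably because they are better conditioned on the [-1, 1] interval used for the time axis.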