#!/bin/tcsh -xef

echo "auto-generated by afni_proc.py, Thu Feb 2 10:52:53 2023"
echo "(version 7.50, December 16, 2022)"
echo "execution started: `date`"

# to execute via tcsh:
#   tcsh -xef proc.SM327_all4runs |& tee output.proc.SM327_all4runs
# to execute via bash:
#   tcsh -xef proc.SM327_all4runs 2>&1 | tee output.proc.SM327_all4runs

# =========================== auto block: setup ============================
# script setup

# take note of the AFNI version
afni -ver

# check that the current AFNI version is recent enough
afni_history -check_date 14 Nov 2022
if ( $status ) then
    echo "** this script requires newer AFNI binaries (than 14 Nov 2022)"
    echo " (consider: @update.afni.binaries -defaults)"
    exit
endif

# will run external programs, so be sure they are in PATH
which tedana
if ( $status ) then
    echo '** missing required external program: tedana'
    echo ' (perhaps a conda environment is needed)'
    exit 1
endif

# the user may specify a single subject to run with
if ( $#argv > 0 ) then
    set subj = $argv[1]
else
    set subj = SM327_all4runs
endif

# assign output directory name
set output_dir = $subj.results

# verify that the results directory does not yet exist
if ( -d $output_dir ) then
    echo output dir "$subj.results" already exists
    exit
endif

# set list of runs
set runs = (`count -digits 2 1 1`)

# note 12 echoes and registration echo index
# NOTE(review): 12 "echoes" here are really 4 runs x 3 echoes packed into a
# single run (see the -dsets_me_run list in the generating command below),
# while echo_times lists only the 3 acquired TEs -- confirm this layout is
# what tedana expects before trusting the combine step
set echo_list  = (`count -digits 2 1 12`)
set echo_times = ( 12.0 31.7 51.4 )
set fave_echo  = '02'

# create results and stimuli directories
mkdir -p $output_dir
mkdir $output_dir/stimuli

# copy stim files into stimulus directory
cp ms327_faces_allruns.txt $output_dir/stimuli

# copy anatomy to results dir
3dcopy anatSS.T1W.nii $output_dir/anatSS.T1W

# copy template to results dir (for QC)
3dcopy /home/taliw/abin/MNI152_2009_template_SSW.nii.gz \
       $output_dir/MNI152_2009_template_SSW.nii.gz

# copy external -tlrc_NL_warped_dsets datasets
3dcopy anatQQ.T1W.nii $output_dir/anatQQ.T1W
3dcopy anatQQ.T1W.aff12.1D $output_dir/anatQQ.T1W.aff12.1D
3dcopy anatQQ.T1W_WARP.nii $output_dir/anatQQ.T1W_WARP.nii

# ============================ auto block: tcat ============================
# apply 3dTcat to copy input dsets to results dir,
# while removing the first 0 TRs

# EPI runs for echo 1 (registration follower)
3dTcat -prefix $output_dir/pb00.$subj.r01.e01.tcat \
       epi_run1_face_echo_001+orig'[0..$]'

# EPI runs for echo 2 (fave_echo = registration driver)
3dTcat -prefix $output_dir/pb00.$subj.r01.e02.tcat \
       epi_run1_face_echo_002+orig'[0..$]'

# EPI runs for echo 3 (registration follower)
3dTcat -prefix $output_dir/pb00.$subj.r01.e03.tcat \
       epi_run1_face_echo_003+orig'[0..$]'

# EPI runs for echo 4 (registration follower)
3dTcat -prefix $output_dir/pb00.$subj.r01.e04.tcat \
       epi_run2_face_echo_001+orig'[0..$]'

# EPI runs for echo 5 (registration follower)
3dTcat -prefix $output_dir/pb00.$subj.r01.e05.tcat \
       epi_run2_face_echo_002+orig'[0..$]'

# EPI runs for echo 6 (registration follower)
3dTcat -prefix $output_dir/pb00.$subj.r01.e06.tcat \
       epi_run2_face_echo_003+orig'[0..$]'

# EPI runs for echo 7 (registration follower)
3dTcat -prefix $output_dir/pb00.$subj.r01.e07.tcat \
       epi_run3_face_echo_001+orig'[0..$]'

# EPI runs for echo 8 (registration follower)
3dTcat -prefix $output_dir/pb00.$subj.r01.e08.tcat \
       epi_run3_face_echo_002+orig'[0..$]'

# EPI runs for echo 9 (registration follower)
3dTcat -prefix $output_dir/pb00.$subj.r01.e09.tcat \
       epi_run3_face_echo_003+orig'[0..$]'

# EPI runs for echo 10 (registration follower)
3dTcat -prefix $output_dir/pb00.$subj.r01.e10.tcat \
       epi_run4_face_echo_001+orig'[0..$]'

# EPI runs for echo 11 (registration follower)
3dTcat -prefix $output_dir/pb00.$subj.r01.e11.tcat \
       epi_run4_face_echo_002+orig'[0..$]'

# EPI runs for echo 12 (registration follower)
3dTcat -prefix $output_dir/pb00.$subj.r01.e12.tcat \
       epi_run4_face_echo_003+orig'[0..$]'

# and make note of repetitions (TRs) per run
set tr_counts = ( 466 )

# -------------------------------------------------------
# enter the results directory (can begin processing data)
cd $output_dir

# ---------------------------------------------------------
# QC: compute correlations with spherical ~averages
@radial_correlate -nfirst 0 -polort 4 -do_clean yes \
                  -rdir radcor.pb00.tcat \
                  pb00.$subj.r*.e$fave_echo.tcat+orig.HEAD

# ---------------------------------------------------------
# QC: look for columns of high variance
find_variance_lines.tcsh -polort 4 -nerode 2 \
       -rdir vlines.pb00.tcat \
       pb00.$subj.r*.e$fave_echo.tcat+orig.HEAD |& tee \
       out.vlines.pb00.tcat.txt

# ========================== auto block: outcount ==========================
# QC: compute outlier fraction for each volume
touch out.pre_ss_warn.txt
foreach run ( $runs )
    3dToutcount -automask -fraction -polort 4 -legendre \
                pb00.$subj.r$run.e$fave_echo.tcat+orig > outcount.r$run.1D

    # censor outlier TRs per run, ignoring the first 0 TRs
    # - censor when more than 0.05 of automask voxels are outliers
    # - step() defines which TRs to remove via censoring
    1deval -a outcount.r$run.1D -expr "1-step(a-0.05)" > rm.out.cen.r$run.1D

    # outliers at TR 0 might suggest pre-steady state TRs
    if ( `1deval -a outcount.r$run.1D"{0}" -expr "step(a-0.4)"` ) then
        echo "** TR #0 outliers: possible pre-steady state TRs in run $run" \
            >> out.pre_ss_warn.txt
    endif
end

# catenate outlier counts into a single time series
cat outcount.r*.1D > outcount_rall.1D

# catenate outlier censor files into a single time series
cat rm.out.cen.r*.1D > outcount_${subj}_censor.1D

# get run number and TR index for minimum outlier volume
set minindex = `3dTstat -argmin -prefix - outcount_rall.1D\'`
set ovals = ( `1d_tool.py -set_run_lengths $tr_counts \
                          -index_to_run_tr $minindex` )

# save run and TR indices for extraction of vr_base_min_outlier
set minoutrun = $ovals[1]
set minouttr  = $ovals[2]
echo "min outlier: run $minoutrun, TR $minouttr" | tee out.min_outlier.txt

# ================================ despike =================================
# apply 3dDespike to each run
foreach run ( $runs )
    foreach eind ( $echo_list )
        3dDespike -NEW -nomask -prefix pb01.$subj.r$run.e$eind.despike \
                  pb00.$subj.r$run.e$eind.tcat+orig
    end
end

# ================================= tshift =================================
# time shift data so all slice timing is the same
foreach run ( $runs )
    foreach eind ( $echo_list )
        3dTshift -tzero 0 -quintic -prefix pb02.$subj.r$run.e$eind.tshift \
                 pb01.$subj.r$run.e$eind.despike+orig
    end
end

# --------------------------------
# extract volreg registration base
3dbucket -prefix vr_base_min_outlier \
         pb02.$subj.r$minoutrun.e$fave_echo.tshift+orig"[$minouttr]"

# ================================= align ==================================
# for e2a: compute anat alignment transformation to EPI registration base
# (new anat will be current anatSS.T1W+orig)
align_epi_anat.py -anat2epi -anat anatSS.T1W+orig \
       -suffix _al_junk \
       -epi vr_base_min_outlier+orig -epi_base 0 \
       -epi_strip 3dAutomask \
       -anat_has_skull no \
       -giant_move \
       -volreg off -tshift off

# ================================== tlrc ==================================
# nothing to do: have external -tlrc_NL_warped_dsets
# warped anat     : anatQQ.T1W+tlrc
# affine xform    : anatQQ.T1W.aff12.1D
# non-linear warp : anatQQ.T1W_WARP.nii

# ================================= volreg =================================
# align each dset to base volume, to anat, warp to tlrc space

# verify that we have a +tlrc warp dataset
if ( ! -f anatQQ.T1W+tlrc.HEAD ) then
    echo "** missing +tlrc warp dataset: anatQQ.T1W+tlrc.HEAD"
    exit
endif

# register and warp
foreach run ( $runs )
    # register each volume to the base image
    # (registration is driven by $fave_echo)
    3dvolreg -verbose -zpad 1 -base vr_base_min_outlier+orig \
             -1Dfile dfile.r$run.1D -prefix rm.epi.volreg.r$run.e$fave_echo \
             -cubic \
             -1Dmatrix_save mat.r$run.vr.aff12.1D \
             pb02.$subj.r$run.e$fave_echo.tshift+orig

    # create an all-1 dataset to mask the extents of the warp
    3dcalc -overwrite -a pb02.$subj.r$run.e$fave_echo.tshift+orig -expr 1 \
           -prefix rm.epi.all1

    # catenate volreg/epi2anat/tlrc xforms
    cat_matvec -ONELINE \
               anatQQ.T1W.aff12.1D \
               anatSS.T1W_al_junk_mat.aff12.1D -I \
               mat.r$run.vr.aff12.1D > mat.r$run.warp.aff12.1D

    # apply catenated xform: volreg/epi2anat/tlrc/NLtlrc
    # then apply non-linear standard-space warp
    # (apply warps per echo - warps are fixed, per run)
    foreach eind ( $echo_list )
        3dNwarpApply -master anatQQ.T1W+tlrc -dxyz 2.5 \
                     -source pb02.$subj.r$run.e$eind.tshift+orig \
                     -nwarp "anatQQ.T1W_WARP.nii mat.r$run.warp.aff12.1D" \
                     -prefix rm.epi.nomask.r$run.e$eind
    end

    # warp the all-1 dataset for extents masking
    3dNwarpApply -master anatQQ.T1W+tlrc -dxyz 2.5 \
                 -source rm.epi.all1+orig \
                 -nwarp "anatQQ.T1W_WARP.nii mat.r$run.warp.aff12.1D" \
                 -interp cubic \
                 -ainterp NN -quiet \
                 -prefix rm.epi.1.r$run

    # make an extents intersection mask of this run
    3dTstat -min -prefix rm.epi.min.r$run rm.epi.1.r$run+tlrc
end

# make a single file of registration params
cat dfile.r*.1D > dfile_rall.1D

# ----------------------------------------
# create the extents mask: mask_epi_extents+tlrc
# (this is a mask of voxels that have valid data at every TR)
# (only 1 run, so just use 3dcopy to keep naming straight)
3dcopy rm.epi.min.r01+tlrc mask_epi_extents

# and apply the extents mask to the EPI data
# (delete any time series with missing data)
foreach run ( $runs )
    foreach eind ( $echo_list )
        3dcalc -a rm.epi.nomask.r$run.e$eind+tlrc -b mask_epi_extents+tlrc \
               -expr 'a*b' -prefix pb03.$subj.r$run.e$eind.volreg
    end
end

# warp the volreg base EPI dataset to make a final version
cat_matvec -ONELINE \
           anatQQ.T1W.aff12.1D \
           anatSS.T1W_al_junk_mat.aff12.1D -I > mat.basewarp.aff12.1D

3dNwarpApply -master anatQQ.T1W+tlrc -dxyz 2.5 \
             -source vr_base_min_outlier+orig \
             -nwarp "anatQQ.T1W_WARP.nii mat.basewarp.aff12.1D" \
             -prefix final_epi_vr_base_min_outlier

# create an anat_final dataset, aligned with stats
3dcopy anatQQ.T1W+tlrc anat_final.$subj

# record final registration costs
3dAllineate -base final_epi_vr_base_min_outlier+tlrc -allcostX \
            -input anat_final.$subj+tlrc |& tee out.allcostX.txt

# ---------------------------------------------------------
# QC: compute correlations with spherical ~averages
@radial_correlate -nfirst 0 -polort 4 -do_clean yes \
                  -rdir radcor.pb03.volreg \
                  pb03.$subj.r*.e$fave_echo.volreg+tlrc.HEAD

# ================================== mask ==================================
# create 'full_mask' dataset (union mask)
foreach run ( $runs )
    3dAutomask -prefix rm.mask_r$run pb03.$subj.r$run.e$fave_echo.volreg+tlrc
end

# create union of inputs, output type is byte
3dmask_tool -inputs rm.mask_r*+tlrc.HEAD -union -prefix full_mask.$subj

# ---- create subject anatomy mask, mask_anat.$subj+tlrc ----
#      (resampled from tlrc anat)
3dresample -master full_mask.$subj+tlrc -input anatQQ.T1W+tlrc \
           -prefix rm.resam.anat

# convert to binary anat mask; fill gaps and holes
3dmask_tool -dilate_input 5 -5 -fill_holes -input rm.resam.anat+tlrc \
            -prefix mask_anat.$subj

# compute tighter EPI mask by intersecting with anat mask
3dmask_tool -input full_mask.$subj+tlrc mask_anat.$subj+tlrc \
            -inter -prefix mask_epi_anat.$subj

# compute overlaps between anat and EPI masks
3dABoverlap -no_automask full_mask.$subj+tlrc mask_anat.$subj+tlrc \
            |& tee out.mask_ae_overlap.txt

# note Dice coefficient of masks, as well
3ddot -dodice full_mask.$subj+tlrc mask_anat.$subj+tlrc \
      |& tee out.mask_ae_dice.txt

# ---- create group anatomy mask, mask_group+tlrc ----
#      (resampled from tlrc base anat, MNI152_2009_template_SSW.nii.gz)
3dresample -master full_mask.$subj+tlrc -prefix ./rm.resam.group \
           -input /home/taliw/abin/MNI152_2009_template_SSW.nii.gz'[0]'

# convert to binary group mask; fill gaps and holes
3dmask_tool -dilate_input 5 -5 -fill_holes -input rm.resam.group+tlrc \
            -prefix mask_group

# note Dice coefficient of anat and template masks
3ddot -dodice mask_anat.$subj+tlrc mask_group+tlrc \
      |& tee out.mask_at_dice.txt

# ================================ combine =================================
# combine multi-echo data per run, using method m_tedana
# (tedana from MEICA group)
# see also: https://tedana.readthedocs.io

# note the version of tedana (only capture stdout)
tedana --version | tee out.tedana_version.txt

# (get MEICA tedana final result, dn_ts_OC.nii.gz)
# first run tedana commands, to see if they all succeed
# NOTE(review): -d receives 12 volreg datasets while -e lists only 3 echo
# times -- verify tedana accepts this combination (see setup NOTE above)
foreach run ( $runs )
    tedana -d pb03.$subj.r$run.e*.volreg+tlrc.HEAD \
           -e $echo_times \
           --mask mask_anat.$subj+tlrc.HEAD \
           --out-dir tedana_r$run --convention orig
end

# now copy the tedana results
foreach run ( $runs )
    # copy, but get space/view from tedana input
    3dcalc -a pb03.$subj.r$run.e$fave_echo.volreg+tlrc \
           -b tedana_r$run/dn_ts_OC.nii.gz \
           -prefix pb04.$subj.r$run.combine \
           -expr b -datum float
end

# ================================== blur ==================================
# blur each volume of each run
foreach run ( $runs )
    3dBlurInMask -preserve -FWHM 4 -Mmask mask_anat.$subj+tlrc \
                 -prefix pb05.$subj.r$run.blur \
                 pb04.$subj.r$run.combine+tlrc
end

# ================================= scale ==================================
# scale each voxel time series to have a mean of 100
# (be sure no negatives creep in)
# (subject to a range of [0,200])
foreach run ( $runs )
    3dTstat -prefix rm.mean_r$run pb05.$subj.r$run.blur+tlrc
    3dcalc -a pb05.$subj.r$run.blur+tlrc -b rm.mean_r$run+tlrc \
           -c mask_anat.$subj+tlrc \
           -expr 'c * min(200, a/b*100)*step(a)*step(b)' \
           -prefix pb06.$subj.r$run.scale
end

# ================================ regress =================================

# compute de-meaned motion parameters (for use in regression)
1d_tool.py -infile dfile_rall.1D -set_nruns 1 \
           -demean -write motion_demean.1D

# compute motion parameter derivatives (just to have)
1d_tool.py -infile dfile_rall.1D -set_nruns 1 \
           -derivative -demean -write motion_deriv.1D

# convert motion parameters for per-run regression
1d_tool.py -infile motion_demean.1D -set_nruns 1 \
           -split_into_pad_runs mot_demean

# create censor file motion_${subj}_censor.1D, for censoring motion
1d_tool.py -infile dfile_rall.1D -set_nruns 1 \
           -show_censor_count -censor_prev_TR \
           -censor_motion 0.8 motion_${subj}

# combine multiple censor files
1deval -a motion_${subj}_censor.1D -b outcount_${subj}_censor.1D \
       -expr "a*b" > censor_${subj}_combined_2.1D

# note TRs that were not censored
set ktrs = `1d_tool.py -infile censor_${subj}_combined_2.1D \
                       -show_trs_uncensored encoded`

# ------------------------------
# run the regression analysis
3dDeconvolve -input pb06.$subj.r*.scale+tlrc.HEAD \
    -mask mask_anat.$subj+tlrc \
    -censor censor_${subj}_combined_2.1D \
    -ortvec mot_demean.r01.1D mot_demean_r01 \
    -polort 4 \
    -num_stimts 1 \
    -stim_times 1 stimuli/ms327_faces_allruns.txt 'BLOCK(3)' \
    -stim_label 1 faces \
    -gltsym 'SYM: faces' \
    -glt_label 1 Faces \
    -gltsym 'SYM: -faces' \
    -glt_label 2 ITI \
    -fout -tout -x1D X.xmat.1D -xjpeg X.jpg \
    -x1D_uncensored X.nocensor.xmat.1D \
    -errts errts.${subj} \
    -bucket stats.$subj

# if 3dDeconvolve fails, terminate the script
if ( $status != 0 ) then
    echo '---------------------------------------'
    echo '** 3dDeconvolve error, failing...'
    echo ' (consider the file 3dDeconvolve.err)'
    exit
endif

# display any large pairwise correlations from the X-matrix
1d_tool.py -show_cormat_warnings -infile X.xmat.1D |& tee out.cormat_warn.txt

# display degrees of freedom info from X-matrix
1d_tool.py -show_df_info -infile X.xmat.1D |& tee out.df_info.txt

# -- execute the 3dREMLfit script, written by 3dDeconvolve --
tcsh -x stats.REML_cmd

# if 3dREMLfit fails, terminate the script
if ( $status != 0 ) then
    echo '---------------------------------------'
    echo '** 3dREMLfit error, failing...'
    exit
endif

# create an all_runs dataset to match the fitts, errts, etc.
3dTcat -prefix all_runs.$subj pb06.$subj.r*.scale+tlrc.HEAD

# --------------------------------------------------
# create a temporal signal to noise ratio dataset
#    signal: if 'scale' block, mean should be 100
#    noise : compute standard deviation of errts
3dTstat -mean -prefix rm.signal.all all_runs.$subj+tlrc"[$ktrs]"
3dTstat -stdev -prefix rm.noise.all errts.${subj}_REML+tlrc"[$ktrs]"
3dcalc -a rm.signal.all+tlrc \
       -b rm.noise.all+tlrc \
       -expr 'a/b' -prefix TSNR.$subj

# ---------------------------------------------------
# compute and store GCOR (global correlation average)
# (sum of squares of global mean of unit errts)
3dTnorm -norm2 -prefix rm.errts.unit errts.${subj}_REML+tlrc
3dmaskave -quiet -mask full_mask.$subj+tlrc rm.errts.unit+tlrc \
          > mean.errts.unit.1D
3dTstat -sos -prefix - mean.errts.unit.1D\' > out.gcor.1D
echo "-- GCOR = `cat out.gcor.1D`"

# ---------------------------------------------------
# compute correlation volume
# (per voxel: correlation with masked brain average)
3dmaskave -quiet -mask full_mask.$subj+tlrc errts.${subj}_REML+tlrc \
          > mean.errts.1D
3dTcorr1D -prefix corr_brain errts.${subj}_REML+tlrc mean.errts.1D

# create fitts dataset from all_runs and errts
3dcalc -a all_runs.$subj+tlrc -b errts.${subj}+tlrc -expr a-b \
       -prefix fitts.$subj

# create fitts from REML errts
3dcalc -a all_runs.$subj+tlrc -b errts.${subj}_REML+tlrc -expr a-b \
       -prefix fitts.$subj\_REML

# create ideal files for fixed response stim types
1dcat X.nocensor.xmat.1D'[5]' > ideal_faces.1D

# --------------------------------------------------
# extract non-baseline regressors from the X-matrix,
# then compute their sum
1d_tool.py -infile X.nocensor.xmat.1D -write_xstim X.stim.xmat.1D
3dTstat -sum -prefix sum_ideal.1D X.stim.xmat.1D

# ============================ blur estimation =============================
# compute blur estimates
touch blur_est.$subj.1D   # start with empty file

# create directory for ACF curve files
mkdir files_ACF

# -- estimate blur for each run in epits --
touch blur.epits.1D

# restrict to uncensored TRs, per run
foreach run ( $runs )
    set trs = `1d_tool.py -infile X.xmat.1D -show_trs_uncensored encoded \
                          -show_trs_run $run`
    if ( $trs == "" ) continue
    3dFWHMx -detrend -mask mask_anat.$subj+tlrc \
            -ACF files_ACF/out.3dFWHMx.ACF.epits.r$run.1D \
            all_runs.$subj+tlrc"[$trs]" >> blur.epits.1D
end

# compute average FWHM blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.epits.1D'{0..$(2)}'\'` )
echo average epits FWHM blurs: $blurs
echo "$blurs # epits FWHM blur estimates" >> blur_est.$subj.1D

# compute average ACF blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.epits.1D'{1..$(2)}'\'` )
echo average epits ACF blurs: $blurs
echo "$blurs # epits ACF blur estimates" >> blur_est.$subj.1D

# -- estimate blur for each run in errts --
touch blur.errts.1D

# restrict to uncensored TRs, per run
foreach run ( $runs )
    set trs = `1d_tool.py -infile X.xmat.1D -show_trs_uncensored encoded \
                          -show_trs_run $run`
    if ( $trs == "" ) continue
    3dFWHMx -detrend -mask mask_anat.$subj+tlrc \
            -ACF files_ACF/out.3dFWHMx.ACF.errts.r$run.1D \
            errts.${subj}+tlrc"[$trs]" >> blur.errts.1D
end

# compute average FWHM blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.errts.1D'{0..$(2)}'\'` )
echo average errts FWHM blurs: $blurs
echo "$blurs # errts FWHM blur estimates" >> blur_est.$subj.1D

# compute average ACF blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.errts.1D'{1..$(2)}'\'` )
echo average errts ACF blurs: $blurs
echo "$blurs # errts ACF blur estimates" >> blur_est.$subj.1D

# -- estimate blur for each run in err_reml --
touch blur.err_reml.1D

# restrict to uncensored TRs, per run
foreach run ( $runs )
    set trs = `1d_tool.py -infile X.xmat.1D -show_trs_uncensored encoded \
                          -show_trs_run $run`
    if ( $trs == "" ) continue
    3dFWHMx -detrend -mask mask_anat.$subj+tlrc \
            -ACF files_ACF/out.3dFWHMx.ACF.err_reml.r$run.1D \
            errts.${subj}_REML+tlrc"[$trs]" >> blur.err_reml.1D
end

# compute average FWHM blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.err_reml.1D'{0..$(2)}'\'` )
echo average err_reml FWHM blurs: $blurs
echo "$blurs # err_reml FWHM blur estimates" >> blur_est.$subj.1D

# compute average ACF blur (from every other row) and append
set blurs = ( `3dTstat -mean -prefix - blur.err_reml.1D'{1..$(2)}'\'` )
echo average err_reml ACF blurs: $blurs
echo "$blurs # err_reml ACF blur estimates" >> blur_est.$subj.1D

# add 3dClustSim results as attributes to any stats dset
mkdir files_ClustSim

# run Monte Carlo simulations using method 'ACF'
set params = ( `grep ACF blur_est.$subj.1D | tail -n 1` )
3dClustSim -both -mask mask_anat.$subj+tlrc -acf $params[1-3] \
           -cmd 3dClustSim.ACF.cmd -prefix files_ClustSim/ClustSim.ACF

# run 3drefit to attach 3dClustSim results to stats
set cmd = ( `cat 3dClustSim.ACF.cmd` )
$cmd stats.$subj+tlrc stats.${subj}_REML+tlrc

# ========================= auto block: QC_review ==========================
# generate quality control review scripts and HTML report

# generate a review script for the unprocessed EPI data
# (all echoes of all runs)
gen_epi_review.py -script @epi_review.$subj \
    -dsets pb00.$subj.r*.e*.tcat+orig.HEAD

# -------------------------------------------------
# generate scripts to review single subject results
# (try with defaults, but do not allow bad exit status)

# write AP uvars into a simple txt file
cat << EOF > out.ap_uvars.txt
mot_limit : 0.8
out_limit : 0.05
copy_anat : anatSS.T1W+orig.HEAD
combine_method : m_tedana
mask_dset : mask_anat.$subj+tlrc.HEAD
tlrc_base : MNI152_2009_template_SSW.nii.gz
ss_review_dset : out.ss_review.$subj.txt
vlines_tcat_dir : vlines.pb00.tcat
EOF

# and convert the txt format to JSON
cat out.ap_uvars.txt | afni_python_wrapper.py -eval "data_file_to_json()" \
    > out.ap_uvars.json

# initialize gen_ss_review_scripts.py with out.ap_uvars.json
gen_ss_review_scripts.py -exit0 \
    -init_uvars_json out.ap_uvars.json \
    -write_uvars_json out.ss_review_uvars.json

# ========================== auto block: finalize ==========================

# remove temporary files
\rm -f rm.*

# if the basic subject review script is here, run it
# (want this to be the last text output)
if ( -e @ss_review_basic ) then
    ./@ss_review_basic |& tee out.ss_review.$subj.txt

    # generate html ss review pages
    # (akin to static images from running @ss_review_driver)
    apqc_make_tcsh.py -review_style basic -subj_dir . \
        -uvar_json out.ss_review_uvars.json
    tcsh @ss_review_html |& tee out.review_html
    apqc_make_html.py -qc_dir QC_$subj

    echo "\nconsider running: \n"
    echo " afni_open -b $subj.results/QC_$subj/index.html"
    echo ""
endif

# return to parent directory (just in case...)
cd ..

echo "execution finished: `date`"

# ==========================================================================
# script generated by the command:
#
# afni_proc.py -subj_id SM327_all4runs -script proc.SM327_all4runs \
#     -scr_overwrite -blocks despike tshift align tlrc volreg mask combine \
#     blur scale regress -radial_correlate_blocks tcat volreg -copy_anat \
#     anatSS.T1W.nii -anat_has_skull no -dsets_me_run \
#     epi_run1_face_echo_001+orig.HEAD epi_run1_face_echo_002+orig.HEAD \
#     epi_run1_face_echo_003+orig.HEAD epi_run2_face_echo_001+orig.HEAD \
#     epi_run2_face_echo_002+orig.HEAD epi_run2_face_echo_003+orig.HEAD \
#     epi_run3_face_echo_001+orig.HEAD epi_run3_face_echo_002+orig.HEAD \
#     epi_run3_face_echo_003+orig.HEAD epi_run4_face_echo_001+orig.HEAD \
#     epi_run4_face_echo_002+orig.HEAD epi_run4_face_echo_003+orig.HEAD \
#     -echo_times 12 31.7 51.4 -reg_echo 2 -tcat_remove_first_trs 0 \
#     -align_opts_aea -giant_move -align_opts_aea -cost lpc+ZZ -check_flip \
#     -volreg_align_to MIN_OUTLIER -volreg_align_e2a -blur_size 4 \
#     -blur_in_mask yes -volreg_tlrc_warp -tlrc_base \
#     MNI152_2009_template_SSW.nii.gz -tlrc_NL_warp -tlrc_NL_warped_dsets \
#     ./anatQQ.T1W.nii ./anatQQ.T1W.aff12.1D ./anatQQ.T1W_WARP.nii \
#     -mask_epi_anat yes -mask_apply anat -combine_method m_tedana \
#     -regress_stim_times ms327_faces_allruns.txt -regress_stim_labels faces \
#     -regress_basis 'BLOCK(3)' -regress_est_blur_errts \
#     -regress_censor_outliers 0.05 -regress_censor_motion 0.8 \
#     -regress_motion_per_run -regress_opts_3dD -gltsym 'SYM: faces' \
#     -glt_label 1 Faces -gltsym 'SYM: -faces' -glt_label 2 ITI \
#     -regress_compute_fitts -regress_reml_exec -regress_make_ideal_sum \
#     sum_ideal.1D -regress_est_blur_epits -regress_est_blur_errts \
#     -regress_run_clustsim yes -html_review_style basic