From noreply at svn.ci.uchicago.edu Tue Sep 1 19:10:30 2009 From: noreply at svn.ci.uchicago.edu (noreply at svn.ci.uchicago.edu) Date: Tue, 1 Sep 2009 19:10:30 -0500 (CDT) Subject: [Swift-commit] r3101 - SwiftApps/SIDGrid/swift/projects/andric/ccf_emblem/NonParametric_lag_analy Message-ID: <20090902001030.607749CC91@vm-125-59.ci.uchicago.edu> Author: andric Date: 2009-09-01 19:10:29 -0500 (Tue, 01 Sep 2009) New Revision: 3101 Modified: SwiftApps/SIDGrid/swift/projects/andric/ccf_emblem/NonParametric_lag_analy/surfclust.swift Log: tested different rmm vals Modified: SwiftApps/SIDGrid/swift/projects/andric/ccf_emblem/NonParametric_lag_analy/surfclust.swift =================================================================== --- SwiftApps/SIDGrid/swift/projects/andric/ccf_emblem/NonParametric_lag_analy/surfclust.swift 2009-08-19 22:04:36 UTC (rev 3100) +++ SwiftApps/SIDGrid/swift/projects/andric/ccf_emblem/NonParametric_lag_analy/surfclust.swift 2009-09-02 00:10:29 UTC (rev 3101) @@ -11,7 +11,7 @@ string id = "interp"; string hemis[] = ["lh","rh"]; float vertexThresh[] = [8.8]; -float rmm_vals[] = [4.5, 5.1]; +float rmm_vals[] = [4.7, 4.8]; foreach h in hemis{ foreach thresh in vertexThresh{ From noreply at svn.ci.uchicago.edu Thu Sep 3 00:09:24 2009 From: noreply at svn.ci.uchicago.edu (noreply at svn.ci.uchicago.edu) Date: Thu, 3 Sep 2009 00:09:24 -0500 (CDT) Subject: [Swift-commit] r3102 - SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/preprocessingDos Message-ID: <20090903050924.6276A9CC86@vm-125-59.ci.uchicago.edu> Author: andric Date: 2009-09-03 00:09:23 -0500 (Thu, 03 Sep 2009) New Revision: 3102 Modified: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/preprocessingDos/insertDataTS.py Log: inserted the mesh50 smooth cleanTS Modified: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/preprocessingDos/insertDataTS.py =================================================================== --- SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/preprocessingDos/insertDataTS.py 2009-09-02 00:10:29 UTC (rev 3101) +++ SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/preprocessingDos/insertDataTS.py 2009-09-03 05:09:23 UTC (rev 3102) @@ -20,9 +20,7 @@ def insertdata_to_db(h): print "que hora es?\n"+time.ctime() try: - #file = "'/disks/ci-gpfs/fmri/cnari/swift/projects/andric/peakfit_pilots/PK2/PK2surfaces/surfaceData/PK2_"+h+"_TSfordb.txt'" - #file = "'/disks/ci-gpfs/fmri/cnari/swift/projects/andric/peakfit_pilots/PK2/PK2surfaces/surfaceData/cleanTSsmooth_"+h+"_PK2fordb.txt'" - file = "'/gpfs/pads/fmri/cnari/swift/projects/andric/peakfit_pilots/PK2/preprocessingDos/cleanTSsmooth_"+h+"_PK2fordb.txt'" + file = "'/gpfs/pads/fmri/cnari/swift/projects/andric/peakfit_pilots/PK2/preprocessingDos/mesh50_"+h+"_PK2fordb.txt'" print "File loading: "+file insert_statement = "load data local infile "+file+" into table peakTS_data"+h+" fields terminated by ' ';" print "Insert statement: "+insert_statement From noreply at svn.ci.uchicago.edu Thu Sep 3 00:58:57 2009 From: noreply at svn.ci.uchicago.edu (noreply at svn.ci.uchicago.edu) Date: Thu, 3 Sep 2009 00:58:57 -0500 (CDT) Subject: [Swift-commit] r3103 - SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2 Message-ID: <20090903055857.4DAA79CCA6@vm-125-59.ci.uchicago.edu> Author: andric Date: 2009-09-03 00:58:56 -0500 (Thu, 03 Sep 2009) New Revision: 3103 Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/ Log: for doing peak analysis on indiv runs From noreply at 
svn.ci.uchicago.edu Thu Sep 3 01:00:01 2009 From: noreply at svn.ci.uchicago.edu (noreply at svn.ci.uchicago.edu) Date: Thu, 3 Sep 2009 01:00:01 -0500 (CDT) Subject: [Swift-commit] r3104 - in SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun: . Rscripts Message-ID: <20090903060001.886329CCA6@vm-125-59.ci.uchicago.edu> Author: andric Date: 2009-09-03 01:00:01 -0500 (Thu, 03 Sep 2009) New Revision: 3104 Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/Rscripts/ SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/Rscripts/ShellpeakMediatorPK2.R SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/Rscripts/peakfitv2v1.R SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/Rscripts/preprocessEnewsmooth.R Log: R code Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/Rscripts/ShellpeakMediatorPK2.R =================================================================== --- SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/Rscripts/ShellpeakMediatorPK2.R (rev 0) +++ SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/Rscripts/ShellpeakMediatorPK2.R 2009-09-03 06:00:01 UTC (rev 3104) @@ -0,0 +1,66 @@ +library(stats) +source("Rscripts/preprocessEnewsmooth.R") +source("Rscripts/peakfitv2v1.R") + +inputfilename <- Sys.getenv("R_INPUT") +print(inputfilename) +allargs <- Sys.getenv("R_SWIFT_ARGS") +print(allargs) +#inputfilename <- noquote(strsplit(allinputs," ")[[1]][1]) +outobjectname <- noquote(strsplit(allargs," ")[[1]][2]) +print(outobjectname) +outputPDFfilename <- paste(outobjectname,"pdf",sep="") +outputAfilename <- paste(outobjectname,"a",sep="") +outputBfilename <- paste(outobjectname,"b",sep="") + +dd <- as.matrix(read.table(inputfilename)) +dd <- data.frame(dd[1,]) +print(length(dd[,1])) +## shedding the first 3 values (subjectID, vert num, and roi label) +#dd <- data.frame(as.numeric(dd[4:length(dd[,1]),1])) +#dd <- as.numeric(dd[4:1811]) + +## taking the mean of the TS to be the baseline +#quickbase <- mean(dd) +quickbase <- mean(dd[,1]) +adjusted.input = dd[,1]-quickbase +res_preprocess <- preprocess_1(input = adjusted.input); + +test2_res_D2_nlminb <- nlminb( + start = res_preprocess$guess, + objective = fcn_L2, + gradient = test_D_fcn_L2, +#hessian = test_D2_fcn_L2, + control = list(eval.max = 50000, iter.max = 5000), +# the lower line with fewer iterations is just for testing purposes. 
the upper line is the real one +# control = list(eval.max = 100, iter.max = 50), + lower = rep(c(0),length(res_preprocess$guess)), + fcall = f, + n = res_preprocess$n, + x = res_preprocess$interpolated_x, + y_and_noise = res_preprocess$interpolated_y) + +pdf(file=outputPDFfilename) +op <- par(mfrow=c(1,1)) +plot(res_preprocess$interpolated_x,res_preprocess$interpolated_y,xlab = "",ylab = "BOLD (1E-3)",type = "b"); +plot.est(par = test2_res_D2_nlminb$par, fcall = f, n = res_preprocess$n, res_preprocess$interpolated_x,col = "blue"); +plot.est.individual(par = test2_res_D2_nlminb$par, fcall = f, n = res_preprocess$n, res_preprocess$interpolated_x,col = "blue"); +dev.off() +par(op) + +result1<- est.stats.summary( + par = test2_res_D2_nlminb$par, + sse = test2_res_D2_nlminb$objective, + x = res_preprocess$interpolated_x, + y = res_preprocess$interpolated_y) + +result2 <- est.summary( + par = test2_res_D2_nlminb$par, + sse = test2_res_D2_nlminb$objective, + n = res_preprocess$n, + x = res_preprocess$interpolated_x, + y = res_preprocess$interpolated_y) + + +write.table(result1,outputAfilename,row.name = F,col.name = F) +write.table(result2,outputBfilename,row.name = F,col.name = F) Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/Rscripts/peakfitv2v1.R =================================================================== --- SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/Rscripts/peakfitv2v1.R (rev 0) +++ SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/Rscripts/peakfitv2v1.R 2009-09-03 06:00:01 UTC (rev 3104) @@ -0,0 +1,385 @@ +# ---------------------------------------------------------------------- +# Savitzky-Golay Algorithm +# ---------------------------------------------------------------------- +# T2 <- sav.gol(T, fl, forder, dorder); +# +# Polynomial filtering method of Savitzky and Golay +# See Numerical Recipes, 1992, Chapter 14.8, for details. +# +# T = vector of signals to be filtered +# (the derivative is calculated for each ROW) +# fl = filter length (for instance fl = 51..151) +# forder = filter order (2 = quadratic filter, 4= quartic) +# dorder = derivative order (0 = smoothing, 1 = first derivative, etc.) +# +sav.gol <- function(T, fl, forder, dorder) +{ + m <- length(T) + dorder <- dorder + 1 + + # -- calculate filter coefficients -- + fc <- (fl-1)/2 # index: window left and right + X <- outer(-fc:fc, 0:forder, FUN="^") # polynomial terms and +coefficients + Y <- pinv(X); # pseudoinverse + + # -- filter via convolution and take care of the end points -- + T2 <- convolve(T, rev(Y[dorder,]), type="o") # convolve(...) + T2 <- T2[(fc+1):(length(T2)-fc)] +} +#----------------------------------------------------------------------- +# *** PseudoInvers of a Matrix *** +# using singular value decomposition +# +pinv <- function (A) +{ + s <- svd(A) + s$v %*% diag(1/s$d) %*% t(s$u) +} +#----------------------------------------------------------------------- +# +# +#------------gamma function used in peakfit------------------------------------------- +# +# | x-a1 | | (x-a1)/a2 + a3 -1 |^(a3-1) +# y = a0*exp|- -------|*| ------------------ | +# | a2 | | a3-1 | +# a0 = amplitude +# a1 = center +# a2 = width (>0) +# a3 = shape (> 1.01, < 167.92) +# + +f_peakfit <- function(a0,a1,a2,a3,n,x) { + expr <- rep(c(0),length(x)); + for (i in 1:n) + { + tmp <- rep(c(0),length(x)) + tmp <- a0[i]*exp(-(x - a1[i])/a2[i])*((x - a1[i])/(a2[i]*(a3[i] - 1)) + 1 )^(a3[i] - 1); +# when { b[i]/shape*(x -center[i]) + 1 } < 0, the power is meaningless. 
tmp[j] is "NaN" and will be replaced by zero + for (j in 1:length(x)) {if (tmp[j] == "NaN") {tmp[j] = 0} } + expr <- expr + tmp + } + eval(expr) +} + + +# define the function (a kind of simplified version from above) to be used to fit the data +# +# y = a*exp(-b*(x - center))*(b/shape*(x - center) + 1)^(shape) +# +# a = amplitude +# b = decay parameter (correspond to 1/a2) +# center = a1; +# shape = a3 - 1; later shape will be fixed as 8.6, which is a common number used in most of fMRI study +# +# +# ------------------------------------------------------------------------------------------- +# the following is a summation of gmma functions to fit raw data +# assumption: linear system +# x is the time points +# n is the number of peaks detected by second derivatives +# a, b and center are the parameters defined above and need to be estimated +# a, b and center are vectors with length corresponding to the number of peaks +# + + +# shape is fixed at 8.6 in all the following analysis +shape = 8.6 +f <- function(a,b,center,n,x) { + shape = 8.6; + expr <- rep(c(0),length(x)); + for (i in 1:n) + { + tmp <- rep(c(0),length(x)) + tmp <- a[i]*exp(-b[i]*(x - center[i]))*(b[i]/shape*(x -center[i]) + 1 )^shape +# when { b[i]/shape*(x -center[i]) + 1 } < 0, the power is meaningless. tmp[j] is "NaN" and will be replaced by zero + for (j in 1:length(x)) {if (tmp[j] == "NaN") {tmp[j] = 0} } + expr <- expr + tmp + } + eval(expr) +} + +# linear square fit, minmize funtion 0.5*|y - y_hat|^2. +# Use as an input function for constraint estimation +fcn_L2 <- function(par,n,x,y_and_noise,fcall) { + a <- par[1:n] + b <- par[(n+1):(2*n)] + center <- par[(2*n+1):(3*n)] + new_par <- c(list(a = a),list(b = b),list(center = center)) + res <- (y_and_noise - do.call("fcall",c( as.list(new_par), list(n = n), list(x = x) ) )); + 0.5*t(res) %*% res + } + + +# define the function (a kind of simplified version from above) to be used to fit the data +# +# y = a*exp(-b*(x - center))*(b/shape*(x - center) + 1)^(shape) +# +# a = amplitude +# b = decay parameter (correspond to 1/a2) +# center = a1; +# shape = a3 - 1; later shape will be fixed as 8.6, which is a common number used in most of fMRI study +# +# df_a = exp(-b*(x - center))*(b/shape*(x - center) + 1)^(shape) +# df_b = a*(-(x-center))*exp(-b*(x - center))*(b/shape*(x - center) + 1)^(shape) + a*exp(-b*(x - center))*shape*(b/shape*(x - center) + 1)^(shape-1)*(x - center)/shape +# df_center = a*b*exp(-b*(x - center))*(b/shape*(x - center) + 1)^(shape) + a*exp(-b*(x - center))*shape*(b/shape*(x - center) + 1)^(shape-1)*(-b/shape) + +D_fcn_L2 <- function(par,n,x,y_and_noise,fcall) { + df_a <- matrix(rep(c(0),n*length(x)),ncol = n, nrow = length(x)) + df_b <- matrix(rep(c(0),n*length(x)),ncol = n, nrow = length(x)) + df_center <- matrix(rep(c(0),n*length(x)),ncol = n, nrow = length(x)) + a <- par[1:n] + b <- par[(n+1):(2*n)] + center <- par[(2*n+1):(3*n)] + new_par <- c(list(a = a),list(b = b),list(center = center)) + for (i in 1:n) { + df_a[,i] <- exp(-b[i]*(x - center[i]))*(b[i]/shape *(x -center[i]) + 1 )^shape + df_b[,i] <- a[i]*(-(x - center[i]))*exp(-b[i]*(x - center[i]))*(b[i]/shape *(x -center[i]) + 1 )^shape + a[i]*exp(-b[i]*(x - center[i]))*shape*(b[i]/shape*(x -center[i]) + 1 )^(shape - 1)*(x - center[i])/shape + df_center[,i] <- a[i]*b[i]*exp(-b[i]*(x - center[i]))*(b[i]/shape *(x -center[i]) + 1 )^shape + a[i]*exp(-b[i]*(x - center[i]))*shape*(b[i]/shape*(x -center[i]) + 1 )^(shape -1 )*(-b[i]/shape) + } + tmp <- cbind(df_a,df_b,df_center) + for (i in 
1:(dim(tmp)[1]) ) { + for (j in 1:(dim(tmp)[2]) ) { + if (tmp[i,j] == "NaN") {tmp[i,j] = 0} + } + } + j_f <- tmp + y_eval <- do.call("fcall",c(list(a = a), list(b = b), list(center = center), list(n = n), list(x = x))) + t(j_f) %*% (y_eval - y_and_noise) + } + +#---- deriv 3------------------------------------------------------------- +D_individual_f <- deriv3( + ~a*exp(-b*(x - center))*(b/shape*(x -center) + 1 )^shape, + c("a","b","center"), + function(a,b,center,x,shape) {}) + +test_D_fcn_L2 <- function(par,n,x,y_and_noise,fcall) { + shape = 8.6 + df_a <- matrix(rep(c(0),n*length(x)),ncol = n, nrow = length(x)) + df_b <- matrix(rep(c(0),n*length(x)),ncol = n, nrow = length(x)) + df_center <- matrix(rep(c(0),n*length(x)),ncol = n, nrow = length(x)) + a <- par[1:n] + b <- par[(n+1):(2*n)] + center <- par[(2*n+1):(3*n)] + new_par <- c(list(a = a),list(b = b),list(center = center)) + for (i in 1:n) { + tmp <- D_individual_f(a[i],b[i],center[i],x,shape) + g <- attr(tmp,"gradient") + df_a[,i] <- g[,1]; + df_b[,i] <- g[,2]; + df_center[,i] <- g[,3]; + } + tmp <- cbind(df_a,df_b,df_center) + for (i in 1:(dim(tmp)[1]) ) { + for (j in 1:(dim(tmp)[2]) ) { + if (tmp[i,j] == "NaN") {tmp[i,j] = 0} + } + } + j_f <- tmp + y_eval <- do.call("fcall",c(list(a = a), list(b = b), list(center = center), list(n = n), list(x = x))) + t(j_f) %*% (y_eval - y_and_noise) + } +#---------------------------------------------------------------------- + +# 2nd derivative +test_D2_fcn_L2 <- function(par,n,x,y_and_noise,fcall) { + shape = 8.6 + df_a <- matrix(rep(c(0),n*length(x)),ncol = n, nrow = length(x)) + df_b <- matrix(rep(c(0),n*length(x)),ncol = n, nrow = length(x)) + df_center <- matrix(rep(c(0),n*length(x)),ncol = n, nrow = length(x)) + a <- par[1:n] + b <- par[(n+1):(2*n)] + center <- par[(2*n+1):(3*n)] + new_par <- c(list(a = a),list(b = b),list(center = center)) + f <- do.call("fcall",c(list(a = a), list(b = b), list(center = center), list(n = n), list(x = x))) - y_and_noise + + for (i in 1:n) { + tmp <- D_individual_f(a[i],b[i],center[i],x,shape) + g <- attr(tmp,"gradient") + df_a[,i] <- g[,1]; + df_b[,i] <- g[,2]; + df_center[,i] <- g[,3]; + } + tmp <- cbind(df_a,df_b,df_center) + for (i in 1:(dim(tmp)[1]) ) { + for (j in 1:(dim(tmp)[2]) ) { + if (tmp[i,j] == "NaN") {tmp[i,j] = 0} + } + } + j_f <- tmp + term1 <- t(j_f) %*% j_f +# 1st term in the equation calculated, shall be a 3n x 3n matrix + + f_D2f = matrix(rep(c(0),9*n^2),ncol = 3*n,nrow = 3*n); + for (i in 1:n) { + tmp <- D_individual_f(a[i],b[i],center[i],x,shape) + h <- colSums(attr(tmp,"hessian")*f,1) + f_D2f[i,i] = h[1,1]; + f_D2f[n+i,i] = f_D2f[i,n+i] = h[1,2]; + f_D2f[2*n+i,i] = f_D2f[i,2*n+i] = h[1,3]; + f_D2f[n+i,n+i] = h[2,2]; + f_D2f[2*n+i,n+i] = f_D2f[n+i,2*n+i] = h[2,3]; + f_D2f[2*n+i,2*n+i] = h[3,3]; + } + for (i in 1:(dim(f_D2f)[1]) ) { + for (j in 1:(dim(f_D2f)[2]) ) { + if (f_D2f[i,j] == "NaN") {f_D2f[i,j] = 0} + } + } + + term1 + f_D2f + } + +#---------------------------------------------------------------------------------------- +# plot fitted curve with observed one +plot.est <- function(par,fcall,n,x,col) { + a <- par[1:n] + b <- par[(n+1):(2*n)] + center <- par[(2*n+1):(3*n)] + new_par <- c(list(a = a),list(b = b),list(center = center)) + y_est <- do.call("fcall",c( as.list(new_par), list(n = n), list(x = x) )) + lines(x,y_est,col = col,lwd = 2) + } + +# plot individual fitted curve +plot.est.individual <- function(par,fcall,n,x,col) { + a <- par[1:n] + b <- par[(n+1):(2*n)] + center <- par[(2*n+1):(3*n)] + for (i in 1:n) + { + 
new_par <- c(list(a = a[i]),list(b = b[i]),list(center = center[i])) + y_est <- do.call("fcall",c( as.list(new_par), list(n = 1), list(x = x) )) + lines(x,y_est,col = col,lwd = 1) + } +} + + +# fit summary + +# fit statistical summary for constraint estimation +est.stats.summary <- function(par,sse,x,y) { + n_x = length(x); # the number of observations + m = length(par); # the number of parameters + sse # sum of squares due to error + ave_y = mean(y); # the mean of observation + ssm = 0.5* t(y - ave_y) %*% (y - ave_y); # the sum of square about mean + r2 = 1 - sse/ssm; # R square, coefficient of determination + dof = n_x - m; # degree of freedom + r2_adjusted = 1 - sse*(n_x-1)/(ssm*(dof-1)); # adjusted R square + mse = sse/dof; # the mean square error + se = sqrt(mse); # the standard error of fit, the root mse + msr = (ssm - sse)/(m - 1); # mean square regression + f = msr/mse; # F-statistics + c(r2,dof,r2_adjusted,f,se) +# c("r2" = r2,"DF" = dof,"Adj r2" = r2_adjusted,"F" = f,"standard error of fit" = se) + } + +# fit summary includes the fitting parameters (amplitude, center, fwhm) and the analytical area under the curve +# -----fwhm------------ +# fwhm ~ 2.35*b^(1/2)*c +# b & c are from gamma variate function x^b*exp(-t/c) +# this approximation is found in ref "Event-related fMRI Contrast When using Constant Interstimulus Interval: Theory and Experiment", published in Magnetic Resonance in Medicine 43:540-548 (2000) +# this approximation result is a little bit different from that from peakfit +# in our code, the shape parameter is fixed at 8.6 & the width-related parameter is b +# fwhm ~ 2.35*sqrt(8.6)/b +# -----area under the curve------- +# after some derivations, area under the curve is a/b*exp(8.6)*8.6^(-8.6)*gamma(8.6+1); +# the derivation was confirmed with peakfit result +est.summary_1 <- function(fcall,par,sse,n,x,y) { + a <- par[1:n] # estimated a + b <- par[(n+1):(2*n)] # estimated b + center <- par[(2*n+1):(3*n)] # estimated center +# --calculate standard error of fit + n_x = length(x); # the number of observations + m = length(par); # the number of parameters + dof = n_x - m; # degree of freedom + sse # sum of squares due to error + mse = sse/dof; # the mean square error + se = sqrt(mse); # the standard error of fit, the root mse +#---standard error of parameters, se * 1/sqrt(diag(hessian matrix of 0.5*|y-y_hat|^2)) + std_par <- se * 1/sqrt(diag(test_D2_fcn_L2(par,n,x,y,f))) # std error of estimated parameters + std_a <- std_par[1:n]; + std_b <- std_par[(n+1):(2*n)]; + std_center <- std_par[(2*n+1):(3*n)]; +#---calculate fwhm, analytical area and percent area------ + shape <- 8.6 + fwhm <- matrix(c(0),ncol = 1,nrow = n); + area <- matrix(c(0),ncol = 1,nrow = n); + percent_area <- matrix(c(0),ncol = 1,nrow = n); for (i in 1:n) { + fwhm[i] <- 2.35*sqrt(shape)/b[i]; + area[i] <- a[i]/b[i]*exp(shape)*shape^(-shape)*gamma(shape+1); + } + percent_area <- area/sum(area)*100; +#---need to double check--------------------- + std_fwhm <- std_b * 2.35*sqrt(8.6) * 1/sqrt(b^4); +#---calculate partial F stats for each peak + y_est_col <- matrix(c(0),ncol = n, nrow = n_x); + for (i in 1:n) + { + new_par <- c(list(a = a[i]),list(b = b[i]),list(center = center[i])); + y_est_col[,i] <- do.call("fcall",c( as.list(new_par), list(n = 1), list(x = x) )); # each col corresponds to each peak fitted ts + } + + p_partial <- matrix(c(0),ncol = 1, nrow = n); + for (i in 1:n) + { + res_wo_i <- rowSums(y_est_col[,-i]) - y; # residual without ith peak + ssr <- 0.5*res_wo_i %*% res_wo_i; # sum square of error of residual 
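## partial F-test logic: the lines below compare the full model (sse, dof) against a
## reduced model that drops peak i (ssr); each peak carries three parameters
## (a, b, center), hence the 3-degree-of-freedom difference, giving
## F = ((ssr - sse)/3) / (sse/dof) and p = 1 - pf(F, 3, dof).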
+ dof_diff <- 3; # degree of freedom difference between full model and reduced model (i.e. without ith peak) + F_partial <- ((ssr - sse)/dof_diff)/(sse/dof); # partial F stats for ith peak + p_partial[i] <- 1 - pf(F_partial,dof_diff,dof); + } + +#---summary of individual results + summary <- data.frame(a,std_a,center,std_center,fwhm,std_fwhm,area,percent_area,p_partial); + name_of_row <- paste("Peak",1:n,sep=""); + name_of_col <- c("Amplitude","e_Amplitude","Center","e_Center","FWHM","e_FWHM","Area","% Area","p_partial"); + dimnames(summary) <- list(name_of_row,name_of_col); + summary + } + +est.summary <- function(par,sse,n,x,y) { + a <- par[1:n] # estimated a + b <- par[(n+1):(2*n)] # estimated b + center <- par[(2*n+1):(3*n)] # estimated center +# --calculate standard error of fit + n_x = length(x); # the number of observations + m = length(par); # the number of parameters + dof = n_x - m; # degree of freedom + sse # sum of squares due to error + mse = sse/dof; # the mean square error + se = sqrt(mse); # the standard error of fit, the root mse +#---standard error of parameters, se * 1/sqrt(diag(hessian matrix of 0.5*|y-y_hat|^2)) + std_par <- se * 1/sqrt(diag(test_D2_fcn_L2(par,n,x,y,f))) # std error of estimated parameters + std_a <- std_par[1:n]; + std_b <- std_par[(n+1):(2*n)]; + std_center <- std_par[(2*n+1):(3*n)]; +#---calculate fwhm, analytical area and percent area------ + shape <- 8.6 + fwhm <- matrix(c(0),ncol = 1,nrow = n); + area <- matrix(c(0),ncol = 1,nrow = n); + percent_area <- matrix(c(0),ncol = 1,nrow = n); for (i in 1:n) { + fwhm[i] <- 2.35*sqrt(shape)/b[i]; + area[i] <- a[i]/b[i]*exp(shape)*shape^(-shape)*gamma(shape+1); + } + percent_area <- area/sum(area, na.rm=T)*100; +#---need to double check--------------------- + std_fwhm <- std_b * 2.35*sqrt(8.6) * 1/sqrt(b^4); +#---onset --------------- + onset <- center - shape/b; +#---summary of individual results + summary <- data.frame(a,std_a,center,std_center,fwhm,std_fwhm,area,percent_area,onset); + name_of_row <- paste("Peak",1:n,sep=""); + name_of_col <- c("Amplitude","e_Amplitude","Center","e_Center","FWHM","e_FWHM","Area","% Area","onset"); + dimnames(summary) <- list(name_of_row,name_of_col); + summary + } + + + + + + Property changes on: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/Rscripts/peakfitv2v1.R ___________________________________________________________________ Name: svn:executable + * Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/Rscripts/preprocessEnewsmooth.R =================================================================== --- SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/Rscripts/preprocessEnewsmooth.R (rev 0) +++ SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/Rscripts/preprocessEnewsmooth.R 2009-09-03 06:00:01 UTC (rev 3104) @@ -0,0 +1,76 @@ +preprocess_1 <- function(input) { + + # read file (read.table) and save as vector (as.matrix) + original_y <- input*1000; + + # original # of points before interpolation + original_x <- c(1:length(original_y)); + + #---------calculate 2nd derivative to find peaks------------------------------------ + # + # windowlength argument n + # p is order of polynimal + # D is the order of derivative + + windowlength <- 3 # 3 data points covered + # tested 4 data points, detail info might be lost + order_polynomial <- 2; # includes constant, linear and quadratic terms + order_derivative <- 2; # need the information of second derivatives + + derivative_original_y <- 
sav.gol(original_y, fl = windowlength, forder = order_polynomial, dorder = order_derivative); + + + # guess # of possible peaks by counting negative y derivatives and corresponding amplitude of y > noise level + noise_level <- 0; # so far noise level is set to 0 + + # guess peak location: use all negative second derivatives or use local min of second derivative + # peak_location = original_x[derivative_original_y < 0 & original_y > noise_level]; + + tmp <- rep(c(0),length(original_x)); + for ( i in 2:(length(original_x)-1) ) { + if ( (derivative_original_y[i] < derivative_original_y[i-1]) & (derivative_original_y[i] < derivative_original_y[i+1]) ) + { tmp[i] = i;} + } + peak_location = original_x[tmp > 0]; + + possible_peaks = length(peak_location) + + #------------------------------------------------------------------------------------- + + + #-----------------interpolation data--------------------------------------------------- + # + # always needs # of parameters < length of observation + # in the case that the 2nd derivative detects many peaks, interpolation shall be used + stim_res = 2 # simply double the time points + + interpolation <- ksmooth(original_x,original_y,"normal",bandwidth = 1,x.points = seq(1,length(original_x),by = 1/stim_res)); + + #derivative_interpolation <- sav.gol(interpolation$y, fl = windowlength, forder = order_polynomial, dorder = order_derivative); + # calculate new peak location + new_peak_location = stim_res*(peak_location - 1) + 1; + + # calculate new peak amplitude + + peak_amplitude = rep(c(0),possible_peaks); + for (i in 1:possible_peaks ) { peak_amplitude[i] = interpolation$y[new_peak_location[i]] } + + #--------------------initialization---------------------------------------- + # + # parameter a corresponds to peak amplitude + a_guess <- peak_amplitude; + + # parameter b corresponds to 1/peak width + b_guess <- rep(c(length(original_y)/possible_peaks), possible_peaks); + + #real_center_guess <- interpolation$x[new_peak_location]; + #center_guess <- b_guess*real_center_guess; + center_guess <- interpolation$x[new_peak_location]; + + guess <- c(a_guess,b_guess,center_guess); + n <- possible_peaks; + + out <- list(guess = guess,n = n,interpolated_x = interpolation$x,interpolated_y = interpolation$y); + out +} + \ No newline at end of file Property changes on: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/Rscripts/preprocessEnewsmooth.R ___________________________________________________________________ Name: svn:executable + *
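For reference, preprocess_1 above flags candidate peaks where the smoothed second derivative has a local minimum. A minimal sketch of that rule on synthetic data (illustrative only: the toy signal and peak positions are invented here, and sav.gol from peakfitv2v1.R must be sourced first):

  source("Rscripts/peakfitv2v1.R")
  x <- 1:60
  y <- 5*exp(-0.5*((x - 20)/3)^2) + 3*exp(-0.5*((x - 45)/4)^2)   # two synthetic bumps
  d2 <- sav.gol(y, fl = 3, forder = 2, dorder = 2)   # smoothed 2nd derivative, same settings as preprocess_1
  is_min <- d2[2:59] < d2[1:58] & d2[2:59] < d2[3:60]   # local minima of the 2nd derivative
  print((2:59)[is_min])   # candidate peak locations; expect values near 20 and 45

From noreply at svn.ci.uchicago.edu Thu Sep 3 01:00:39 2009 From: noreply at svn.ci.uchicago.edu (noreply at svn.ci.uchicago.edu) Date: Thu, 3 Sep 2009 01:00:39 -0500 (CDT) Subject: [Swift-commit] r3105 - SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun Message-ID: <20090903060039.4B2C19CCA6@vm-125-59.ci.uchicago.edu> Author: andric Date: 2009-09-03 01:00:39 -0500 (Thu, 03 Sep 2009) New Revision: 3105 Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/swift_execute.sh Log: execute swift Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/swift_execute.sh =================================================================== --- SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/swift_execute.sh (rev 0) +++ SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/swift_execute.sh 2009-09-03 06:00:39 UTC (rev 3105) @@ -0,0 +1,3 @@ +#!/bin/tcsh + +swift PK2med_runs.swift -sites.file 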
/gpfs/pads/fmri/cnari_svn/config/sites_ranger_peak.xml -tc.file /gpfs/pads/fmri/cnari_svn/config/tc.data -user="andric" Property changes on: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/swift_execute.sh ___________________________________________________________________ Name: svn:executable + * From noreply at svn.ci.uchicago.edu Thu Sep 3 01:01:46 2009 From: noreply at svn.ci.uchicago.edu (noreply at svn.ci.uchicago.edu) Date: Thu, 3 Sep 2009 01:01:46 -0500 (CDT) Subject: [Swift-commit] r3106 - SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun Message-ID: <20090903060146.5D6409CCA6@vm-125-59.ci.uchicago.edu> Author: andric Date: 2009-09-03 01:01:46 -0500 (Thu, 03 Sep 2009) New Revision: 3106 Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/PK2med_runs.swift Log: peak analysis on each run in the time series Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/PK2med_runs.swift =================================================================== --- SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/PK2med_runs.swift (rev 0) +++ SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/PK2med_runs.swift 2009-09-03 06:01:46 UTC (rev 3106) @@ -0,0 +1,66 @@ +#--- peak analysis across the brain via Mediator +## type declarations: +type file{} +type Rscript; +## a type for simple mapping the 3 R scripts used: +type PeakfitR{ + Rscript ShellpeakMediatorPK2; + Rscript preprocessEnewsmooth; + Rscript peakfitv2v1; +} +## a type for simple mapping the two output files: +type PeakResult{ + file pdf; + file a; + file b; +} + +## Mediator app declaration: +app (file q_result, PeakResult outObj) run_query (string med_args, file config, PeakfitR code){ + Mediator med_args stdout=@filename(q_result) @filename(code.ShellpeakMediatorPK2); +} + +## this process sets parameters and calls Mediator: +loop_query(int vert, string user, string db, string host, string query_outline, PeakfitR code, file config, string subject, string h, int rr, int beginTS, int endTS){ + string theoutprefix = "FAH_Q"; + PeakResult outObj; + file q_result ; + string med_args = @strcat("--user ","andric"," --conf ", @filename(config)," --db ", db," --host ", host, + " --vox ", vert," --subject ", subject," --subquery tsTSVAR"," --begin_ts ",beginTS," --end_ts ",endTS, + " --query ", query_outline," --r_swift_args ", @filename(outObj)," --outprefix ", theoutprefix, " --r_script ", at filename(code.ShellpeakMediatorPK2)); + trace(med_args); + (q_result, outObj) = run_query(med_args, config, code); +} + +## needed parameters to use Mediator: +string user = @arg("user"); +string db = "HEL"; +string host = "tp-neurodb.ci.uchicago.edu"; +file config; + +## mapping the R code: +PeakfitR code; + +## variables to move across in the foreach loops: +string declarelist[] = ["PK2smth"]; +string hemilist[] = ["lh"]; +int vertices[] = [3]; + +int runs[] = [1:8]; +int starts[] = [0, 226, 452, 678, 904, 1130, 1356, 1582]; +#int starts[] = [1, 227, 453, 679, 905, 1131, 1357, 1583]; +int TSend = 225; + +foreach subject in declarelist{ + foreach h in hemilist{ + foreach rr in runs{ + int beginTS = starts[rr-1]; + int endTS = starts[rr-1]+TSend; + string query_outline = @strcat("SELECT SUBQUERY FROM peakTS_data",h," WHERE subject = '",subject,"' AND vertex=VOX"); + trace(query_outline); + foreach vert in vertices{ + loop_query(vert, user, db, host, query_outline, code, config, subject, h, rr, beginTS, endTS); + } + } + } +} From noreply at 
svn.ci.uchicago.edu Thu Sep 3 22:44:27 2009 From: noreply at svn.ci.uchicago.edu (noreply at svn.ci.uchicago.edu) Date: Thu, 3 Sep 2009 22:44:27 -0500 (CDT) Subject: [Swift-commit] r3107 - SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun Message-ID: <20090904034427.A2DAC9CCA6@vm-125-59.ci.uchicago.edu> Author: andric Date: 2009-09-03 22:44:27 -0500 (Thu, 03 Sep 2009) New Revision: 3107 Modified: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/swift_execute.sh Log: updated sites location Modified: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/swift_execute.sh =================================================================== --- SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/swift_execute.sh 2009-09-03 06:01:46 UTC (rev 3106) +++ SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/runbyrun/swift_execute.sh 2009-09-04 03:44:27 UTC (rev 3107) @@ -1,3 +1,3 @@ #!/bin/tcsh -swift PK2med_runs.swift -sites.file /gpfs/pads/fmri/cnari_svn/config/sites_ranger_peak.xml -tc.file /gpfs/pads/fmri/cnari_svn/config/tc.data -user="andric" +swift PK2med_runs.swift -sites.file /ci/projects/cnari/config/coaster_ranger.xml -user="andric" From noreply at svn.ci.uchicago.edu Mon Sep 21 12:03:01 2009 From: noreply at svn.ci.uchicago.edu (noreply at svn.ci.uchicago.edu) Date: Mon, 21 Sep 2009 12:03:01 -0500 (CDT) Subject: [Swift-commit] r3108 - SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis Message-ID: <20090921170301.D5FF89CC89@vm-125-59.ci.uchicago.edu> Author: andric Date: 2009-09-21 12:03:01 -0500 (Mon, 21 Sep 2009) New Revision: 3108 Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/ Log: doing cluster correction From noreply at svn.ci.uchicago.edu Mon Sep 21 12:04:09 2009 From: noreply at svn.ci.uchicago.edu (noreply at svn.ci.uchicago.edu) Date: Mon, 21 Sep 2009 12:04:09 -0500 (CDT) Subject: [Swift-commit] r3109 - SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations Message-ID: <20090921170409.CDCA89CC89@vm-125-59.ci.uchicago.edu> Author: andric Date: 2009-09-21 12:04:09 -0500 (Mon, 21 Sep 2009) New Revision: 3109 Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/ClstrMasstrPerm.py Log: orig python script to parse clst_table Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/ClstrMasstrPerm.py =================================================================== --- SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/ClstrMasstrPerm.py (rev 0) +++ SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/ClstrMasstrPerm.py 2009-09-21 17:04:09 UTC (rev 3109) @@ -0,0 +1,45 @@ +#!/usr/bin/python + +import sys + +class ClstrMasstr: + + def __init__(self): + self.input = "" + self.outname = "" + + def get_opts (self,allargstr): + print "length of argstr "+str(len(allargstr)) + i = 0 + for o in allargstr: + print "arg is "+o + if o == "--input": + self.input = allargstr[i+1] + elif o == "--outputname": + self.outname = allargstr[i+1] + print "input: "+self.input + print "outname: "+self.outname + i = i+1 + + def run_clstrmass(self): + cluster_file = open(self.input,"r").read().split("\n") + cluster_file_length = len(cluster_file)-1 + clstrmasslist = "" + + if len(cluster_file)-1 == 1: + clstrmasslist = cluster_file[0]+" \n" + else: + numNd = 
int(cluster_file[16].split()[1]) + Mean = float(cluster_file[16].split()[3]) + mass = numNd*Mean + massval = "%.3f" % mass + clstrmasslist += massval+" \n" + + outfile = open(self.outname,"w") + outfile.write(clstrmasslist) + outfile.close() + + +clstmass = ClstrMasstr() +clstmass.get_opts(sys.argv) +clstmass.run_clstrmass() From noreply at svn.ci.uchicago.edu Mon Sep 21 12:04:30 2009 From: noreply at svn.ci.uchicago.edu (noreply at svn.ci.uchicago.edu) Date: Mon, 21 Sep 2009 12:04:30 -0500 (CDT) Subject: [Swift-commit] r3110 - SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations Message-ID: <20090921170430.3BFC39CC89@vm-125-59.ci.uchicago.edu> Author: andric Date: 2009-09-21 12:04:30 -0500 (Mon, 21 Sep 2009) New Revision: 3110 Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/ClstrMasstr.py Log: python script to parse clst_table Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/ClstrMasstr.py =================================================================== --- SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/ClstrMasstr.py (rev 0) +++ SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/ClstrMasstr.py 2009-09-21 17:04:30 UTC (rev 3110) @@ -0,0 +1,101 @@ +#!/usr/bin/python + +## Sept.2009: This is for grabbing the cluster information (after running SurfClust). +## Can run 4 types of procedures. Specify after "--procedure" flag: +## 1) "NumberNodes" >>> Writing out the cluster size via number of nodes after SurfClust on a single input. +## 2) "PermNumberNodes" >>> Writing out the largest cluster's number of nodes on clustered permutation output +## 3) "ClusterMass" >>> calculating the cluster mass: the number of nodes times the statistic value on a single clustered input. 
+## 4) "PermClusterMass" >>> calculating the cluster mass on clustered permutation output, writing out the largest clustermass + +import sys + +class ClstrMasstr: + + def __init__(self): + self.input = "" + self.outname = "" + self.proc_type = "" + + def get_opts (self,allargstr): + print "length of argstr "+str(len(allargstr)) + i = 0 + for o in allargstr: + print "arg is "+o + if o == "--input": + self.input = allargstr[i+1] + elif o == "--outputname": + self.outname = allargstr[i+1] + elif o == "--procedure": + self.proc_type = allargstr[i+1] + print "input: "+self.input + print "outname: "+self.outname + print "procedure: "+self.proc_type + i = i+1 + + def get_clstr(self): + cluster_file = open(self.input,"r").read().split("\n") + cluster_file_length = len(cluster_file)-1 + clstrlist = "" + for i in range(16,cluster_file_length): + numNd = int(cluster_file[i].split()[1]) + clstrlist += ""+cluster_file[i].split()[0]+" "+`numNd`+"\n" + + outfile = open(self.outname,"w") + outfile.write(clstrlist) + outfile.close() + + def get_Perm_clstr(self): + cluster_file = open(self.input,"r").read().split("\n") + cluster_file_length = len(cluster_file)-1 + if len(cluster_file)-1 == 1: + clusterSize = cluster_file[0]+" \n" + else: + numNd = int(cluster_file[16].split()[1]) + clusterSize = `numNd`+" \n" + + outfile = open(self.outname,"w") + outfile.write(clusterSize) + outfile.close() + + def get_clstrmass(self): + cluster_file = open(self.input,"r").read().split("\n") + cluster_file_length = len(cluster_file)-1 + clstrmasslist = "" + for i in range(16,cluster_file_length): + numNd = int(cluster_file[i].split()[1]) + Mean = float(cluster_file[i].split()[3]) + mass = numNd*Mean + massval = "%.3f" % mass + clstrmasslist += ""+cluster_file[i].split()[0]+" "+massval+"\n" + + outfile = open(self.outname,"w") + outfile.write(clstrmasslist) + outfile.close() + + def get_Perm_clstrmass(self): + cluster_file = open(self.input,"r").read().split("\n") + if len(cluster_file)-1 == 1: + clstrmassval = cluster_file[0]+" \n" + else: + numNd = int(cluster_file[16].split()[1]) + Mean = float(cluster_file[16].split()[3]) + mass = numNd*Mean + massval = "%.3f" % mass + clstrmassval = massval+" \n" + + outfile = open(self.outname,"w") + outfile.write(clstrmassval) + outfile.close() + + + +clstr = ClstrMasstr() +clstr.get_opts(sys.argv) +if clstr.proc_type == "NumberNodes": + clstr.get_clstr() +elif clstr.proc_type == "PermNumberNodes": + clstr.get_Perm_clstr() +elif clstr.proc_type == "ClusterMass": + clstr.get_clstrmass() +elif clstr.proc_type == "PermClusterMass": + clstr.get_Perm_clstrmass() From noreply at svn.ci.uchicago.edu Mon Sep 21 12:05:38 2009 From: noreply at svn.ci.uchicago.edu (noreply at svn.ci.uchicago.edu) Date: Mon, 21 Sep 2009 12:05:38 -0500 (CDT) Subject: [Swift-commit] r3111 - SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations Message-ID: <20090921170538.037BD9CC89@vm-125-59.ci.uchicago.edu> Author: andric Date: 2009-09-21 12:05:37 -0500 (Mon, 21 Sep 2009) New Revision: 3111 Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/clustermassPerms.swift Log: swift script for doing surface alphasim Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/clustermassPerms.swift =================================================================== --- SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/clustermassPerms.swift (rev 0) +++ 
SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/clustermassPerms.swift 2009-09-21 17:05:37 UTC (rev 3111) @@ -0,0 +1,63 @@ +##-- coded: 13.Sept.2009 +type file{} +type Rscript; + +## generate permutation brain +app (file permOUT) permJob (Rscript code){ + RInvoke @code @filename(permOUT); +} + +## smooth the permutation brain +app (file surfsmoothOutfile) SurfSmooth (file spec, file smoothwm, file permOUT){ + SurfSmooth "-spec" @spec "-surf_A" @smoothwm "-met" "HEAT_07" "-input" @permOUT + "-fwhm" 4 "-Niter" "-1" "-add_index" "-output" @filename(surfsmoothOutfile); +} + +## cluster the smoothed permutation brain +app (file surfclustOutput) SurfClust (file spec, file pial, file surfsmoothOutfile, float thresh, float rmm, string outPrefix){ + SurfClust "-spec" @spec "-surf_A" @pial "-input" @surfsmoothOutfile "1" "-rmm" rmm "-thresh_col" "1" + "-thresh" thresh "-amm2" "2" "-sort_n_nodes" "-prepend_node_index" "-prefix" outPrefix; +} + +## calculate the cluster mass and keep the biggest one +app (file clusterMassOUT) ClusterMass (file ClstrMassScript, file surfclustOutput){ + python @ClstrMassScript "--input" @filename(surfclustOutput) "--outputname" @filename(clusterMassOUT) "--procedure" "PermClusterMass"; +} + +(file permOUT, file surfsmoothOutfile, file surfclustOutput, file clusterMassOUT) PermClusterMass (Rscript code, file ClstrMassScript, file spec, file smoothwm, file pial, float thresh, float rmm, string outPrefix){ + permOUT = permJob(code); + surfsmoothOutfile = SurfSmooth(spec, smoothwm, permOUT); + surfclustOutput = SurfClust(spec, pial, surfsmoothOutfile, thresh, rmm, outPrefix); + clusterMassOUT = ClusterMass(ClstrMassScript, surfclustOutput); +} + + +Rscript code; +file ClstrMassScript; + +string idss[] = ["pa_vs_noise"]; +string way = "oneway"; +int lag = 0; +string h = "lh"; +float vertexThresh[] = [3.84]; +float rmm_vals[] = [3.1]; +#int permbrains[] = [1:1000:1]; +int permbrains[] = [1]; + +foreach id in idss{ + foreach perm in permbrains{ + foreach thresh in vertexThresh{ + foreach rmm in rmm_vals{ + file permOUT; + file spec; + file smoothwm; + file pial; + string outPrefix=@strcat("perm",perm,id,"_",h,"_lag",lag,way,"Thresh",thresh); + file surfsmoothOutfile; + file surfclustOutput; + file clusterMassOUT; + (permOUT, surfsmoothOutfile, surfclustOutput, clusterMassOUT) = PermClusterMass(code, ClstrMassScript, spec, smoothwm, pial, thresh, rmm, outPrefix); + } + } + } +} From noreply at svn.ci.uchicago.edu Tue Sep 22 13:28:15 2009 From: noreply at svn.ci.uchicago.edu (noreply at svn.ci.uchicago.edu) Date: Tue, 22 Sep 2009 13:28:15 -0500 (CDT) Subject: [Swift-commit] r3112 - in SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations: . 
scripts Message-ID: <20090922182815.D26959CC98@vm-125-59.ci.uchicago.edu> Author: andric Date: 2009-09-22 13:28:15 -0500 (Tue, 22 Sep 2009) New Revision: 3112 Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/scripts/ SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/scripts/generatePerm.R Log: includes needed R code Added: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/scripts/generatePerm.R =================================================================== --- SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/scripts/generatePerm.R (rev 0) +++ SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/scripts/generatePerm.R 2009-09-22 18:28:15 UTC (rev 3112) @@ -0,0 +1,7 @@ +## this is to generate a permutation +allinputs <- Sys.getenv("R_SWIFT_ARGS") +print(allinputs) +outname <- noquote(strsplit(allinputs," ")[[1]][1]) +print(outname) +brain <- rnorm(130934,mean=0.78121,sd=1.092181) +write.table(round(brain,3),outname,row.names=F,col.names=F,quote=F) From noreply at svn.ci.uchicago.edu Fri Sep 25 18:02:36 2009 From: noreply at svn.ci.uchicago.edu (noreply at svn.ci.uchicago.edu) Date: Fri, 25 Sep 2009 18:02:36 -0500 (CDT) Subject: [Swift-commit] r3113 - in usertools/cio: bin libexec/falkon Message-ID: <20090925230236.A4FC79CC8C@vm-125-59.ci.uchicago.edu> Author: aespinosa Date: 2009-09-25 18:02:36 -0500 (Fri, 25 Sep 2009) New Revision: 3113 Added: usertools/cio/libexec/falkon/falkon-start-bgp_logging.sh Removed: usertools/cio/bin/bashrc usertools/cio/bin/bcast.sh usertools/cio/bin/wrapper.sh usertools/cio/libexec/falkon/falkon-start-bgp-logging.sh Modified: usertools/cio/bin/ciologic-bgp.sh usertools/cio/bin/falkon-start.sh usertools/cio/bin/swift_bgp.sh usertools/cio/libexec/falkon/falkon-start-bgp.sh Log: Used mosastore IFS instead of Chirp Deleted: usertools/cio/bin/bashrc =================================================================== --- usertools/cio/bin/bashrc 2009-09-22 18:28:15 UTC (rev 3112) +++ usertools/cio/bin/bashrc 2009-09-25 23:02:36 UTC (rev 3113) @@ -1,3 +0,0 @@ -export PATH=/home/zzhang/chirp/bin:$PATH -export PATH=/home/zzhang/ruby-1.8.7-p72/bin/bin:$PATH -PATH=/fuse/bin:/fuse/usr/bin:$PATH \ No newline at end of file Deleted: usertools/cio/bin/bcast.sh =================================================================== --- usertools/cio/bin/bcast.sh 2009-09-22 18:28:15 UTC (rev 3112) +++ usertools/cio/bin/bcast.sh 2009-09-25 23:02:36 UTC (rev 3113) @@ -1,16 +0,0 @@ -#!/fuse/bin/bash - -export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/home/zzhang/cio/lib -PATH=/fuse/bin:/fuse/usr/bin:$PATH - -IP=`/sbin/ifconfig | grep inet | tail -n 1 | cut -d ':' -f 2 |awk '{print $1}'` -#mkdir -p /dev/shm/share/common -#cp -r common/* /dev/shm/share/ -#chmod -R 755 /dev/shm/share/common -#exit 1 -# tree network path -DESTHOSTS=`seq 0 63 | sed "s/^/10.128.0./" | xargs` -echo ${DESTHOSTS/$IP/" "} -/home/zzhang/chirp/bin/chirp_distribute -a address -D 127.0.0.1 / ${DESTHOSTS/$IP/" "} - -exit 0 \ No newline at end of file Modified: usertools/cio/bin/ciologic-bgp.sh =================================================================== --- usertools/cio/bin/ciologic-bgp.sh 2009-09-22 18:28:15 UTC (rev 3112) +++ usertools/cio/bin/ciologic-bgp.sh 2009-09-25 23:02:36 UTC (rev 3113) @@ -3,38 +3,23 @@ # Script: ciologic-bgp.sh # Description: starts ciologic for the BlueGene -# Check sanity of 
environment - -if [ -z $CIOROOT ]; then - echo "CIOROOT not defined" - exit 1 -fi -if [ -z $CIOARCH ]; then - echo "CIOARCH note defined" - exit 1 -fi - -# BGP specific initialization - -LD_LIBRARY_PATH=/lib:/fuse/lib:/fuse/usr/lib -PATH=/fuse/bin:/fuse/usr/bin:$PATH - -/home/iskra/ZeptoOS/packages/cnip/prebuilt/cn-ipfwd & -while [ ! -f /tmp/ifconfig.cmd ]; do +# function: start_iptorus +start_iptorus(){ + /soft/apps/ZeptoOS-2.0-V1R3M0/cnbin/cn-ipfwd & + while [ ! -f /tmp/ifconfig.cmd ]; do sleep 1 -done -. /tmp/ifconfig.cmd + done + . /tmp/ifconfig.cmd +} # function: get_rank # return the rank of the node this is running on - get_rank(){ echo $CONTROL_INIT | awk -F, '{print $4}' } # function: get_ip [rank] # given a rank of a node. return its ip address - get_ip(){ rank=$1 echo "10.128.$(( rank / 256)).$((rank % 256))" @@ -44,28 +29,30 @@ stripe_size=$1 rank=`get_rank` - mkdir -p /dev/shm/share/stripe/root - cat > /dev/shm/share/.__acl << EOF -address:192.168.1.* rwlda -address:10.* rwlda -address:127.0.0.1 rwlda -EOF + echo "Rank $rank: Starting manager" + cat > /tmp/manager_config.cfg << EOF +# the recommended stripe width +# this is the number of benefactors the client will strip the data among in the write operation +stripe_width = $stripe_size - # Enable striping - cat /dev/shm/share/.__acl > /dev/shm/share/stripe/.__acl - cat /dev/shm/share/.__acl > /dev/shm/share/stripe/root/.__acl - echo bigvolfiles > /dev/shm/share/stripe/key - for (( i = 0; i < stripe_size; i++ )); do - slave_rank=$((rank + i)) - echo `get_ip $slave_rank` >> /dev/shm/share/stripe/hosts - done +# the size fo the chunk - not fully implemented yet +# chunk_size = 1048576 - mkdir -p /chirp - $CHIRPROOT/bin/chirp_fuse -a address /chirp - ln -sf /chirp/`get_ip $rank` /dataifs +# the maximum possible number of benefactors in the system +max_num_ben = 4096 - $CHIRPROOT/bin/chirp_server -r /dev/shm/share +# (Optional) Log mode could be : DEBUG, VERBOS, ERROR, FATAL, OFF +log_mode = OFF + +# (Optional) log file name +# if not provide and the log mode is not OFF, the log messages will be sent to stdout +log_file = /home/espinosa/log/manager_$rank.log +EOF + cd /tmp + /home/espinosa/bin/manager 7005 & + #/home/espinosa/bin/manager 7005 > /dev/null 2> /dev/null & + ifs_slave $rank } # function: ifs_slave [head] @@ -73,77 +60,199 @@ # [head] node. 
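# In the mosastore setup, the head rank runs the manager (port 7005) and each
# slave rank runs a benefactor that donates /dev/shm space and registers with
# the head's IP; the 30-second sleep below gives the manager time to come up.
# get_ip maps a rank onto the torus network, e.g. rank 257 -> 10.128.1.1.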
ifs_slave(){ - ifs_rank=$1 - # For Chirp, just start the server + ifs_rank=`get_ip $1` + rank=`get_rank` + + sleep 30 # Wait for manager + echo "Rank XX: starting benefactor" + mkdir -p /dev/shm/share - cat > /dev/shm/share/.__acl << EOF -address:192.168.1.* rwlda -address:10.* rwlda -address:127.0.0.1 rwlda + cat > /tmp/benefactor_config.cfg << EOF +# the hostname or the IP address of the manager +manager_name = $ifs_rank + +# manager port number +manager_port = 7005 + +# The path to the local directory where the benefactor will store the chunks +benefactor_path = /dev/shm/share + +# Aggregation type, this could be DISK or MEMORY +# DISK is a typical setting for general workloads +# MEMORY is under development +aggregation_type = DISK + +# The donated disk space size in MB +disk_space_size = 512 + +# The donated memory space size in MB, this is effective if aggregation type = MEMORY +memory_space_size = 512 + +# The manager update period in seconds +update_period = 5 + +# (Optional) The local address the benefactor should use - especially in multihomed machines +#benefactor_address = + +# (Optional) Log mode could be : DEBUG, VERBOS, ERROR, FATAL, OFF +log_mode = OFF + +# (Optional) log file name +# if not provided and the log mode is not OFF, the log messages will be sent to stdout +log_file = /home/espinosa/log/benefactor_$rank.log + EOF - - $CHIRPROOT/bin/chirp_server -r /dev/shm/share + cd /tmp + /home/espinosa/bin/benefactor } ifs_mount() { - ifs_rank=$1 - # For Chirp, just start the server - if [ -d /dev/shm/share ]; then - rm -rf /dev/shm/share - fi - mkdir -p /dev/shm/share - cat > /dev/shm/share/.__acl << EOF -address:192.168.1.* rwlda -address:10.* rwlda -address:127.0.0.1 rwlda + ifs_rank=`get_ip $1` + rank=`get_rank` + + sleep 30 # Wait for manager + echo "Rank $rank: Mounting IFS" + + cat > /tmp/flfs_config.cfg << EOF +# the hostname or the IP address of the manager +manager_name = $ifs_rank + + +# manager port number +manager_port = 7005 + + +# execution mode - not fully implemented yet - DEBUG is the only option for now +execution_mode = DEBUG + + +# Naming scheme - to name chunks by sequence number or by hash +# with SEQNUM the chunks will be named by sequence number, this is a typical setting for general workloads +# hash naming is for the content addressability feature +chunk_naming = SEQNUM + + +# Commit scheme - specifies whether to overwrite, non-overwrite or version the previous copy. +# possible values are a no-overwrite mode, an overwrite mode and a versioning mode: +# NOOVERWRITE: writing a new file with the same name as an existing file will fail. +# overwrite mode: if a new file is stored in the system with the same file name as an existing file name, +# the new file will overwrite the old file +# versioning mode: if a new file is stored in the system with the same file name as an existing file name, +# the new file will be stored as a new version of the file +commit_scheme = NOOVERWRITE + + +# Number of chunks to reserve in the repository +num_reserve_chunks = 1024 + +# Write Interface type - to select the write interface type, the following are the write interfaces +# SLIDINGWINDOWWRITE: Sliding window write interface, this is the typical setting. +# an incremental write interface and a complete local write interface also exist; +# these two use the local disk in the write operation and are not extensively tested. +write_interface_type = SLIDINGWINDOWWRITE + + +# the memory space allocated for the buffers in the write operations, in MB, +# affects the Sliding window interface only +memory_size = 256 + + +# if the incremental write interface is the selected write interface, specifies +# the size of the temporary local files in number of chunks +inc_write_file_size = 64 + + +# if the incremental or the complete local write interface is the selected write interface, specifies +# the path to the directory where the temporary files will be saved +local_write_directory = /tmp/FLIncW + +# Read Interface type - to select the read interface type, currently only FBR is implemented +# FBR: Fixed Buffer per Request read interface +read_interface_type = FBR + + +# if FBR is the selected read interface, specifies +# the number of chunks allocated for every request +fbr_request_buffer_size = 4 + +# Number of threads per write agent ( there is an agent per benefactor ) +num_threads_per_agent = 1 + + +# Cache update period in seconds, if this value is set to 0 then the cache is disabled +cache_update_period = 5 + + +# (Optional) Log mode could be : DEBUG, VERBOS, ERROR, FATAL, OFF +log_mode = OFF + +# (Optional) log file name +# if not provided and the log mode is not OFF, the log messages will be sent to stdout +log_file = /home/espinosa/log/mosastore_$rank.log + EOF - - $CHIRPROOT/bin/chirp_server -r /dev/shm/share & - mkdir /chirp - $CHIRPROOT/bin/chirp_fuse -a address /chirp - # TODO: make a symlink to a slave's proper IFS to - # prevent always recalculating it in higher-level - # scripts - # ln -sf /chirp/`get_ip $ifs_rank`@stripe /ifsmount + mkdir -p /dataifs + cd /tmp + /home/espinosa/bin/mosastore -o direct_io -o sync_read /dataifs & + #/home/espinosa/bin/mosastore -o direct_io -o sync_read /dataifs -d 2> /dev/null > /dev/null & } +# Main +# + +# Check sanity of environment +if [ -z $CIOROOT ]; then + echo "CIOROOT not defined" + exit 1 +fi +if [ -z $CIOARCH ]; then + echo "CIOARCH not defined" + exit 1 +fi + +# BGP specific initialization +LD_LIBRARY_PATH=/lib:/fuse/lib:/fuse/usr/lib +PATH=/fuse/bin:/fuse/usr/bin:$PATH + +# Initiate IP over Torus +start_iptorus + +# Process args PART_SIZE=$8 # also BG_SIZE IFS_NUM=$9 STRIPE_SIZE=${10} +# Compute rank RANK=`get_rank` IFS_GRP=$(( PART_SIZE / IFS_NUM )) IFS_RANK=$(( RANK / IFS_GRP + 1 )) IFS_SLAVE=$(( IFS_RANK + STRIPE_SIZE )) - export IFS_RANK export CHIRP_ADD=`get_ip $IFS_RANK` -# Save information +# Save rank information echo $RANK > /dev/shm/RANK echo $IFS_RANK > /dev/shm/IFS_RANK +echo $IFS_SLAVE > /dev/shm/IFS_SLAVE -# Generate Hash services -DHT=$(( RANK % 128 )) -if [ -f /dev/shm/DHTlist ]; then - rm -f /dev/shm/DHTlist -fi -for (( i = 0; i < $PART_SIZE; i = i + 128 )); do - echo `get_ip $i` >> /dev/shm/DHTlist -done - -#Core MTIO logic -if [ $DHT -eq 0 ]; then - RUBY=/home/espinosa/local/bin/ruby - $RUBY $CIOROOT/libexec/hashserver.rb -elif [ $RANK -eq $IFS_RANK ]; then - ifs_head $STRIPE_SIZE - /home/zzhang/cio/bin/collector.sh -elif [ $RANK -lt $IFS_SLAVE ]; then - ifs_slave $IFS_RANK +#Core CDM logic +if [ $IFS_NUM -ne 0 ]; then + if [ $RANK -eq $IFS_RANK ]; then + ifs_head $STRIPE_SIZE + elif [[ $RANK -lt $IFS_SLAVE && $RANK -gt $IFS_RANK ]]; then + ifs_slave $IFS_RANK + else + if [ $RANK -eq 0 ]; then + ifs_mount $IFS_RANK + $CIOROOT/libexec/falkon/runworker-bgp.sh $1 $2 $3 $4 $5 $6 $7 + fi + fi else + if [ $RANK -eq 0 ]; then ifs_mount $IFS_RANK $CIOROOT/libexec/falkon/runworker-bgp.sh $1 $2 $3 $4 $5 $6 $7 + fi fi # Quick hack
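As a worked example of the rank partitioning above (illustrative values): with PART_SIZE=256 and IFS_NUM=4, IFS_GRP = 256/4 = 64, so ranks 0-63 all compute IFS_RANK = 1; with STRIPE_SIZE=2, IFS_SLAVE = 3, so rank 1 runs ifs_head, rank 2 runs ifs_slave, and rank 0 mounts the IFS and launches the Falkon worker.

Modified: usertools/cio/bin/falkon-start.sh =================================================================== --- 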
usertools/cio/bin/falkon-start.sh 2009-09-22 18:28:15 UTC (rev 3112) +++ usertools/cio/bin/falkon-start.sh 2009-09-25 23:02:36 UTC (rev 3113) @@ -12,4 +12,5 @@ echo "ERROR: CHIRPROOT not defined" exit 1 fi -$CIOROOT/libexec/falkon/falkon-start-$CIOARCH.sh $@ +#$CIOROOT/libexec/falkon/falkon-start-$CIOARCH.sh $@ +$CIOROOT/libexec/falkon/falkon-start-bgp_logging.sh $@ Modified: usertools/cio/bin/swift_bgp.sh =================================================================== --- usertools/cio/bin/swift_bgp.sh 2009-09-22 18:28:15 UTC (rev 3112) +++ usertools/cio/bin/swift_bgp.sh 2009-09-25 23:02:36 UTC (rev 3113) @@ -38,7 +38,7 @@ $workdir - 8 + 2.54 1000 @@ -55,4 +55,4 @@ exit 1 fi -swift & "$INFO" - echo "_____________________________________________________________________________" >& "$INFO" - echo >& "$INFO" - echo " $1" >& "$INFO" - echo "_____________________________________________________________________________" >& "$INFO" - echo >& "$INFO" -} - -info() { - infosection "uname -a" - uname -a 2>&1 >& "$INFO" - infosection "id" - id 2>&1 >& "$INFO" - infosection "env" - env 2>&1 >& "$INFO" - infosection "df" - df 2>&1 >& "$INFO" - infosection "/proc/cpuinfo" - cat /proc/cpuinfo 2>&1 >& "$INFO" - infosection "/proc/meminfo" - cat /proc/meminfo 2>&1 >& "$INFO" - infosection "command line" - echo $COMMANDLINE 2>&1 >& "$INFO" -} - -logstate() { - echo "Progress " `date +"%Y-%m-%d %H:%M:%S"` " $@" >& "$INFO" -} - -log() { - echo "$@" >& "$INFO" -} - -fail() { - EC=$1 - shift - log $@ - info - exit $EC -} - -checkError() { - if [ "$?" != "0" ]; then - fail $@ - fi -} - -checkEmpty() { - if [ "$1" == "" ]; then - shift - fail 254 $@ - fi -} - -getarg() { - NAME=$1 - shift - VALUE="" - SHIFTCOUNT=0 - if [ "$1" == "$NAME" ]; then - shift - let "SHIFTCOUNT=$SHIFTCOUNT+1" - while [ "${1:0:1}" != "-" ] && [ "$#" != "0" ]; do - VALUE="$VALUE $1" - shift - let "SHIFTCOUNT=$SHIFTCOUNT+1" - done - else - fail 254 "Missing $NAME argument" - fi - VALUE="${VALUE:1}" -} - -openinfo() { - exec 3<> $1 - INFO=3 -} - -closeinfo() { - exec 3>&- -} - -cioinput() { - INPUT=$1 - FILEPATH=`dirname $INPUT` - FILENAME=`basename $INPUT` - TYPE=${INPUT%%/*} - echo "INPUT_TYPE: $TYPE" >> /dev/shm/cio - if [ "$TYPE" == "common" ] && [ -e /dev/shm/share/$FILENAME ]; then - echo "cioinput(): link for common input $INPUT" >> /dev/shm/cio - ln -s "/dev/shm/share/$FILENAME" "$DIR/$L" - elif [ "$TYPE" == "_concurrent" ]; then - echo "cioinput(): toruscp for intermediate data $INPUT" >> /dev/shm/cio - echo DIR: `pwd` >> /dev/shm/torus - mkdir -p $DIR/$FILEPATH - echo "cioinput(): $INPUT" >> /dev/shm/cio - /home/zzhang/DHT/bin/DHTcp.sh $INPUT $DIR - else - echo "cioinput(): copy from GPFS $INPUT pwd:`pwd` " >> /dev/shm/cio - cp "$PWD/shared/$L" "$DIR/$L" - fi -} -ciooutput() { - OUTPUT=$1 - FILEPATH=`dirname $OUTPUT` - FILENAME=`basename $OUTPUT` - TYPE=${OUTPUT%%/*} - echo "OUTPUT_TYPE: $TYPE" >> /dev/shm/cio - - if [ "$TYPE" == "_concurrent" ]; then - echo "ciooutput(): write intermediate data $OUTPUT" >> /dev/shm/cio - echo `pwd` >> /dev/shm/cio - /home/zzhang/DHT/bin/DHTregister.sh $OUTPUT $RANK - else - echo "ciooutput(): write regular data $OUTPUT" >> /dev/shm/cio - #dd if="$OUTPUT" of="$WFDIR/shared/$OUTPUT" bs=128k - #echo "$OUTPUT /chirp/multi/${CHIRP_ADD}@stripe/" >> /dev/shm/chirp_add - cp "$OUTPUT" /chirp/multi/${CHIRP_ADD}@stripe/ - fi -} - -extractfile() -{ - ARCHIVE="$1" - START_LOCK=$tmp/mtio-lock-started - END_LOCK=$tmp/mtio-lock-finished - current=`pwd` - cd $tmp - mkdir ${START_LOCK} - - EXIT_CODE=$? 
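# mkdir here acts as an atomic test-and-set: exactly one worker per node
# succeeds, extracts the archive, and then creates ${END_LOCK}; every other
# worker falls into the wait loop below until that marker directory appears.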
-    # EXIT_CODE=0 ###### FOR TESTING - forces creation of new dir
-
-    if [ "${EXIT_CODE}" -ne "0" ]; then
-        echo "waiting for data to be extracted"
-        ((i = 0))
-        while (( i == 0 ))
-        do
-            if [ -d "${END_LOCK}" ]; then
-                ((i = 1))
-            fi
-
-            if [ ! -d "${END_LOCK}" ]; then
-                /bin/sleep 1
-            fi
-        done
-    else
-        if [ ! -f "${ARCHIVE}" ]; then
-            echo "archive doesn't exist... exiting"
-            rmdir $START_LOCK
-            exit -2
-        fi
-
-        echo "extract the archive"
-        cd $tmp
-        echo "extracted" >> /dev/shm/extract
-        tar xf $ARCHIVE
-
-        EXIT_CODE=$?
-
-        if [ "${EXIT_CODE}" -ne "0" ]; then
-            echo "Error in untar of ${ARCHIVE}... exit code ${EXIT_CODE}"
-            rmdir $START_LOCK
-            exit ${EXIT_CODE}
-        fi
-
-        mkdir -p ${END_LOCK}
-
-        EXIT_CODE=$?
-        if [ "${EXIT_CODE}" -ne "0" ]; then
-            echo "Error in mkdir ${END_LOCK}... exit code ${EXIT_CODE}"
-            rmdir $START_LOCK
-            exit ${EXIT_CODE}
-        fi
-    fi
-    cd $current
-}
-#/home/zzhang/bashtest/tar.sh
-tmp=/dev/shm/share
-extractfile common.tar
-cd $PWD
-
-RANK=`echo $CONTROL_INIT | awk -F, '{print $4}'`
-echo $@ >> /dev/shm/log
-COMMANDLINE=$@
-WFDIR=$PWD
-ID=$1
-checkEmpty "$ID" "Missing job ID"
-
-shift
-
-getarg "-jobdir" "$@"
-JOBDIR=$VALUE
-shift $SHIFTCOUNT
-
-checkEmpty "$JOBDIR" "Missing job directory prefix"
-mkdir -p /dev/shm/swift-info/$JOBDIR
-
-closeinfo
-openinfo "/dev/shm/swift-info/$JOBDIR/${ID}-info"
-#openinfo "/dev/null"
-
-logstate "LOG_START"
-
-getarg "-e" "$@"
-EXEC=$VALUE
-shift $SHIFTCOUNT
-
-getarg "-out" "$@"
-STDOUT=$VALUE
-shift $SHIFTCOUNT
-
-getarg "-err" "$@"
-STDERR=$VALUE
-shift $SHIFTCOUNT
-
-getarg "-i" "$@"
-STDIN=$VALUE
-shift $SHIFTCOUNT
-
-getarg "-d" "$@"
-DIRS=$VALUE
-shift $SHIFTCOUNT
-
-getarg "-if" "$@"
-INF=$VALUE
-shift $SHIFTCOUNT
-
-getarg "-of" "$@"
-OUTF=$VALUE
-shift $SHIFTCOUNT
-
-getarg "-k" "$@"
-KICKSTART=$VALUE
-shift $SHIFTCOUNT
-
-if [ "$1" == "-a" ]; then
-    shift
-else
-    fail 254 "Missing arguments (-a option)"
-fi
-
-if [ "X$SWIFT_JOBDIR_PATH" != "X" ]; then
-    DIR=${SWIFT_JOBDIR_PATH}/$JOBDIR/$ID
-    COPYNOTLINK=1
-else
-    DIR=/dev/shm/swift-work/$JOBDIR/$ID
-    COPYNOTLINK=0
-fi
-
-PATH=$PATH:/bin:/usr/bin
-
-if [ "$PATHPREFIX" != "" ]; then
-    export PATH=$PATHPREFIX:$PATH
-fi
-
-IFS="^"
-
-logstate "CREATE_JOBDIR"
-mkdir -p $DIR
-
-logstate "CREATE_INPUTDIR"
-
-for D in $DIRS ; do
-    mkdir -p "$DIR/$D"
-    checkError 254 "Failed to create input directory $D"
-done
-
-#cd $DIR
-logstate "LINK_INPUTS"
-for L in $INF ; do
-    if [ $COPYNOTLINK = 1 ]; then
-        cp "$PWD/shared/$L" "$DIR/$L"
-        checkError 254 "Failed to copy input file $L"
-    else
-        cioinput $L
-        #cp "$PWD/shared/$L" "$DIR/$L"
-        checkError 254 "Failed to link input file $L `ls -l $DIR/$L`"
-    fi
-done
-
-logstate "EXECUTE"
-
-cd $DIR
-
-if [ "$KICKSTART" == "" ]; then
-    if [ "$STDIN" == "" ]; then
-        "$EXEC" "$@" 1>"$STDOUT" 2>"$STDERR"
-    else
-        "$EXEC" "$@" 1>"$STDOUT" 2>"$STDERR" <"$STDIN"
-    fi
-    checkError $? "Exit code $?"
-else
-    if [ ! -f "$KICKSTART" ]; then
-        fail 254 "The Kickstart executable ($KICKSTART) was not found"
-    elif [ ! -x "$KICKSTART" ]; then
-        fail 254 "The Kickstart executable ($KICKSTART) does not have the executable bit set"
-    else
-        mkdir -p $WFDIR/kickstart/$JOBDIR
-        if [ "$STDIN" == "" ]; then
-            "$KICKSTART" -H -o "$STDOUT" -e "$STDERR" "$TMPEXEC" "$@" 1>kickstart.xml 2>"$STDERR"
-        else
-            "$KICKSTART" -H -o "$STDOUT" -i "$STDIN" -e "$STDERR" "$TMPEXEC" "$@" 1>kickstart.xml 2>"$STDERR"
-        fi
-        export APPEXIT=$?
-        mv -f kickstart.xml "$WFDIR/kickstart/$JOBDIR/$ID-kickstart.xml" 2>&1 >& "$INFO"
-        checkError 254 "Failed to copy Kickstart record to shared directory"
-        if [ "$APPEXIT" != "0" ]; then
-            fail $APPEXIT "Exit code $APPEXIT"
-        fi
-    fi
-fi
-
-logstate "EXECUTE_DONE"
-
-MISSING=
-for O in $OUTF ; do
-    if [ ! -f "$DIR/$O" ]; then
-        if [ "$MISSING" == "" ]; then
-            MISSING=$O
-        else
-            MISSING="$MISSING, $O"
-        fi
-    fi
-done
-if [ "$MISSING" != "" ]; then
-    fail 254 "The following output files were not created by the application: $MISSING"
-fi
-
-logstate "COPYING_OUTPUTS"
-for O in $OUTF ; do
-    #cp "$DIR/$O" "$WFDIR/shared/$O" 2>&1 >& "$INFO"
-    #cp "$DIR/$O" "$WFDIR/shared/$O"
-    #dd if="$DIR/$O" of="$WFDIR/shared/$JOBDIR/$O" bs=128k
-    #dd if="$DIR/$O" of="$WFDIR/shared/$O" bs=128k
-    ciooutput $O
-    checkError 254 "Failed to copy output file $O to shared directory"
-done
-
-logstate "RM_JOBDIR"
-
-closeinfo
-#rm -f "$WFDIR/info/$JOBDIR/${ID}-info"
-#echo "$WFDIR/info/$JOBDIR/${ID}-info" >> /dev/shm/log
-#mkdir -p "$WFDIR/info/$JOBDIR/"
-#dd if=/dev/shm/swift-info/$JOBDIR/${ID}-info of="$WFDIR/info/$JOBDIR/${ID}-info" bs=128k
-#dd if=/dev/shm/swift-info/$JOBDIR/${ID}-info of="/fuse/tmp/${ID}-info" bs=128k

Deleted: usertools/cio/libexec/falkon/falkon-start-bgp-logging.sh
===================================================================
--- usertools/cio/libexec/falkon/falkon-start-bgp-logging.sh	2009-09-22 18:28:15 UTC (rev 3112)
+++ usertools/cio/libexec/falkon/falkon-start-bgp-logging.sh	2009-09-25 23:02:36 UTC (rev 3113)
@@ -1,148 +0,0 @@
-#!/bin/bash
-
-# Patched falkon-start-bgp-ram.sh which logs GPFS IO activity in ZeptoOS
-CIOROOT=/home/espinosa/cio
-
-
-if [ -z "$3" ]; then
-    echo "usage: $0 <queue_name> <num_nodes> <max_time_min>"
-    echo "usage: $0 prod 1024 60"
-    echo "-- or --"
-    echo "usage: $0 <queue_name> <num_nodes> <max_time_min> <workers_per_node>"
-    echo "usage: $0 prod 1024 60 4"
-    exit 1
-fi
-
-if [ -z "${FALKON_HOME}" ]; then
-    echo "ERROR: environment variable FALKON_HOME not defined"
-    exit 1
-fi
-
-QUEUE_NAME=$1
-PROFILE_NAME="zeptocn-log"
-NUM_NODES=$2
-let NUM_ION=NUM_NODES/64
-MAX_TIME_MIN=$3
-SERVICE_IP="192.168.1.254"
-SERVICE_PORT1=55000
-SERVICE_PORT2=55001
-WORKERS_PER_NODE=4
-if [ ! -z $4 ];then
-    WORKERS_PER_NODE=$4
-fi
-
-
-cp $CIOROOT/tools/zoid/logging-script.sh ${HOME}/zoid-user-script.sh
-chmod +x ${HOME}/zoid-user-script.sh
-
-FALKON_JOB_ID=`falkon-id-get.sh N/A`
-EXIT_CODE=$?
-
-if [ "${EXIT_CODE}" -ne "0" ]; then
-    echo "Error in getting a unique falkon ID.. 'falkon-id-get.sh N/A'"
-    cqdel ${ZOID_JOB_ID}
-    exit ${EXIT_CODE}
-fi
-
-
-ZOID_JOB_ID=`cqsub -q ${QUEUE_NAME} -k ${PROFILE_NAME} -C ${HOME} -t ${MAX_TIME_MIN} -n ${NUM_NODES} -e LD_LIBRARY_PATH=/lib:/fuse/lib:/fuse/usr/lib /bgsys/linux/1.2.020080512/bin/bash /fuse/${FALKON_WORKER_HOME}/run.worker-c-bgp.sh ${SERVICE_IP} ${SERVICE_PORT1} ${SERVICE_PORT2} ${WORKERS_PER_NODE} ${USER} ${FALKON_JOB_ID} ${FALKON_ROOT}`
-
-echo $ZOID_JOB_ID $FALKON_JOB_ID $QUEUE_NAME $NUM_NODES $MAX_TIME_MIN $WORKERS_PER_NODE >>$HOME/.falkonjobs
-
-EXIT_CODE=$?
-
-if [ "${EXIT_CODE}" -ne "0" ]; then
-    echo "Error in submitting job to Cobalt.. 'cqsub -q ${QUEUE_NAME} -k ${PROFILE_NAME} -C ${HOME} -t ${MAX_TIME_MIN} -n ${NUM_NODES} -e LD_LIBRARY_PATH=/lib:/fuse/lib:/fuse/usr/lib /bgsys/linux/1.2.020080512/bin/bash /fuse/${FALKON_WORKER_HOME}/run.worker-c-bgp.sh ${SERVICE_IP} ${SERVICE_PORT1} ${SERVICE_PORT2} ${WORKERS_PER_NODE} ${USER} ${FALKON_JOB_ID} ${FALKON_ROOT}' "
-    exit ${EXIT_CODE}
-fi
-
-falkon-id-update.sh ${FALKON_JOB_ID} ${ZOID_JOB_ID}
-
-EXIT_CODE=$?
-
-if [ "${EXIT_CODE}" -ne "0" ]; then
-    echo "Error in updating cobalt job info for falkon.. 'falkon-id-update.sh ${ZOID_JOB_ID}'"
-    cqdel ${ZOID_JOB_ID}
-    exit ${EXIT_CODE}
-fi
-
-
-FALKON_JOBID_HOME=${FALKON_ROOT}/users/${USER}/${FALKON_JOB_ID}
-
-echo "Submitted job ${ZOID_JOB_ID} to Cobalt, creating the job specific Falkon tree for logs and configuration in ${FALKON_JOBID_HOME}..."
-
-mkdir -p ${FALKON_JOBID_HOME}
-
-cp ${FALKON_HOME}/falkon.env.bgp* ${FALKON_JOBID_HOME}/
-cp -r ${FALKON_HOME}/config ${FALKON_JOBID_HOME}/
-cp ${FALKON_JOBID_HOME}/config/Client-service-URIs.config2 ${FALKON_JOBID_HOME}/config/Client-service-URIs.config
-mkdir -p ${FALKON_JOBID_HOME}/logs/client ${FALKON_JOBID_HOME}/logs/service ${FALKON_JOBID_HOME}/logs/provisioner ${FALKON_JOBID_HOME}/logs/worker
-
-
-DATE=`date +%s`
-echo "$DATE: pre-creating log dirs for Falkon service..."
-RACK_START=0
-RACK_END=48
-SEQUENCE_DIR=`seq -w ${RACK_START} ${RACK_END}`
-PSET_START=1
-PSET_END=16
-for a in ${SEQUENCE_DIR}
-do
-    for ((b=${PSET_START}; b <= ${PSET_END} ; b++)) # Double parentheses, and "LIMIT" with no "$".
-    do
-        DIR_NAME="ion-R${a}-${b}"
-        mkdir -p ${FALKON_JOBID_HOME}/logs/service/$DIR_NAME
-    done
-done
-
-for ((b=${PSET_START}; b <= ${PSET_END} ; b++)) # Double parentheses, and "LIMIT" with no "$".
-do
-    DIR_NAME="ion-${b}"
-    mkdir -p ${FALKON_JOBID_HOME}/logs/service/$DIR_NAME
-done
-
-
-DATE=`date +%s`
-echo "$DATE: done creating log dirs for Falkon service!"
-
-
-
-FALKON_HOME_RAM=/tmp/${USER}/falkon
-
-ln -s ${FALKON_HOME}/apps ${FALKON_JOBID_HOME}/apps
-ln -s ${FALKON_HOME_RAM}/container ${FALKON_JOBID_HOME}/container
-ln -s ${FALKON_HOME}/service ${FALKON_JOBID_HOME}/service
-ln -s ${FALKON_HOME}/worker ${FALKON_JOBID_HOME}/worker
-ln -s ${FALKON_HOME}/AstroPortal ${FALKON_JOBID_HOME}/AstroPortal
-ln -s ${FALKON_HOME}/client ${FALKON_JOBID_HOME}/client
-ln -s ${FALKON_HOME}/monitor ${FALKON_JOBID_HOME}/monitor
-ln -s ${FALKON_HOME}/bin ${FALKON_JOBID_HOME}/bin
-ln -s ${FALKON_HOME}/config ${FALKON_JOBID_HOME}/config
-ln -s ${FALKON_HOME}/ploticus ${FALKON_JOBID_HOME}/ploticus
-ln -s ${FALKON_HOME}/webserver ${FALKON_JOBID_HOME}/webserver
-ln -s ${FALKON_HOME}/workloads ${FALKON_JOBID_HOME}/workloads
-ln -s ${FALKON_HOME}/id ${FALKON_JOBID_HOME}/id
-ln -s ${FALKON_HOME}/apache-ant-1.7.0 ${FALKON_JOBID_HOME}/apache-ant-1.7.0
-ln -s ${FALKON_HOME}/ibm-java2-ppc64-50 ${FALKON_JOBID_HOME}/ibm-java2-ppc64-50
-ln -s ${FALKON_HOME_RAM}/ibm-java2-ppc-50 ${FALKON_JOBID_HOME}/ibm-java2-ppc-50
-ln -s ${FALKON_HOME}/falkon.tgz ${FALKON_JOBID_HOME}/falkon.tgz
-
-
-if [ ! -d "${FALKON_JOBID_HOME}" ]; then
-    echo "ERROR: invalid path ${FALKON_JOBID_HOME}... exiting"
-    cqdel ${ZOID_JOB_ID}
-    exit 1
-fi
-
-echo "Successfully submitted the job to Cobalt, and set up the job specific Falkon tree!"
-echo "To monitor the job status, type 'cqstat | grep ${USER}'; once it is in running state, you can use the Falkon specific command ...."
-echo "To submit your Falkon-based workload, type: ....; you can do this any time, the falkon workload will wait for the resources to come online, and will only be submitted when everything is ready; the script is run in the background, so the workload will run even if the ssh session gets disconnected."
-echo ""
-echo "Remember, your job id is ${ZOID_JOB_ID}, and if you need to look through the logs manually for anything, remember that you can find them at ${HOME}/${ZOID_JOB_ID}.output, ${HOME}/${ZOID_JOB_ID}.error, and ${FALKON_JOBID_HOME}/logs/..."
-
-
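[Editor's note: the core CDM logic shown earlier assigns each torus rank a role with plain integer arithmetic. The following is a minimal standalone sketch, not part of the repository, that replicates that arithmetic; PART_SIZE, IFS_NUM, and STRIPE_SIZE stand in for the script's $8, $9, and ${10}, and the loop replaces get_rank, which on the real machine returns the node's own rank.]

#!/bin/bash
# Illustrative only: mirrors the rank arithmetic of the CDM worker script.
PART_SIZE=256    # nodes in the partition ($8, also BG_SIZE)
IFS_NUM=4        # number of intermediate file systems ($9)
STRIPE_SIZE=3    # width of the slave stripe behind each IFS head (${10})

IFS_GRP=$(( PART_SIZE / IFS_NUM ))           # nodes served per IFS
for RANK in 0 1 2 3 64 100; do               # get_rank would return one of these
    IFS_RANK=$(( RANK / IFS_GRP + 1 ))       # head rank for this node's group
    IFS_SLAVE=$(( IFS_RANK + STRIPE_SIZE ))  # first rank past the slave stripe
    if [ $RANK -eq $IFS_RANK ]; then
        echo "rank $RANK: IFS head"
    elif [[ $RANK -lt $IFS_SLAVE && $RANK -gt $IFS_RANK ]]; then
        echo "rank $RANK: IFS slave behind head $IFS_RANK"
    else
        echo "rank $RANK: compute node, mounts the IFS served by rank $IFS_RANK"
    fi
done

[With these made-up numbers, rank 1 classifies itself as the head, ranks 2-3 as its slaves, and every other rank falls through to the worker branch; note that in r3113 that branch additionally starts the Falkon worker only on rank 0.]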
Modified: usertools/cio/libexec/falkon/falkon-start-bgp.sh
===================================================================
--- usertools/cio/libexec/falkon/falkon-start-bgp.sh	2009-09-22 18:28:15 UTC (rev 3112)
+++ usertools/cio/libexec/falkon/falkon-start-bgp.sh	2009-09-25 23:02:36 UTC (rev 3113)
@@ -24,7 +24,7 @@
     exit 1
 fi

-PROFILE_NAME="zeptocn-swift"
+PROFILE_NAME="zeptoos"
 QUEUE_NAME=$1
 NUM_NODES=$2
 let NUM_ION=NUM_NODES/64
@@ -46,7 +46,7 @@
     STR=$6
 fi

-cp $CIOROOT/libexec/zoid/logging-script.sh ${HOME}/zoid-user-script.sh
+cp ${FALKON_HOME}/bin/zoid-user-script.sh ${HOME}/zoid-user-script.sh
 chmod +x ${HOME}/zoid-user-script.sh

 FALKON_JOB_ID=`falkon-id-get.sh N/A`

Copied: usertools/cio/libexec/falkon/falkon-start-bgp_logging.sh (from rev 2853, usertools/cio/libexec/falkon/falkon-start-bgp-logging.sh)
===================================================================
--- usertools/cio/libexec/falkon/falkon-start-bgp_logging.sh	(rev 0)
+++ usertools/cio/libexec/falkon/falkon-start-bgp_logging.sh	2009-09-25 23:02:36 UTC (rev 3113)
@@ -0,0 +1,165 @@
+#!/bin/bash
+
+if [ $# -lt 3 ]; then
+    cat << EOF
+Usage: $0 <queue_name> <num_nodes> <max_time_min>
+Example: $0 prod 1024 60
+--or--
+Usage: $0 <queue_name> <num_nodes> <max_time_min> \
+    <workers_per_node> <data_num> <stripe_size>
+EOF
+    exit 1
+fi
+
+if [ -z $FALKON_HOME ]; then
+    echo "ERROR: environment variable FALKON_HOME not defined"
+    exit 1
+fi
+if [ -z $CIOROOT ]; then
+    echo "ERROR: CIOROOT env not defined"
+    exit 1
+fi
+if [[ $CIOARCH != "bgp" ]]; then
+    echo "ERROR: Wrong architecture. Must be bgp"
+    exit 1
+fi
+
+PROFILE_NAME="zeptoos"
+QUEUE_NAME=$1
+NUM_NODES=$2
+let NUM_ION=NUM_NODES/64
+MAX_TIME_MIN=$3
+SERVICE_IP="192.168.1.254"
+SERVICE_PORT1=55000
+SERVICE_PORT2=55001
+
+WORKERS_PER_NODE=4
+DATA_NUM=1
+STR=1
+if [ -n "$4" ];then
+    WORKERS_PER_NODE=$4
+fi
+if [ -n "$5" ];then
+    DATA_NUM=$5
+fi
+if [ -n "$6" ];then
+    STR=$6
+fi
+
+cp $CIOROOT/libexec/zoid/logging-script.sh ${HOME}/zoid-user-script.sh
+chmod +x ${HOME}/zoid-user-script.sh
+
+FALKON_JOB_ID=`falkon-id-get.sh N/A`
+EXIT_CODE=$?
+
+if [ "${EXIT_CODE}" -ne "0" ]; then
+    echo "Error in getting a unique falkon ID.. 'falkon-id-get.sh N/A'"
+    cqdel ${ZOID_JOB_ID}
+    exit ${EXIT_CODE}
+fi
+
+
+WORKER_SCRIPT=$CIOROOT/bin/ciologic-$CIOARCH.sh
+SUBMIT_CMD="cqsub -q ${QUEUE_NAME} -k ${PROFILE_NAME} -C ${HOME} -t \
+    ${MAX_TIME_MIN} -n ${NUM_NODES} \
+    -e LD_LIBRARY_PATH=/lib:/fuse/lib:/fuse/usr/lib:CIOROOT=$CIOROOT:CIOARCH=$CIOARCH:CHIRPROOT=$CHIRPROOT \
+    /bgsys/linux/1.2.020080512/bin/bash $WORKER_SCRIPT \
+    ${SERVICE_IP} ${SERVICE_PORT1} ${SERVICE_PORT2} ${WORKERS_PER_NODE} \
+    ${USER} ${FALKON_JOB_ID} ${FALKON_ROOT} \
+    $NUM_NODES $DATA_NUM $STR" # MTIO parameters
+
+ZOID_JOB_ID=`$SUBMIT_CMD`
+EXIT_CODE=$?
+
+if [ "${EXIT_CODE}" -ne "0" ]; then
+    echo "Error in submitting job to Cobalt.. $SUBMIT_CMD"
+    exit ${EXIT_CODE}
+fi
+
+falkon-id-update.sh ${FALKON_JOB_ID} ${ZOID_JOB_ID}
+
+EXIT_CODE=$?
+
+if [ "${EXIT_CODE}" -ne "0" ]; then
+    echo "Error in updating cobalt job info for falkon.. 'falkon-id-update.sh ${ZOID_JOB_ID}'"
+    cqdel ${ZOID_JOB_ID}
+    exit ${EXIT_CODE}
+fi
+
+
+FALKON_JOBID_HOME=${FALKON_ROOT}/users/${USER}/${FALKON_JOB_ID}
+
+echo "Submitted job ${ZOID_JOB_ID} to Cobalt, creating the job specific Falkon tree for logs and configuration in ${FALKON_JOBID_HOME}..."
+
+mkdir -p ${FALKON_JOBID_HOME}
+
+cp ${FALKON_HOME}/falkon.env.bgp* ${FALKON_JOBID_HOME}/
+cp -r ${FALKON_HOME}/config ${FALKON_JOBID_HOME}/
+cp ${FALKON_JOBID_HOME}/config/Client-service-URIs.config2 ${FALKON_JOBID_HOME}/config/Client-service-URIs.config
+mkdir -p ${FALKON_JOBID_HOME}/logs/client ${FALKON_JOBID_HOME}/logs/service ${FALKON_JOBID_HOME}/logs/provisioner ${FALKON_JOBID_HOME}/logs/worker
+
+
+DATE=`date +%s`
+echo "$DATE: pre-creating log dirs for Falkon service..."
+RACK_START=0
+RACK_END=48
+SEQUENCE_DIR=`seq -w ${RACK_START} ${RACK_END}`
+PSET_START=1
+PSET_END=16
+for a in ${SEQUENCE_DIR}
+do
+    for ((b=${PSET_START}; b <= ${PSET_END} ; b++)) # Double parentheses, and "LIMIT" with no "$".
+    do
+        DIR_NAME="ion-R${a}-${b}"
+        mkdir -p ${FALKON_JOBID_HOME}/logs/service/$DIR_NAME
+    done
+done
+
+for ((b=${PSET_START}; b <= ${PSET_END} ; b++)) # Double parentheses, and "LIMIT" with no "$".
+do
+    DIR_NAME="ion-${b}"
+    mkdir -p ${FALKON_JOBID_HOME}/logs/service/$DIR_NAME
+done
+
+
+DATE=`date +%s`
+echo "$DATE: done creating log dirs for Falkon service!"
+
+FALKON_HOME_RAM=/tmp/${USER}/falkon
+
+ln -s ${FALKON_HOME}/apps ${FALKON_JOBID_HOME}/apps
+ln -s ${FALKON_HOME_RAM}/container ${FALKON_JOBID_HOME}/container
+ln -s ${FALKON_HOME}/service ${FALKON_JOBID_HOME}/service
+ln -s ${FALKON_HOME}/worker ${FALKON_JOBID_HOME}/worker
+ln -s ${FALKON_HOME}/AstroPortal ${FALKON_JOBID_HOME}/AstroPortal
+ln -s ${FALKON_HOME}/client ${FALKON_JOBID_HOME}/client
+ln -s ${FALKON_HOME}/monitor ${FALKON_JOBID_HOME}/monitor
+ln -s ${FALKON_HOME}/bin ${FALKON_JOBID_HOME}/bin
+ln -s ${FALKON_HOME}/config ${FALKON_JOBID_HOME}/config
+ln -s ${FALKON_HOME}/ploticus ${FALKON_JOBID_HOME}/ploticus
+ln -s ${FALKON_HOME}/webserver ${FALKON_JOBID_HOME}/webserver
+ln -s ${FALKON_HOME}/workloads ${FALKON_JOBID_HOME}/workloads
+ln -s ${FALKON_HOME}/id ${FALKON_JOBID_HOME}/id
+ln -s ${FALKON_HOME}/apache-ant-1.7.0 ${FALKON_JOBID_HOME}/apache-ant-1.7.0
+ln -s ${FALKON_HOME}/ibm-java2-ppc64-50 ${FALKON_JOBID_HOME}/ibm-java2-ppc64-50
+ln -s ${FALKON_HOME_RAM}/ibm-java2-ppc-50 ${FALKON_JOBID_HOME}/ibm-java2-ppc-50
+ln -s ${FALKON_HOME}/falkon.tgz ${FALKON_JOBID_HOME}/falkon.tgz
+
+
+if [ ! -d "${FALKON_JOBID_HOME}" ]; then
+    echo "ERROR: invalid path ${FALKON_JOBID_HOME}... exiting"
+    cqdel ${ZOID_JOB_ID}
+    exit 1
+fi
+
+echo $ZOID_JOB_ID $FALKON_JOB_ID $QUEUE_NAME \
+    $NUM_NODES $MAX_TIME_MIN $WORKERS_PER_NODE >>$HOME/.falkonjobs
+
+echo "Successfully submitted the job to Cobalt, and set up the job specific Falkon tree!"
+echo "To monitor the job status, type 'cqstat | grep ${USER}'; once it is in running state, you can use the Falkon specific command ...."
+echo "To submit your Falkon-based workload, type: ....; you can do this any time, the falkon workload will wait for the resources to come online, and will only be submitted when everything is ready; the script is run in the background, so the workload will run even if the ssh session gets disconnected."
+echo ""
+echo "Remember, your job id is ${ZOID_JOB_ID}, and if you need to look through the logs manually for anything, remember that you can find them at ${HOME}/${ZOID_JOB_ID}.output, ${HOME}/${ZOID_JOB_ID}.error, and ${FALKON_JOBID_HOME}/logs/..."
+
+
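[Editor's note: a hypothetical invocation of the new falkon-start-bgp_logging.sh; the queue name and sizes below are made up, and the positional meanings follow the script's $1 through $6.]

# Illustration only: start Falkon with GPFS I/O logging on 1024 nodes of the
# prod queue for 60 minutes, with 4 workers per node. The last two values feed
# the MTIO parameters handed to $CIOROOT/bin/ciologic-bgp.sh: DATA_NUM (the
# number of intermediate file systems) and STR (the stripe size).
falkon-start-bgp_logging.sh prod 1024 60 4 16 2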
From noreply at svn.ci.uchicago.edu  Tue Sep 29 12:42:30 2009
From: noreply at svn.ci.uchicago.edu (noreply at svn.ci.uchicago.edu)
Date: Tue, 29 Sep 2009 12:42:30 -0500 (CDT)
Subject: [Swift-commit] r3114 - SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations
Message-ID: <20090929174230.414A89CC8A@vm-125-59.ci.uchicago.edu>

Author: andric
Date: 2009-09-29 12:42:29 -0500 (Tue, 29 Sep 2009)
New Revision: 3114

Modified:
   SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/clustermassPerms.swift
Log:
production version

Modified: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/clustermassPerms.swift
===================================================================
--- SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/clustermassPerms.swift	2009-09-25 23:02:36 UTC (rev 3113)
+++ SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/clustermassPerms.swift	2009-09-29 17:42:29 UTC (rev 3114)
@@ -41,8 +41,8 @@
 string h = "lh";
 float vertexThresh[] = [3.84];
 float rmm_vals[] = [3.1];
-#int permbrains[] = [1:1000:1];
-int permbrains[] = [1];
+int permbrains[] = [1:1000:1];
+#int permbrains[] = [1];

 foreach id in idss{
     foreach perm in permbrains{

From noreply at svn.ci.uchicago.edu  Tue Sep 29 12:43:18 2009
From: noreply at svn.ci.uchicago.edu (noreply at svn.ci.uchicago.edu)
Date: Tue, 29 Sep 2009 12:43:18 -0500 (CDT)
Subject: [Swift-commit] r3115 - SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/scripts
Message-ID: <20090929174318.66B5A9CC8A@vm-125-59.ci.uchicago.edu>

Author: andric
Date: 2009-09-29 12:43:18 -0500 (Tue, 29 Sep 2009)
New Revision: 3115

Modified:
   SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/scripts/generatePerm.R
Log:
sampling from chi sq distribution

Modified: SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/scripts/generatePerm.R
===================================================================
--- SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/scripts/generatePerm.R	2009-09-29 17:42:29 UTC (rev 3114)
+++ SwiftApps/SIDGrid/swift/projects/andric/peakfit_pilots/PK2/turnpointAnalysis/permutations/scripts/generatePerm.R	2009-09-29 17:43:18 UTC (rev 3115)
@@ -3,5 +3,5 @@
 print(allinputs)
 outname <- noquote(strsplit(allinputs," ")[[1]][1])
 print(outname)
-brain <- rnorm(130934,mean=0.78121,sd=1.092181)
+brain <- rchisq(130934,df=1)
 write.table(round(brain,3),outname,row.names=F,col.names=F,quote=F)
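[Editor's note: r3115 swaps the synthetic null brains from Gaussian draws to chi-squared draws. A quick standalone sanity check of generatePerm.R outside Swift, assuming allinputs is read from the R_SWIFT_ARGS environment variable; if the script obtains its arguments differently, adjust accordingly. The output file name is made up.]

# Hypothetical check, run from the permutations/scripts directory:
export R_SWIFT_ARGS="perm_0001.txt"
R --no-save < generatePerm.R
# perm_0001.txt should then hold 130934 non-negative values (one per surface
# vertex); their mean should sit near 1, the mean of a chi-squared
# distribution with df=1, rather than the 0.78121 used by the old rnorm() call.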