nFactors/0000755000176200001440000000000013637562616012050 5ustar liggesusersnFactors/NAMESPACE0000644000176200001440000000276413637464436013301 0ustar liggesusers# Generated by roxygen2: do not edit by hand S3method(boxplot,structureSim) S3method(plot,nScree) S3method(plot,structureSim) S3method(print,nFactors) S3method(print,nScree) S3method(print,structureSim) S3method(summary,nFactors) S3method(summary,nScree) S3method(summary,structureSim) export(bentlerParameters) export(componentAxis) export(corFA) export(diagReplace) export(eigenBootParallel) export(eigenComputes) export(eigenFrom) export(generateStructure) export(is.nFactors) export(is.nScree) export(is.structureSim) export(iterativePrincipalAxis) export(makeCor) export(moreStats) export(nBartlett) export(nBentler) export(nCng) export(nMreg) export(nScree) export(nSeScree) export(parallel) export(plotParallel) export(plotnScree) export(plotuScree) export(principalAxis) export(principalComponents) export(rRecovery) export(structureSim) export(studySim) import(lattice) importFrom(MASS,ginv) importFrom(MASS,mvrnorm) importFrom(graphics,abline) importFrom(graphics,boxplot) importFrom(graphics,lines) importFrom(graphics,par) importFrom(graphics,plot) importFrom(graphics,plot.default) importFrom(graphics,text) importFrom(psych,sim.structure) importFrom(stats,coef) importFrom(stats,cor) importFrom(stats,cov) importFrom(stats,cov2cor) importFrom(stats,dnorm) importFrom(stats,factanal) importFrom(stats,lm) importFrom(stats,median) importFrom(stats,nlminb) importFrom(stats,pchisq) importFrom(stats,pt) importFrom(stats,qnorm) importFrom(stats,sd) nFactors/data/0000755000176200001440000000000013636677340012761 5ustar liggesusersnFactors/data/dFactors.rda0000644000176200001440000000203513634757216015215 0ustar liggesusers Cliff datasets: Cliff (1970, p. 165) Raiche dataset: Raiche, Langevin, Riopel and Mauffette (2006) Raiche dataset: Raiche, Riopel and Blais (2006, p. 9) Tucker datasets: Tucker \emph{et al}. (1969, p. 442) } \usage{ dFactors } \description{ Classical examples of eigenvalue vectors used to study the number of factors to retain in the literature. These examples generally give the number of subjects used to obtain these eigenvalues. The number of subjects is used with the parallel analysis. } \details{ Other datasets will be added in future versions of the package. } \examples{ # EXAMPLES FROM DATASET data(dFactors) # COMMAND TO VISUALIZE THE CONTENT AND ATTRIBUTES OF THE DATASETS names(dFactors) attributes(dFactors) dFactors$Cliff1$eigenvalues dFactors$Cliff1$nsubjects # SCREE PLOT OF THE Cliff1 DATASET plotuScree(dFactors$Cliff1$eigenvalues) } \references{ Bartholomew, D. J., Steele, F., Moustaki, I. and Galbraith, J. I. (2002). \emph{The analysis and interpretation of multivariate data for social scientists}. Boca Raton, FL: Chapman and Hall. Bentler, P. M. and Yuan, K.-H. (1998). Tests for linear trend in the smallest eigenvalues of the correlation matrix. \emph{Psychometrika, 63}(2), 131-144. Buja, A. and Eyuboglu, N. (1992). Remarks on parallel analysis. \emph{Multivariate Behavioral Research, 27}(4), 509-540. Cliff, N. (1970). The relation between sample and population characteristic vectors. \emph{Psychometrika, 35}(2), 163-178. Hand, D. J., Daly, F., Lunn, A. D., McConway, K. J. and Ostrowski, E. (1994). 
\emph{A handbook of small data sets}. Boca Raton, FL: Chapman and Hall. Lawley, D. N. and Maxwell, A. E. (1971). \emph{Factor analysis as a statistical method} (2nd edition). London: Butterworth. Raiche, G., Langevin, L., Riopel, M. and Mauffette, Y. (2006). Etude exploratoire de la dimensionnalite et des facteurs expliques par une traduction francaise de l'Inventaire des approches d'enseignement de Trigwell et Prosser dans trois universite quebecoises. \emph{Mesure et Evaluation en Education, 29}(2), 41-61. Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. Tucker, L. D., Koopman, R. F. and Linn, R. L. (1969). Evaluation of factor analytic research procedures by mean of simulated correlation matrices. \emph{Psychometrika, 34}(4), 421-459. Zoski, K. and Jurs, S. (1993). Using multiple regression to determine the number of factors to retain in factor analysis. \emph{Multiple Linear Regression Viewpoint, 20}(1), 5-9. } \keyword{datasets} nFactors/man/plotnScree.Rd0000644000176200001440000000476013635511660015226 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plotnScree.R \name{plotnScree} \alias{plotnScree} \title{Scree Plot According to a nScree Object Class} \usage{ plotnScree(nScree, legend = TRUE, ylab = "Eigenvalues", xlab = "Components", main = "Non Graphical Solutions to Scree Test") } \arguments{ \item{nScree}{Results of a previous \code{nScree} analysis} \item{legend}{Logical indicator of the presence or not of a legend} \item{ylab}{Label of the y axis (default to \code{"Eigenvalue"})} \item{xlab}{Label of the x axis (default to \code{"Component"})} \item{main}{Main title (default to \code{"Non Graphical Solutions to the Scree Test"})} } \value{ Nothing returned. } \description{ Plot a scree plot adding information about a non graphical \code{nScree} analysis. } \examples{ ## INITIALISATION data(dFactors) # Load the nFactors dataset attach(dFactors) vect <- Raiche # Use the second example from Buja and Eyuboglu # (1992, p. 519, nsubjects not specified by them) eigenvalues <- vect$eigenvalues # Extract the observed eigenvalues nsubjects <- vect$nsubjects # Extract the number of subjects variables <- length(eigenvalues) # Compute the number of variables rep <- 100 # Number of replications for the parallel analysis cent <- 0.95 # Centile value of the parallel analysis ## PARALLEL ANALYSIS (qevpea for the centile criterion, mevpea for the mean criterion) aparallel <- parallel(var = variables, subject = nsubjects, rep = rep, cent = cent)$eigen$qevpea # The 95 centile ## NOMBER OF FACTORS RETAINED ACCORDING TO DIFFERENT RULES results <- nScree(eig = eigenvalues, aparallel = aparallel ) results ## PLOT ACCORDING TO THE nScree CLASS plotnScree(results) } \references{ Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. 
} \seealso{ \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotParallel}}, \code{\link{parallel}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{Graphics} nFactors/man/bentlerParameters.Rd0000644000176200001440000001643013620574733016570 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/bentlerParameters.r \name{bentlerParameters} \alias{bentlerParameters} \title{Bentler and Yuan's Computation of the LRT Index and the Linear Trend Coefficients} \usage{ bentlerParameters(x, N, nFactors, log = TRUE, cor = TRUE, minPar = c(min(lambda) - abs(min(lambda)) + 0.001, 0.001), maxPar = c(max(lambda), lm(lambda ~ I(length(lambda):1))$coef[2]), resParx = c(0.01, 2), resPary = c(0.01, 2), graphic = TRUE, resolution = 30, typePlot = "wireframe", ...) } \arguments{ \item{x}{numeric: a \code{vector} of eigenvalues, a \code{matrix} of correlations or of covariances or a \code{data.frame} of data} \item{N}{numeric: number of subjects.} \item{nFactors}{numeric: number of components to test.} \item{log}{logical: if \code{TRUE} the minimization is applied on the log values.} \item{cor}{logical: if \code{TRUE} computes eigenvalues from a correlation matrix, else from a covariance matrix} \item{minPar}{numeric: minimums for the coefficient of the linear trend.} \item{maxPar}{numeric: maximums for the coefficient of the linear trend.} \item{resParx}{numeric: restriction on the \eqn{\alpha} coefficient (x) to graph the function to minimize.} \item{resPary}{numeric: restriction on the \eqn{\beta} coefficient (y) to graph the function to minimize.} \item{graphic}{logical: if \code{TRUE} plots the minimized function \code{"wireframe"}, \code{"contourplot"} or \code{"levelplot"}.} \item{resolution}{numeric: resolution of the 3D graph (number of points from \eqn{\alpha} and from \eqn{\beta}).} \item{typePlot}{character: plots the minimized function according to a 3D plot: \code{"wireframe"}, \code{"contourplot"} or \code{"levelplot"}.} \item{...}{variable: additionnal parameters from the \code{"wireframe"}, \code{"contourplot"} or \code{"levelplot"} \code{lattice} functions. Also additionnal parameters for the \code{eigenFrom} function.} } \value{ \item{nFactors}{ numeric: vector of the number of factors retained by the Bentler and Yuan's procedure. } \item{details}{ numeric: matrix of the details of the computation.} } \description{ This function computes the Bentler and Yuan's (1996, 1998) \emph{LRT} index for the linear trend in eigenvalues of a covariance matrix. The related \eqn{\chi^2} and \emph{p}-value are also computed. This function is generally called from the \code{nBentler} function. But it could be of use for graphing the linear trend function and to study it's behavior. } \details{ The implemented Bentler and Yuan's procedure must be used with care because the minimized function is not always stable. In many cases, constraints must applied to obtain a solution. The actual implementation did, but the user can modify these constraints. 
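For instance, when the default minimization does not converge, wider bounds can be supplied through \code{minPar} and \code{maxPar}. The following sketch is only an illustration with arbitrary bounds (the eigenvalue vector \code{bentler2} and the sample size 649 are those defined in the examples below): \preformatted{
## Hedged sketch: widen the constraints on the (alpha, beta) linear trend
## coefficients; bentler2 and N = 649 come from the examples section below
bentlerParameters(x=bentler2, N=649, nFactors=2, cor=FALSE, graphic=FALSE,
                  minPar=c(0.001, 0.001), maxPar=c(10, 10))
}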
The hypothesis tested (Bentler and Yuan, 1996, equation 10) is: \cr \cr (1) \eqn{\qquad \qquad H_k: \lambda_{k+i} = \alpha + \beta x_i, (i = 1, \ldots, q)} \cr The solution of the following simultaneous equations is needed to find \eqn{(\alpha, \beta) \in} \cr (2) \eqn{\qquad \qquad f(x) = \sum_{i=1}^q \frac{ [ \lambda_{k+j} - N \alpha + \beta x_j ] x_j}{(\alpha + \beta x_j)^2} = 0} \cr \cr and \eqn{\qquad \qquad g(x) = \sum_{i=1}^q \frac{ \lambda_{k+j} - N \alpha + \beta x_j x_j}{(\alpha + \beta x_j)^2} = 0} \cr The solution to this system of equations was implemented by minimizing the following equation: \cr (3) \eqn{\qquad \qquad (\alpha, \beta) \in \inf{[h(x)]} = \inf{\log{[f(x)^2 + g(x)^2}}]} \cr The likelihood ratio test \eqn{LRT} proposed by Bentler and Yuan (1996, equation 7) follows a \eqn{\chi^2} probability distribution with \eqn{q-2} degrees of freedom and is equal to: \cr (4) \eqn{\qquad \qquad LRT = N(k - p)\left\{ {\ln \left( {{n \over N}} \right) + 1} \right\} - N\sum\limits_{j = k + 1}^p {\ln \left\{ {{{\lambda _j } \over {\alpha + \beta x_j }}} \right\}} + n\sum\limits_{j = k + 1}^p {\left\{ {{{\lambda _j } \over {\alpha + \beta x_j }}} \right\}} } \cr With \eqn{p} beeing the number of eigenvalues, \eqn{k} the number of eigenvalues to test, \eqn{q} the \eqn{p-k} remaining eigenvalues, \eqn{N} the sample size, and \eqn{n = N-1}. Note that there is an error in the Bentler and Yuan equation, the variables \eqn{N} and \eqn{n} beeing inverted in the preceeding equation 4. A better strategy proposed by Bentler an Yuan (1998) is to use a minimized \eqn{\chi^2} solution. This strategy will be implemented in a future version of the \pkg{nFactors} package. } \examples{ ## ................................................ ## SIMPLE EXAMPLE OF THE BENTLER AND YUAN PROCEDURE # Bentler (1996, p. 309) Table 2 - Example 2 ............. n=649 bentler2<-c(5.785, 3.088, 1.505, 0.582, 0.424, 0.386, 0.360, 0.337, 0.303, 0.281, 0.246, 0.238, 0.200, 0.160, 0.130) results <- nBentler(x=bentler2, N=n, details=TRUE) results # Two different figures to verify the convergence problem identified with # the 2th component bentlerParameters(x=bentler2, N=n, nFactors= 2, graphic=TRUE, typePlot="contourplot", resParx=c(0,9), resPary=c(0,9), cor=FALSE) bentlerParameters(x=bentler2, N=n, nFactors= 4, graphic=TRUE, drape=TRUE, resParx=c(0,9), resPary=c(0,9), scales = list(arrows = FALSE) ) plotuScree(x=bentler2, model="components", main=paste(results$nFactors, " factors retained by the Bentler and Yuan's procedure (1996, p. 309)", sep="")) # ........................................................ # Bentler (1998, p. 140) Table 3 - Example 1 ............. n <- 145 example1 <- c(8.135, 2.096, 1.693, 1.502, 1.025, 0.943, 0.901, 0.816, 0.790,0.707, 0.639, 0.543,0.533, 0.509, 0.478, 0.390, 0.382, 0.340, 0.334, 0.316, 0.297,0.268, 0.190, 0.173) results <- nBentler(x=example1, N=n, details=TRUE) results # Two different figures to verify the convergence problem identified with # the 10th component bentlerParameters(x=example1, N=n, nFactors= 10, graphic=TRUE, typePlot="contourplot", resParx=c(0,0.4), resPary=c(0,0.4)) bentlerParameters(x=example1, N=n, nFactors= 10, graphic=TRUE, drape=TRUE, resParx=c(0,0.4), resPary=c(0,0.4), scales = list(arrows = FALSE) ) plotuScree(x=example1, model="components", main=paste(results$nFactors, " factors retained by the Bentler and Yuan's procedure (1998, p. 140)", sep="")) # ........................................................ } \references{ Bentler, P. M. and Yuan, K.-H. 
(1996). Test of linear trend in eigenvalues of a covariance matrix with application to data analysis. \emph{British Journal of Mathematical and Statistical Psychology, 49}, 299-312. Bentler, P. M. and Yuan, K.-H. (1998). Test of linear trend in the smallest eigenvalues of the correlation matrix. \emph{Psychometrika, 63}(2), 131-144. } \seealso{ \code{\link{nBartlett}}, \code{\link{nBentler}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege \cr \email{David.Magis@ulg.ac.be} } \keyword{multivariate} nFactors/man/iterativePrincipalAxis.Rd0000644000176200001440000000772013620574733017576 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/iterativePrincipalAxis.r \name{iterativePrincipalAxis} \alias{iterativePrincipalAxis} \title{Iterative Principal Axis Analysis} \usage{ iterativePrincipalAxis(R, nFactors = 2, communalities = "component", iterations = 20, tolerance = 0.001) } \arguments{ \item{R}{numeric: correlation or covariance matrix} \item{nFactors}{numeric: number of factors to retain} \item{communalities}{character: initial values for communalities (\code{"component", "maxr", "ginv" or "multiple"})} \item{iterations}{numeric: maximum number of iterations to obtain a solution} \item{tolerance}{numeric: minimal difference in the estimated communalities after a given iteration} } \value{ values numeric: variance of each component varExplained numeric: variance explained by each component varExplained numeric: cumulative variance explained by each component loadings numeric: loadings of each variable on each component iterations numeric: maximum number of iterations to obtain a solution tolerance numeric: minimal difference in the estimated communalities after a given iteration } \description{ The \code{iterativePrincipalAxis} function returns a principal axis analysis with iterated communality estimates. Four different choices of initial communality estimates are given: maximum correlation, multiple correlation (usual and generalized inverse) or estimates based on the sum of the squared principal component analysis loadings. Generally, statistical packages initialize the communalities at the multiple correlation value. Unfortunately, this strategy cannot always deal with singular correlation or covariance matrices. If a generalized inverse, the maximum correlation or the estimated communalities based on the sum of loadings are used instead, then a solution can be computed. } \examples{ ## ................................................ # Example from Kim and Mueller (1978, p. 10) # Population: upper diagonal # Simulated sample: lower diagnonal R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, .5600, 1.000, .4749, .2196, .1912, .2979, .4800, .4200, 1.000, .2079, .2010, .2445, .2240, .1960, .1680, 1.000, .4334, .3197, .1920, .1680, .1440, .4200, 1.000, .4207, .1600, .1400, .1200, .3500, .3000, 1.000), nrow=6, byrow=TRUE) # Factor analysis: Principal axis factoring with iterated communalities # Kim and Mueller (1978, p. 
23) # Replace upper diagonal with lower diagonal RU <- diagReplace(R, upper=TRUE) nFactors <- 2 fComponent <- iterativePrincipalAxis(RU, nFactors=nFactors, communalities="component") fComponent rRecovery(RU,fComponent$loadings, diagCommunalities=FALSE) fMaxr <- iterativePrincipalAxis(RU, nFactors=nFactors, communalities="maxr") fMaxr rRecovery(RU,fMaxr$loadings, diagCommunalities=FALSE) fMultiple <- iterativePrincipalAxis(RU, nFactors=nFactors, communalities="multiple") fMultiple rRecovery(RU,fMultiple$loadings, diagCommunalities=FALSE) # ....................................................... } \references{ Kim, J.-O. and Mueller, C. W. (1978). \emph{Introduction to factor analysis. What it is and how to do it}. Beverly Hills, CA: Sage. Kim, J.-O. and Mueller, C. W. (1987). \emph{Factor analysis. Statistical methods and practical issues}. Beverly Hills, CA: Sage. } \seealso{ \code{\link{componentAxis}}, \code{\link{principalAxis}}, \code{\link{rRecovery}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege \cr \email{David.Magis@ulg.ac.be} } \keyword{multivariate} nFactors/man/eigenBootParallel.Rd0000644000176200001440000000622313620574733016500 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/eigenBootParallel.r \name{eigenBootParallel} \alias{eigenBootParallel} \title{Bootstrapping of the Eigenvalues From a Data Frame} \usage{ eigenBootParallel(x, quantile = 0.95, nboot = 30, option = "permutation", cor = TRUE, model = "components", ...) } \arguments{ \item{x}{data.frame: data from which a correlation matrix will be obtained} \item{quantile}{numeric: eigenvalues quantile to be reported} \item{nboot}{numeric: number of bootstrap samples} \item{option}{character: \code{"permutation"} or \code{"bootstrap"}} \item{cor}{logical: if \code{TRUE} computes eigenvalues from a correlation matrix, else from a covariance matrix (\code{eigenComputes})} \item{model}{character: bootstraps from a principal component analysis (\code{"components"}) or from a factor analysis (\code{"factors"})} \item{...}{variable: additionnal parameters to give to the \code{cor} or \code{cov} functions} } \value{ \item{values}{ data.frame: mean, median, quantile, standard deviation, minimum and maximum of bootstrapped eigenvalues } } \description{ The \code{eigenBootParallel} function samples observations from a \code{data.frame} to produce correlation or covariance matrices from which eigenvalues are computed. The function returns statistics about these bootstrapped eigenvalues. Their means or their quantile could be used later to replace the eigenvalues inputted to a parallel analysis. The \code{eigenBootParallel} can also compute random eigenvalues from empirical data by column permutation (Buja and Eyuboglu, 1992). } \examples{ # ....................................................... # Example from the iris data eigenvalues <- eigenComputes(x=iris[,-5]) # Permutation parallel analysis distribution aparallel <- eigenBootParallel(x=iris[,-5], quantile=0.95)$quantile # Number of components to retain results <- nScree(x = eigenvalues, aparallel = aparallel) results$Components plotnScree(results) # ...................................................... # ...................................................... 
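# Hedged add-on (not in the original example): the mean of the permuted
# eigenvalues can also serve as the comparison baseline, assuming the
# data.frame returned by eigenBootParallel keeps a column named "mean"
# as suggested by the documented return values
aparallel.mean <- eigenBootParallel(x=iris[,-5], quantile=0.95)$mean
nScree(x=eigenvalues, aparallel=aparallel.mean)$Components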
# Bootstrap distributions study of the eigenvalues from iris data # with different correlation methods eigenBootParallel(x=iris[,-5],quantile=0.05, option="bootstrap",method="pearson") eigenBootParallel(x=iris[,-5],quantile=0.05, option="bootstrap",method="spearman") eigenBootParallel(x=iris[,-5],quantile=0.05, option="bootstrap",method="kendall") } \references{ Buja, A. and Eyuboglu, N. (1992). Remarks on parallel analysis. \emph{Multivariate Behavioral Research, 27}(4), 509-540. Zwick, W. R. and Velicer, W. F. (1986). Comparison of five rules for determining the number of components to retain. \emph{Psychological bulletin, 99}, 432-442. } \seealso{ \code{\link{principalComponents}}, \code{\link{iterativePrincipalAxis}}, \code{\link{rRecovery}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/plotuScree.Rd0000644000176200001440000000314513620574734015236 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plotuScree.R \name{plotuScree} \alias{plotuScree} \title{Plot of the Usual Cattell's Scree Test} \usage{ plotuScree(Eigenvalue, x = Eigenvalue, model = "components", ylab = "Eigenvalues", xlab = "Components", main = "Scree Plot", ...) } \arguments{ \item{Eigenvalue}{depreciated parameter: eigenvalues to analyse (not used if x is used, recommended)} \item{x}{numeric: a \code{vector} of eigenvalues, a \code{matrix} of correlations or of covariances or a \code{data.frame} of data} \item{model}{character: \code{"components"} or \code{"factors"}} \item{ylab}{character: label of the y axis (default is \code{Eigenvalue})} \item{xlab}{character: label of the x axis (default is \code{Component})} \item{main}{character: title of the plot (default is \code{Scree Plot})} \item{...}{variable: additionnal parameters to give to the \code{eigenComputes} function} } \value{ Nothing returned with this function. } \description{ \code{uScree} plot a usual scree test of the eigenvalues of a correlation matrix. } \examples{ ## SCREE PLOT data(dFactors) attach(dFactors) eig = Cliff1$eigenvalues plotuScree(x=eig) } \references{ Cattell, R. B. (1966). The scree test for the number of factors. \emph{Multivariate Behavioral Research, 1}, 245-276. 
} \seealso{ \code{\link{nScree}}, \code{\link{parallel}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{Graphics} nFactors/man/structureSim.Rd0000644000176200001440000000773113620574734015627 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/structureSim.r \name{structureSim} \alias{structureSim} \title{Population or Simulated Sample Correlation Matrix from a Given Factor Structure Matrix} \usage{ structureSim(fload, reppar = 30, repsim = 100, N, quantile = 0.95, model = "components", adequacy = FALSE, details = TRUE, r2limen = 0.75, all = FALSE) } \arguments{ \item{fload}{matrix: loadings of the factor structure} \item{reppar}{numeric: number of replications for the parallel analysis} \item{repsim}{numeric: number of replications of the matrix correlation simulation} \item{N}{numeric: number of subjects} \item{quantile}{numeric: quantile for the parallel analysis} \item{model}{character: \code{"components"} or \code{"factors"}} \item{adequacy}{logical: if \code{TRUE} prints the recovered population matrix from the factor structure} \item{details}{logical: if \code{TRUE} outputs details of the \code{repsim} simulations} \item{r2limen}{numeric: R2 limen value for the R2 Nelson index} \item{all}{logical: if \code{TRUE} computes the Bentler and Yuan index (very long computing time to consider)} } \value{ \item{values}{ the output depends of the logical value of details. If \code{FALSE}, returns only statistics about the eigenvalues: mean, median, quantile, standard deviation, minimum and maximum. If \code{TRUE}, returns also details about the \code{repsim} simulations. If \code{adequacy} = \code{TRUE} returns the recovered factor structure} } \description{ The \code{structureSim} function returns a population and a sample correlation matrices from a predefined congeneric factor structure. } \examples{ \dontrun{ # ....................................................... # Example inspired from Zwick and Velicer (1986, table 2, p. 437) ## ................................................................... nFactors <- 3 unique <- 0.2 loadings <- 0.5 nsubjects <- 180 repsim <- 30 zwick <- generateStructure(var=36, mjc=nFactors, pmjc=12, loadings=loadings, unique=unique) ## ................................................................... # Produce statistics about a replication of a parallel analysis on # 30 sampled correlation matrices mzwick.fa <- structureSim(fload=as.matrix(zwick), reppar=30, repsim=repsim, N=nsubjects, quantile=0.5, model="factors") mzwick <- structureSim(fload=as.matrix(zwick), reppar=30, repsim=repsim, N=nsubjects, quantile=0.5, all=TRUE) # Very long execution time that could be used only with model="components" # mzwick <- structureSim(fload=as.matrix(zwick), reppar=30, # repsim=repsim, N=nsubjects, quantile=0.5, all=TRUE) par(mfrow=c(2,1)) plot(x=mzwick, nFactors=nFactors, index=c(1:14), cex.axis=0.7, col="red") plot(x=mzwick.fa, nFactors=nFactors, index=c(1:11), cex.axis=0.7, col="red") par(mfrow=c(1,1)) par(mfrow=c(2,1)) boxplot(x=mzwick, nFactors=3, cex.axis=0.8, vLine="blue", col="red") boxplot(x=mzwick.fa, nFactors=3, cex.axis=0.8, vLine="blue", col="red", xlab="Components") par(mfrow=c(1,1)) # ...................................................... } } \references{ Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. 
Methodology, 9(1), 23-29. Zwick, W. R. and Velicer, W. F. (1986). Comparison of five rules for determining the number of components to retain. \emph{Psychological Bulletin, 99}, 432-442. } \seealso{ \code{\link{principalComponents}}, \code{\link{iterativePrincipalAxis}}, \code{\link{rRecovery}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/structureSimObjectMethods.Rd0000644000176200001440000000706413634756467020313 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/structureSimObjectMethods.r \name{summary.structureSim} \alias{summary.structureSim} \alias{boxplot.structureSim} \alias{is.structureSim} \alias{plot.structureSim} \alias{print.structureSim} \title{Utility Functions for nScree Class Objects} \usage{ \method{summary}{structureSim}(object, index = c(1:15), eigenSelect = NULL, ...) \method{print}{structureSim}(x, index = NULL, ...) \method{boxplot}{structureSim}(x, nFactors = NULL, eigenSelect = NULL, vLine = "green", xlab = "Factors", ylab = "Eigenvalues", main = "Eigen Box Plot", ...) \method{plot}{structureSim}(x, nFactors = NULL, index = NULL, main = "Index Acuracy Plot", ...) is.structureSim(object) } \arguments{ \item{object}{structureSim: an object of the class \code{structureSim}} \item{index}{numeric: vector of the index of the selected indices} \item{eigenSelect}{numeric: vector of the index of the selected eigenvalues} \item{...}{variable: additionnal parameters to give to the \code{boxplot}, \code{plot}, \code{print} and \code{summary functions.}} \item{x}{structureSim: an object of the class \code{structureSim}} \item{nFactors}{numeric: if known, number of factors} \item{vLine}{character: color of the vertical indicator line of the initial number of factors in the eigen boxplot} \item{xlab}{character: x axis label} \item{ylab}{character: y axis label} \item{main}{character: main title} } \value{ Generic functions for the \code{structureSim} class: \item{boxplot.structureSim }{ graphic: plots an eigen boxplot } \item{is.structureSim}{ logical: is the object of the class \code{structureSim}? } \item{plot.structureSim }{ graphic: plots an index acuracy plot} \item{print.structureSim }{ numeric: data.frame of statistics about the number of components/factors to retain according to different indices following a \code{structureSim} simulation} \item{summary.structureSim }{ list: two data.frame, the first with the details of the simulated eigenvalues, the second with the details of the simulated indices} } \description{ Utility functions for \code{structureSim} class objects. Note that with the \code{plot.structureSim} a dotted black vertical line shows the median number of factors retained by all the different indices. 
} \examples{ \dontrun{ ## INITIALISATION library(xtable) library(nFactors) nFactors <- 3 unique <- 0.2 loadings <- 0.5 nsubjects <- 180 repsim <- 10 var <- 36 pmjc <- 12 reppar <- 10 zwick <- generateStructure(var=var, mjc=nFactors, pmjc=pmjc, loadings=loadings, unique=unique) ## SIMULATIONS mzwick <- structureSim(fload=as.matrix(zwick), reppar=reppar, repsim=repsim, details=TRUE, N=nsubjects, quantile=0.5) ## TEST OF structureSim METHODS is(mzwick) summary(mzwick, index=1:5, eigenSelect=1:10, digits=3) print(mzwick, index=1:10) plot(x=mzwick, index=c(1:10), cex.axis=0.7, col="red") boxplot(x=mzwick, nFactors=3, vLine="blue", col="red") } } \references{ Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. } \seealso{ \code{\link{nFactors-package}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/makeCor.Rd0000644000176200001440000000300013620574733014457 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/makeCor.r \name{makeCor} \alias{makeCor} \title{Create a Full Correlation/Covariance Matrix from a Matrix With Lower Part Filled and Upper Part With Zeros} \usage{ makeCor(x) } \arguments{ \item{x}{numeric: matrix} } \value{ numeric: full correlation matrix } \description{ This function creates a full correlation/covariance matrix from a matrix with lower part filled and upper part with zeros. } \examples{ ## ................................................ ## LOWER CORRELATION MATRIX WITH ZEROS ON UPPER PART ## From Gorsuch (table 1.3.1) gorsuch <- c( 1,0,0,0,0,0,0,0,0,0, .6283, 1,0,0,0,0,0,0,0,0, .5631, .7353, 1,0,0,0,0,0,0,0, .8689, .7055, .8444, 1,0,0,0,0,0,0, .9030, .8626, .6890, .8874, 1,0,0,0,0,0, .6908, .9028, .9155, .8841, .8816, 1,0,0,0,0, .8633, .7495, .7378, .9164, .9109, .8572, 1,0,0,0, .7694, .7902, .7872, .8857, .8835, .8884, .7872, 1,0,0, .8945, .7929, .7656, .9494, .9546, .8942, .9434, .9000, 1,0, .5615, .6850, .8153, .7004, .6583, .7720, .6201, .6141, .6378, 1) ## UPPER CORRELATION MATRIX FILLED WITH UPPER CORRELATION MATRIX gorsuch <- makeCor(gorsuch) gorsuch } \seealso{ \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/moreStats.Rd0000644000176200001440000000253213620574733015070 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/moreStats.r \name{moreStats} \alias{moreStats} \title{Statistical Summary of a Data Frame} \usage{ moreStats(x, quantile = 0.95, show = FALSE) } \arguments{ \item{x}{numeric: matrix or \code{data.frame}} \item{quantile}{numeric: quantile of the distribution} \item{show}{logical: if \code{TRUE} prints the quantile choosen} } \value{ numeric: \code{data.frame} of statistics: mean, median, quantile, standard deviation, minimum and maximum } \description{ This function produces another summary of a \code{data.frame}. This function was proposed in order to apply some functions globally on a \code{data.frame}: \code{quantile}, \code{median}, \code{min} and \code{max}. The usual \emph{R} version cannot do so. 
} \examples{ ## ................................................ ## GENERATION OF A MATRIX OF 100 OBSERVATIONS AND 10 VARIABLES x <- matrix(rnorm(1000),ncol=10) ## STATISTICS res <- moreStats(x, quantile=0.05, show=TRUE) res } \seealso{ \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/nFactorsObjectMethods.Rd0000644000176200001440000000451513636745424017350 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nFactorsObjectMethods.r \name{is.nFactors} \alias{is.nFactors} \alias{print.nFactors} \alias{summary.nFactors} \title{Utility Functions for nFactors Class Objects} \usage{ is.nFactors(x) \method{print}{nFactors}(x, ...) \method{summary}{nFactors}(object, ...) } \arguments{ \item{x}{nFactors: an object of the class nFactors} \item{...}{variable: additionnal parameters to give to the \code{print} function with \code{print.nFactors} or to the \code{summary} function with \code{summary.nFactors}} \item{object}{nFactors: an object of the class nFactors} } \value{ Generic functions for the nFactors class: \item{is.nFactors}{ logical: is the object of the class nFactors? } \item{print.nFactors }{ numeric: vector of the number of components/factors to retain: same as the \code{nFactors} vector from the \code{nFactors} object} \item{summary.nFactors }{ data.frame: details of the results from a nFactors object: same as the \code{details} data.frame from the \code{nFactors} object, but with easier control of the number of decimals with the \code{digits} parameter} } \description{ Utility functions for \code{nFactors} class objects. } \examples{ ## SIMPLE EXAMPLE data(dFactors) eig <- dFactors$Raiche$eigenvalues N <- dFactors$Raiche$nsubjects res <- nBartlett(eig,N); res; is.nFactors(res); summary(res, digits=2) res <- nBentler(eig,N); res; is.nFactors(res); summary(res, digits=2) res <- nCng(eig); res; is.nFactors(res); summary(res, digits=2) res <- nMreg(eig); res; is.nFactors(res); summary(res, digits=2) res <- nSeScree(eig); res; is.nFactors(res); summary(res, digits=2) ## SIMILAR RESULTS, BUT NOT A nFactors OBJECT res <- nScree(eig); res; is.nFactors(res); summary(res, digits=2) } \references{ Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. } \seealso{ \code{\link{nBentler}}, \code{\link{nBartlett}}, \code{\link{nCng}}, \code{\link{nMreg}}, \code{\link{nSeScree}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/nScreeObjectMethods.Rd0000644000176200001440000000601513634755254017005 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nScreeObjectMethods.r \name{summary.nScree} \alias{summary.nScree} \alias{print.nScree} \alias{plot.nScree} \alias{is.nScree} \title{Utility Functions for nScree Class Objects} \usage{ \method{summary}{nScree}(object, ...) \method{print}{nScree}(x, ...) \method{plot}{nScree}(x, ...) 
is.nScree(object) } \arguments{ \item{object}{nScree: an object of the class \code{nScree}} \item{...}{variable: additionnal parameters to give to the \code{print} function with \code{print.nScree}, the \code{plotnScree} with \code{plot.nScree} or to the \code{summary} function with \code{summary.nScree}} \item{x}{Results of a previous \code{nScree} analysis} } \value{ Generic functions for the nScree class: \item{is.nScree}{ logical: is the object of the class \code{nScree}? } \item{plot.nScree }{ graphic: plots a figure according to the \code{plotnScree} function} \item{print.nScree }{ numeric: vector of the number of components/factors to retain: same as the \code{Components} vector from the \code{nScree} object} \item{summary.nScree }{ data.frame: details of the results from a nScree analysis: same as the \code{Analysis} data.frame from the \code{nScree} object, but with easier control of the number of decimals with the \code{digits} parameter} } \description{ Utility functions for \code{nScree} class objects. Some of these functions are already implemented in the \code{nFactors} package, but are easier to use with generic functions like these. } \examples{ ## INITIALISATION data(dFactors) # Load the nFactors dataset attach(dFactors) vect <- Raiche # Use the example from Raiche eigenvalues <- vect$eigenvalues # Extract the observed eigenvalues nsubjects <- vect$nsubjects # Extract the number of subjects variables <- length(eigenvalues) # Compute the number of variables rep <- 100 # Number of replications for the parallel analysis cent <- 0.95 # Centile value of the parallel analysis ## PARALLEL ANALYSIS (qevpea for the centile criterion, mevpea for the mean criterion) aparallel <- parallel(var = variables, subject = nsubjects, rep = rep, cent = cent )$eigen$qevpea # The 95 centile ## NOMBER OF FACTORS RETAINED ACCORDING TO DIFFERENT RULES results <- nScree(x=eigenvalues, aparallel=aparallel) is.nScree(results) results summary(results) ## PLOT ACCORDING TO THE nScree CLASS plot(results) } \references{ Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/rRecovery.Rd0000644000176200001440000000600213620574734015064 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/rRecovery.r \name{rRecovery} \alias{rRecovery} \title{Test of Recovery of a Correlation or a Covariance matrix from a Factor Analysis Solution} \usage{ rRecovery(R, loadings, diagCommunalities = FALSE) } \arguments{ \item{R}{numeric: initial correlation or covariance matrix} \item{loadings}{numeric: loadings from a factor analysis solution} \item{diagCommunalities}{logical: if \code{TRUE}, the correlation between the initial solution and the estimated one will use a correlation of one in the diagonal. If \code{FALSE} (default) the diagonal is not used in the computation of this correlation.} } \value{ \item{R}{ numeric: initial correlation or covariance matrix } \item{recoveredR}{ numeric: recovered estimated correlation or covariance matrix } \item{difference}{ numeric: difference between initial and recovered estimated correlation or covariance matrix} \item{cor}{ numeric: Pearson correlation between initial and recovered estimated correlation or covariance matrix. 
Computations depend on the logical value of the \code{communalities} argument. } } \description{ The \code{rRecovery} function returns a verification of the quality of the recovery of the initial correlation or covariance matrix by the factor solution. } \examples{ # ....................................................... # Example from Kim and Mueller (1978, p. 10) # Population: upper diagonal # Simulated sample: lower diagnonal R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, .5600, 1.000, .4749, .2196, .1912, .2979, .4800, .4200, 1.000, .2079, .2010, .2445, .2240, .1960, .1680, 1.000, .4334, .3197, .1920, .1680, .1440, .4200, 1.000, .4207, .1600, .1400, .1200, .3500, .3000, 1.000), nrow=6, byrow=TRUE) # Replace upper diagonal with lower diagonal RU <- diagReplace(R, upper=TRUE) nFactors <- 2 loadings <- principalAxis(RU, nFactors=nFactors, communalities="component")$loadings rComponent <- rRecovery(RU,loadings, diagCommunalities=FALSE)$cor loadings <- principalAxis(RU, nFactors=nFactors, communalities="maxr")$loadings rMaxr <- rRecovery(RU,loadings, diagCommunalities=FALSE)$cor loadings <- principalAxis(RU, nFactors=nFactors, communalities="multiple")$loadings rMultiple <- rRecovery(RU,loadings, diagCommunalities=FALSE)$cor round(c(rComponent = rComponent, rmaxr = rMaxr, rMultiple = rMultiple), 3) # ....................................................... } \seealso{ \code{\link{componentAxis}}, \code{\link{iterativePrincipalAxis}}, \code{\link{principalAxis}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{utilities} nFactors/man/nFactors.Rd0000644000176200001440000000135413635511660014663 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nFactors.R \docType{package} \name{nFactors} \alias{nFactors} \alias{nFactors-package} \title{nFactors: Number of factor or components to retain in a factor analysis} \description{ A package for determining the number of factor or components to retain in a factor analysis. The methods are all based on eigenvalues. } \references{ Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } nFactors/man/corFA.Rd0000644000176200001440000000303013610160240014052 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/corFA.r \name{corFA} \alias{corFA} \title{Insert Communalities in the Diagonal of a Correlation or a Covariance Matrix} \usage{ corFA(R, method = "ginv") } \arguments{ \item{R}{An integer matrix or a data.frame of correlations} \item{method}{A character vector: inversion method} } \value{ A correlation matrix with coerced variables with communalities in the diagonal. } \description{ This function inserts communalities in the diagonal of a correlation/covariance matrix. 
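Communality estimates of this kind are typically the squared multiple correlations, \eqn{h_i^2 = 1 - 1/r^{ii}}, where \eqn{r^{ii}} is the \eqn{i}th diagonal element of the inverse (here, by default, the \code{ginv} generalized inverse) of the correlation matrix.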
} \examples{ ## LOWER CORRELATION MATRIX WITH ZEROS ON UPPER PART ## From Gorsuch (table 1.3.1) gorsuch <- c( 1,0,0,0,0,0,0,0,0,0, .6283, 1,0,0,0,0,0,0,0,0, .5631, .7353, 1,0,0,0,0,0,0,0, .8689, .7055, .8444, 1,0,0,0,0,0,0, .9030, .8626, .6890, .8874, 1,0,0,0,0,0, .6908, .9028, .9155, .8841, .8816, 1,0,0,0,0, .8633, .7495, .7378, .9164, .9109, .8572, 1,0,0,0, .7694, .7902, .7872, .8857, .8835, .8884, .7872, 1,0,0, .8945, .7929, .7656, .9494, .9546, .8942, .9434, .9000, 1,0, .5615, .6850, .8153, .7004, .6583, .7720, .6201, .6141, .6378, 1) ## UPPER CORRELATION MATRIX FILLED WITH UPPER CORRELATION MATRIX gorsuch <- makeCor(gorsuch) ## REPLACE DIAGONAL WITH COMMUNALITIES gorsuchCfa <- corFA(gorsuch) gorsuchCfa } \seealso{ \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} } \author{ Gilles Raiche, Universite du Quebec a Montreal (\email{raiche.gilles@uqam.ca}) } \keyword{manip} nFactors/man/figures/0000755000176200001440000000000013636677340014267 5ustar liggesusersnFactors/man/figures/essai.png.png0000644000176200001440000000347513617350207016662 0ustar liggesusers nFactors/man/studySim.Rd0000644000176200001440000000713213620574734014732 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/studySim.r \name{studySim} \alias{studySim} \title{Simulation Study from Given Factor Structure Matrices and Conditions} \usage{ studySim(var, nFactors, pmjc, loadings, unique, N, repsim, reppar, stats = 1, quantile = 0.5, model = "components", r2limen = 0.75, all = FALSE, dir = NA, trace = TRUE) } \arguments{ \item{var}{numeric: vector of the number of variables} \item{nFactors}{numeric: vector of the number of components/factors} \item{pmjc}{numeric: vector of the number of major loadings on each component/factor} \item{loadings}{numeric: vector of the major loadings on each component/factor} \item{unique}{numeric: vector of the unique loadings on each component/factor} \item{N}{numeric: vector of the number of subjects/observations} \item{repsim}{numeric: number of replications of the matrix correlation simulation} \item{reppar}{numeric: number of replications for the parallel and permutation analysis} \item{stats}{numeric: vector of the statistics to return: mean(1), median(2), sd(3), quantile(4), min(5), max(6)} \item{quantile}{numeric: quantile for the parallel and permutation analysis} \item{model}{character: \code{"components"} or \code{"factors"}} \item{r2limen}{numeric: R2 limen value for the R2 Nelson index} \item{all}{logical: if \code{TRUE} computes the Bentler and Yuan index (very long computing time to consider)} \item{dir}{character: directory where to save output. Defaults to NA} \item{trace}{logical: if \code{TRUE} outputs details of the status of the simulations} } \value{ \item{values}{ Returns selected statistics about the number of components/factors to retain: mean, median, quantile, standard deviation, minimum and maximum.} } \description{ The \code{studySim} function returns statistical results from simulations from predefined congeneric factor structures. 
The main ideas come from the methodology applied by Zwick and Velicer (1986). } \examples{ \dontrun{ # .................................................................... # Example inspired from Zwick and Velicer (1986) # Very long computimg time # ................................................................... # 1. Initialisation # reppar <- 30 # repsim <- 5 # quantile <- 0.50 # 2. Simulations # X <- studySim(var=36,nFactors=3, pmjc=c(6,12), loadings=c(0.5,0.8), # unique=c(0,0.2), quantile=quantile, # N=c(72,180), repsim=repsim, reppar=reppar, # stats=c(1:6)) # 3. Results (first 10 results) # print(X[1:10,1:14],2) # names(X) # 4. Study of the error done in the determination of the number # of components/factors. A positive value is associated to over # determination. # results <- X[X$stats=="mean",] # residuals <- results[,c(11:25)] - X$nfactors # BY <- c("nsubjects","var","loadings") # round(aggregate(residuals, by=results[BY], mean),0) } } \references{ Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. Zwick, W. R. and Velicer, W. F. (1986). Comparison of five rules for determining the number of components to retain. \emph{Psychological Bulletin, 99}, 432-442. } \seealso{ \code{\link{generateStructure}}, \code{\link{structureSim}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/plotParallel.Rd0000644000176200001440000000420713635511660015537 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/plotParallel.R \name{plotParallel} \alias{plotParallel} \title{Plot a Parallel Analysis Class Object} \usage{ plotParallel(parallel, eig = NA, x = eig, model = "components", legend = TRUE, ylab = "Eigenvalues", xlab = "Components", main = "Parallel Analysis", ...) } \arguments{ \item{parallel}{numeric: vector of the results of a previous parallel analysis} \item{eig}{depreciated parameter: eigenvalues to analyse (not used if x is used, recommended)} \item{x}{numeric: a \code{vector} of eigenvalues, a \code{matrix} of correlations or of covariances or a \code{data.frame} of data} \item{model}{character: \code{"components"} or \code{"factors"}} \item{legend}{logical: indicator of the presence or not of a legend} \item{ylab}{character: label of the y axis} \item{xlab}{character: label of the x axis} \item{main}{character: title of the plot} \item{...}{variable: additionnal parameters to give to the \code{cor} or \code{cov} functions} } \value{ Nothing returned. } \description{ Plot a scree plot adding information about a parallel analysis. } \details{ If \code{eig} is \code{FALSE} the plot shows only the parallel analysis without eigenvalues. } \examples{ ## SIMPLE EXAMPLE OF A PARALLEL ANALYSIS ## OF A CORRELATION MATRIX WITH ITS PLOT data(dFactors) eig <- dFactors$Raiche$eigenvalues subject <- dFactors$Raiche$nsubjects var <- length(eig) rep <- 100 cent <- 0.95 results <- parallel(subject,var,rep,cent) results ## PARALLEL ANALYSIS SCREE PLOT plotParallel(results, x=eig) plotParallel(results) } \references{ Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. 
} \seealso{ \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{parallel}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{Graphics} nFactors/man/nSeScree.Rd0000644000176200001440000000731513620574733014622 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nSeScree.r \name{nSeScree} \alias{nSeScree} \title{Standard Error Scree and Coefficient of Determination Procedures to Determine the Number of Components/Factors} \usage{ nSeScree(x, cor = TRUE, model = "components", details = TRUE, r2limen = 0.75, ...) } \arguments{ \item{x}{numeric: eigenvalues.} \item{cor}{logical: if \code{TRUE} computes eigenvalues from a correlation matrix, else from a covariance matrix} \item{model}{character: \code{"components"} or \code{"factors"}} \item{details}{logical: if \code{TRUE} also returns details about the computation for each eigenvalue.} \item{r2limen}{numeric: criterion value retained for the coefficient of determination indices.} \item{...}{variable: additionnal parameters to give to the \code{eigenComputes} and \code{cor} or \code{cov} functions} } \value{ \item{nFactors}{ numeric: number of components/factors retained by the seScree procedure. } \item{details}{ numeric: matrix of the details for each index.} } \description{ This function computes the \emph{seScree} (\eqn{S_{Y \bullet X}}) indices (Zoski and Jurs, 1996) and the coefficient of determination indices of Nelson (2005) \eqn{R^2} for determining the number of components/factors to retain. } \details{ The Zoski and Jurs \eqn{S_{Y \bullet X}} index is the standard error of the estimate (predicted) eigenvalues by the regression from the \eqn{(k+1, \ldots, p)} subsequent ranks of the eigenvalues. The standard error is computed as: (1) \eqn{\qquad \qquad S_{Y \bullet X} = \sqrt{ \frac{(\lambda_k - \hat{\lambda}_k)^2} {p-2} } } \cr A value of \eqn{1/p} is choosen as the criteria to determine the number of components or factors to retain, \emph{p} corresponding to the number of variables. The Nelson \eqn{R^2} index is simply the multiple regresion coefficient of determination for the \eqn{k+1, \ldots, p} eigenvalues. Note that Nelson didn't give formal prescriptions for the criteria for this index. He only suggested that a value of 0.75 or more must be considered. More is to be done to explore adequate values. } \examples{ ## SIMPLE EXAMPLE OF SESCREE AND R2 ANALYSIS data(dFactors) eig <- dFactors$Raiche$eigenvalues results <- nSeScree(eig) results plotuScree(eig, main=paste(results$nFactors[1], " or ", results$nFactors[2], " factors retained by the sescree and R2 procedures", sep="")) } \references{ Nasser, F. (2002). The performance of regression-based variations of the visual scree for determining the number of common factors. \emph{Educational and Psychological Measurement, 62(3)}, 397-419. Nelson, L. R. (2005). Some observations on the scree test, and on coefficient alpha. \emph{Thai Journal of Educational Research and Measurement, 3(1)}, 1-17. Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. Zoski, K. and Jurs, S. (1993). Using multiple regression to determine the number of factors to retain in factor analysis. \emph{Multiple Linear Regression Viewpoints, 20}(1), 5-9. Zoski, K. and Jurs, S. (1996). 
An objective counterpart to the visuel scree test for factor analysis: the standard error scree. \emph{Educational and Psychological Measurement, 56}(3), 443-451. } \seealso{ \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/parallel.Rd0000644000176200001440000000727513620574733014714 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/parallel.R \name{parallel} \alias{parallel} \title{Parallel Analysis of a Correlation or Covariance Matrix} \usage{ parallel(subject = 100, var = 10, rep = 100, cent = 0.05, quantile = cent, model = "components", sd = diag(1, var), ...) } \arguments{ \item{subject}{numeric: nmber of subjects (default is 100)} \item{var}{numeric: number of variables (default is 10)} \item{rep}{numeric: number of replications of the correlation matrix (default is 100)} \item{cent}{depreciated numeric (use quantile instead): quantile of the distribution on which the decision is made (default is 0.05)} \item{quantile}{numeric: quantile of the distribution on which the decision is made (default is 0.05)} \item{model}{character: \code{"components"} or \code{"factors"}} \item{sd}{numeric: vector of standard deviations of the simulated variables (for a parallel analysis on a covariance matrix)} \item{...}{variable: other parameters for the \code{"mvrnorm"}, \code{corr} or \code{cov} functions} } \value{ \item{eigen}{ Data frame consisting of the mean and the quantile of the eigenvalues distribution } \item{eigen$mevpea}{ Mean of the eigenvalues distribution} \item{eigen$sevpea}{ Standard deviation of the eigenvalues distribution} \item{eigen$qevpea}{ quantile of the eigenvalues distribution} \item{eigen$sqevpea}{ Standard error of the quantile of the eigenvalues distribution} \item{subject}{ Number of subjects} \item{variables}{ Number of variables} \item{centile}{ Selected quantile} Otherwise, returns a summary of the parallel analysis. } \description{ This function gives the distribution of the eigenvalues of correlation or a covariance matrices of random uncorrelated standardized normal variables. The mean and a selected quantile of this distribution are returned. } \details{ Note that if the decision is based on a quantile value rather than on the mean, care must be taken with the number of replications (\code{rep}). In fact, the smaller the quantile (\code{cent}), the bigger the number of necessary replications. } \examples{ ## SIMPLE EXAMPLE OF A PARALLEL ANALYSIS ## OF A CORRELATION MATRIX WITH ITS PLOT data(dFactors) eig <- dFactors$Raiche$eigenvalues subject <- dFactors$Raiche$nsubjects var <- length(eig) rep <- 100 quantile <- 0.95 results <- parallel(subject, var, rep, quantile) results ## IF THE DECISION IS BASED ON THE CENTILE USE qevpea INSTEAD ## OF mevpea ON THE FIRST LINE OF THE FOLLOWING CALL plotuScree(x = eig, main = "Parallel Analysis" ) lines(1:var, results$eigen$qevpea, type="b", col="green" ) ## ANOTHER SOLUTION IS SIMPLY TO plotParallel(results) } \references{ Drasgow, F. and Lissak, R. (1983) Modified parallel analysis: a procedure for examining the latent dimensionality of dichotomously scored item responses. \emph{Journal of Applied Psychology, 68}(3), 363-373. Hoyle, R. H. and Duvall, J. L. (2004). 
Determining the number of factors in exploratory and confirmatory factor analysis. In D. Kaplan (Ed.): \emph{The Sage handbook of quantitative methodology for the social sciences}. Thousand Oaks, CA: Sage. Horn, J. L. (1965). A rationale and test of the number of factors in factor analysis. \emph{Psychometrika, 30}, 179-185. } \seealso{ \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/componentAxis.Rd0000644000176200001440000000424113620574733015735 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/componentAxis.r \name{componentAxis} \alias{componentAxis} \title{Principal Component Analysis With Only n First Components Retained} \usage{ componentAxis(R, nFactors = 2) } \arguments{ \item{R}{numeric: correlation or covariance matrix} \item{nFactors}{numeric: number of components/factors to retain} } \value{ \item{values}{ numeric: variance of each component/factor retained } \item{varExplained}{ numeric: variance explained by each component/factor retained } \item{varExplained}{ numeric: cumulative variance explained by each component/factor retained } \item{loadings}{ numeric: loadings of each variable on each component/factor retained } } \description{ The \code{componentAxis} function returns a principal component analysis with the first \emph{n} components retained. } \examples{ # ....................................................... # Example from Kim and Mueller (1978, p. 10) # Simulated sample: lower diagnonal R <- matrix(c( 1.000, 0.560, 0.480, 0.224, 0.192, 0.16, 0.560, 1.000, 0.420, 0.196, 0.168, 0.14, 0.480, 0.420, 1.000, 0.168, 0.144, 0.12, 0.224, 0.196, 0.168, 1.000, 0.420, 0.35, 0.192, 0.168, 0.144, 0.420, 1.000, 0.30, 0.160, 0.140, 0.120, 0.350, 0.300, 1.00), nrow=6, byrow=TRUE) # Factor analysis: Selected principal components - Kim and Mueller # (1978, p. 20) componentAxis(R, nFactors=2) # ....................................................... } \references{ Kim, J.-O. and Mueller, C. W. (1978). \emph{Introduction to factor analysis. What it is and how to do it}. Beverly Hills, CA: Sage. Kim, J.-O. and Mueller, C. W. (1987). \emph{Factor analysis. Statistical methods and practical issues}. Beverly Hills, CA: Sage. 
} \seealso{ \code{\link{principalComponents}}, \code{\link{iterativePrincipalAxis}}, \code{\link{rRecovery}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/generateStructure.Rd0000644000176200001440000001121713621045621016610 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/generateStructure.r \name{generateStructure} \alias{generateStructure} \title{Generate a Factor Structure Matrix} \usage{ generateStructure(var, mjc, pmjc, loadings, unique) } \arguments{ \item{var}{numeric: number of variables} \item{mjc}{numeric: number of major factors (factors with practical significance)} \item{pmjc}{numeric: number of variables that load significantly on each major factor} \item{loadings}{numeric: loadings on the significant variables on each major factor} \item{unique}{numeric: loadings on the non significant variables on each major factor} } \value{ values numeric matrix: factor structure } \description{ The \code{generateStructure} function returns a \emph{mjc} factor structure matrix. The number of variables per major factor \emph{pmjc} is equal for each factor. The argument \emph{pmjc} must be divisible by \emph{nVar}. The arguments are strongly inspired from Zick and Velicer (1986, p. 435-436) methodology. } \examples{ # ....................................................... # Example inspired from Zwick and Velicer (1986, table 2, p. 437) ## ................................................................... unique=0.2; loadings=0.5 zwick1 <- generateStructure(var=36, mjc=6, pmjc= 6, loadings=loadings, unique=unique) zwick2 <- generateStructure(var=36, mjc=3, pmjc=12, loadings=loadings, unique=unique) zwick3 <- generateStructure(var=72, mjc=9, pmjc= 8, loadings=loadings, unique=unique) zwick4 <- generateStructure(var=72, mjc=6, pmjc=12, loadings=loadings, unique=unique) sat=0.8 ## ................................................................... zwick5 <- generateStructure(var=36, mjc=6, pmjc= 6, loadings=loadings, unique=unique) zwick6 <- generateStructure(var=36, mjc=3, pmjc=12, loadings=loadings, unique=unique) zwick7 <- generateStructure(var=72, mjc=9, pmjc= 8, loadings=loadings, unique=unique) zwick8 <- generateStructure(var=72, mjc=6, pmjc=12, loadings=loadings, unique=unique) ## ................................................................... 
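# Quick sanity check (an added sketch, not a package requirement): each major
# factor should contain exactly pmjc salient loadings, so the column-wise
# counts below should equal pmjc (6 for zwick1, 12 for zwick4)
colSums(zwick1 == loadings)
colSums(zwick4 == loadings)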
# nsubjects <- c(72, 144, 180, 360) # require(psych) # Produce an usual correlation matrix from a congeneric model nsubjects <- 72 mzwick5 <- psych::sim.structure(fx=as.matrix(zwick5), n=nsubjects) mzwick5$r # Factor analysis: recovery of the factor structure iterativePrincipalAxis(mzwick5$model, nFactors=6, communalities="ginv")$loadings iterativePrincipalAxis(mzwick5$r , nFactors=6, communalities="ginv")$loadings factanal(covmat=mzwick5$model, factors=6) factanal(covmat=mzwick5$r , factors=6) # Number of components to retain eigenvalues <- eigen(mzwick5$r)$values aparallel <- parallel(var = length(eigenvalues), subject = nsubjects, rep = 30, quantile = 0.95, model="components")$eigen$qevpea results <- nScree(x = eigenvalues, aparallel = aparallel) results$Components plotnScree(results) # Number of factors to retain eigenvalues.fa <- eigen(corFA(mzwick5$r))$values aparallel.fa <- parallel(var = length(eigenvalues.fa), subject = nsubjects, rep = 30, quantile = 0.95, model="factors")$eigen$qevpea results.fa <- nScree(x = eigenvalues.fa, aparallel = aparallel.fa, model ="factors") results.fa$Components plotnScree(results.fa) # ...................................................... } \references{ Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. Zwick, W. R. and Velicer, W. F. (1986). Comparison of five rules for determining the number of components to retain. \emph{Psychological Bulletin, 99}, 432-442. } \seealso{ \code{\link{principalComponents}}, \code{\link{iterativePrincipalAxis}}, \code{\link{rRecovery}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege \cr \email{David.Magis@ulg.ac.be} } \keyword{multivariate} nFactors/man/principalAxis.Rd0000644000176200001440000000646313620574734015725 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/principalAxis.r \name{principalAxis} \alias{principalAxis} \title{Principal Axis Analysis} \usage{ principalAxis(R, nFactors = 2, communalities = "component") } \arguments{ \item{R}{numeric: correlation or covariance matrix} \item{nFactors}{numeric: number of factors to retain} \item{communalities}{character: initial values for communalities (\code{"component", "maxr", "ginv" or "multiple"})} } \value{ \item{values}{ numeric: variance of each component/factor } \item{varExplained}{ numeric: variance explained by each component/factor } \item{varExplained}{ numeric: cumulative variance explained by each component/factor } \item{loadings}{ numeric: loadings of each variable on each component/factor } } \description{ The \code{PrincipalAxis} function returns a principal axis analysis without iterated communalities estimates. Three different choices of communalities estimates are given: maximum corelation, multiple correlation or estimates based on the sum of the squared principal component analysis loadings. Generally statistical packages initialize the the communalities at the multiple correlation value (usual inverse or generalized inverse). Unfortunately, this strategy cannot deal with singular correlation or covariance matrices. If a generalized inverse, the maximum correlation or the estimated communalities based on the sum of loading are used instead, then a solution can be computed. 
} \examples{ # ....................................................... # Example from Kim and Mueller (1978, p. 10) # Population: upper diagonal # Simulated sample: lower diagnonal R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, .5600, 1.000, .4749, .2196, .1912, .2979, .4800, .4200, 1.000, .2079, .2010, .2445, .2240, .1960, .1680, 1.000, .4334, .3197, .1920, .1680, .1440, .4200, 1.000, .4207, .1600, .1400, .1200, .3500, .3000, 1.000), nrow=6, byrow=TRUE) # Factor analysis: Principal axis factoring # without iterated communalities - # Kim and Mueller (1978, p. 21) # Replace upper diagonal with lower diagonal RU <- diagReplace(R, upper=TRUE) principalAxis(RU, nFactors=2, communalities="component") principalAxis(RU, nFactors=2, communalities="maxr") principalAxis(RU, nFactors=2, communalities="multiple") # Replace lower diagonal with upper diagonal RL <- diagReplace(R, upper=FALSE) principalAxis(RL, nFactors=2, communalities="component") principalAxis(RL, nFactors=2, communalities="maxr") principalAxis(RL, nFactors=2, communalities="multiple") # ....................................................... } \references{ Kim, J.-O. and Mueller, C. W. (1978). \emph{Introduction to factor analysis. What it is and how to do it}. Beverly Hills, CA: Sage. Kim, J.-O. and Mueller, C. W. (1987). \emph{Factor analysis. Statistical methods and practical issues}. Beverly Hills, CA: Sage. } \seealso{ \code{\link{componentAxis}}, \code{\link{iterativePrincipalAxis}}, \code{\link{rRecovery}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/eigenComputes.Rd0000644000176200001440000000403613620574733015717 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/eigenComputes.r \name{eigenComputes} \alias{eigenComputes} \title{Computes Eigenvalues According to the Data Type} \usage{ eigenComputes(x, cor = TRUE, model = "components", ...) } \arguments{ \item{x}{numeric: a \code{vector} of eigenvalues, a \code{matrix} of correlations or of covariances or a \code{data.frame} of data} \item{cor}{logical: if \code{TRUE} computes eigenvalues from a correlation matrix, else from a covariance matrix} \item{model}{character: \code{"components"} or \code{"factors"}} \item{...}{variable: additionnal parameters to give to the \code{cor} or \code{cov} functions} } \value{ numeric: return a vector of eigenvalues } \description{ The \code{eigenComputes} function computes eigenvalues from the identified data type. It is used internally in many fonctions of the \pkg{nFactors} package in order to apply these to a vector of eigenvalues, a matrix of correlations or covariance or a data frame. } \examples{ # ....................................................... # Different data types # Vector of eigenvalues data(dFactors) x1 <- dFactors$Cliff1$eigenvalues eigenComputes(x1) # Data from a data.frame x2 <- data.frame(matrix(20*rnorm(100), ncol=5)) eigenComputes(x2, cor=TRUE, use="everything") eigenComputes(x2, cor=FALSE, use="everything") eigenComputes(x2, cor=TRUE, use="everything", method="spearman") eigenComputes(x2, cor=TRUE, use="everything", method="kendall") x3 <- cov(x2) eigenComputes(x3, cor=TRUE, use="everything") eigenComputes(x3, cor=FALSE, use="everything") x4 <- cor(x2) eigenComputes(x4, use="everything") # ....................................................... 
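# The common factor model can also be requested (a brief added illustration);
# with model="factors" the decomposition is based on a reduced matrix with
# communalities on the diagonal, so some returned eigenvalues may be negative
eigenComputes(x2, cor=TRUE, model="factors", use="everything")
eigenComputes(x4, model="factors", use="everything")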
} \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege \cr \email{David.Magis@ulg.ac.be} } \keyword{multivariate} nFactors/man/nBartlett.Rd0000644000176200001440000001262113620574733015046 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nBartlett.r \name{nBartlett} \alias{nBartlett} \title{Bartlett, Anderson and Lawley Procedures to Determine the Number of Components/Factors} \usage{ nBartlett(x, N, alpha = 0.05, cor = TRUE, details = TRUE, correction = TRUE, ...) } \arguments{ \item{x}{numeric: a \code{vector} of eigenvalues, a \code{matrix} of correlations or of covariances or a \code{data.frame} of data (eigenFrom)} \item{N}{numeric: number of subjects} \item{alpha}{numeric: statistical significance level} \item{cor}{logical: if \code{TRUE} computes eigenvalues from a correlation matrix, else from a covariance matrix} \item{details}{logical: if \code{TRUE} also returns detains about the computation for each eigenvalue} \item{correction}{logical: if \code{TRUE} uses a correction for the degree of freedom after the first eigenvalue} \item{...}{variable: additionnal parameters to give to the \code{cor} or \code{cov} functions} } \value{ \item{nFactors}{numeric: vector of the number of factors retained by the Bartlett, Anderson and Lawley procedures.} \item{details}{numeric: matrix of the details for each index.} } \description{ This function computes the Bartlett, Anderson and Lawley indices for determining the number of components/factors to retain. } \details{ Note: the latex formulas are available only in the pdf version of this help file. The hypothesis tested is: \cr (1) \eqn{\qquad \qquad H_k: \lambda_{k+1} = \ldots = \lambda_p} \cr This hypothesis is verified by the application of different version of a \eqn{\chi^2} test with different values for the degrees of freedom. Each of these tests shares the compution of a \eqn{V_k} value: \cr (2) \eqn{\qquad \qquad V_k = \prod\limits_{i = k + 1}^p {\left\{ {{{\lambda _i } \over {{\raise0.7ex\hbox{$1$} \!\mathord{\left/ {\vphantom {1 q}}\right.\kern-\nulldelimiterspace} \!\lower0.7ex\hbox{$q$}}\sum\limits_{i = k + 1}^p {\lambda _i } }}} \right\}} } \eqn{p} is the number of eigenvalues, \eqn{k} the number of eigenvalues to test, and \eqn{q} the \eqn{p-k} remaining eigenvalues. \eqn{n} is equal to the sample size minus 1 (\eqn{n = N-1}). \cr The Anderson statistic is distributed as a \eqn{\chi^2} with \eqn{(q + 2)(q - 1)/2} degrees of freedom and is equal to: \cr (3) \eqn{\qquad \qquad - n\log (V_k ) \sim \chi _{(q + 2)(q - 1)/2}^2 } \cr An improvement of this statistic from Bartlett (Bentler, and Yuan, 1996, p. 300; Horn and Engstrom, 1979, equation 8) is distributed as a \eqn{\chi^2} with \eqn{(q)(q - 1)/2} degrees of freedom and is equal to: \cr (4) \eqn{\qquad \qquad - \left[ {n - k - {{2q^2 q + 2} \over {6q}}} \right]\log (V_k ) \sim \chi _{(q + 2)(q - 1)/2}^2 } \cr Finally, Anderson (1956) and James (1969) proposed another statistic. 
\cr (5) \eqn{\qquad \qquad - \left[ {n - k - {{2q^2 q + 2} \over {6q}} + \sum\limits_{i = 1}^k {{{\bar \lambda _q^2 } \over {\left( {\lambda _i - \bar \lambda _q } \right)^2 }}} } \right]\log (V_k ) \sim \chi _{(q + 2)(q - 1)/2}^2 } \cr Bartlett (1950, 1951) proposed a correction to the degrees of freedom of these \eqn{\chi^2} after the first significant test: \eqn{(q+2)(q - 1)/2}. \cr } \examples{ ## ................................................ ## SIMPLE EXAMPLE OF A BARTLETT PROCEDURE data(dFactors) eig <- dFactors$Raiche$eigenvalues results <- nBartlett(x=eig, N= 100, alpha=0.05, details=TRUE) results plotuScree(eig, main=paste(results$nFactors[1], ", ", results$nFactors[2], " or ", results$nFactors[3], " factors retained by the LRT procedures", sep="")) } \references{ Anderson, T. W. (1963). Asymptotic theory for principal component analysis. \emph{Annals of Mathematical Statistics, 34}, 122-148. Bartlett, M. S. (1950). Tests of significance in factor analysis. \emph{British Journal of Psychology, 3}, 77-85. Bartlett, M. S. (1951). A further note on tests of significance. \emph{British Journal of Psychology, 4}, 1-2. Bentler, P. M. and Yuan, K.-H. (1996). Test of linear trend in eigenvalues of a covariance matrix with application to data analysis. \emph{British Journal of Mathematical and Statistical Psychology, 49}, 299-312. Bentler, P. M. and Yuan, K.-H. (1998). Test of linear trend in the smallest eigenvalues of the correlation matrix. \emph{Psychometrika, 63}(2), 131-144. Horn, J. L. and Engstrom, R. (1979). Cattell's scree test in relation to Bartlett's chi-square test and other observations on the number of factors problem. \emph{Multivariate Behavioral Reasearch, 14}(3), 283-300. James, A. T. (1969). Test of equality of the latent roots of the covariance matrix. \emph{In} P. K. Krishna (Eds): \emph{Multivariate analysis, volume 2}.New-York, NJ: Academic Press. Lawley, D. N. (1956). Tests of significance for the latent roots of covarianceand correlation matrix. \emph{Biometrika, 43}(1/2), 128-136. } \seealso{ \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/nScree.Rd0000644000176200001440000001633713636702030014324 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nScree.R \name{nScree} \alias{nScree} \title{Non Graphical Cattel's Scree Test} \usage{ nScree(eig = NULL, x = eig, aparallel = NULL, cor = TRUE, model = "components", criteria = NULL, ...) } \arguments{ \item{eig}{depreciated parameter (use x instead): eigenvalues to analyse} \item{x}{numeric: a \code{vector} of eigenvalues, a \code{matrix} of correlations or of covariances or a \code{data.frame} of data} \item{aparallel}{numeric: results of a parallel analysis. Defaults eigenvalues fixed at \eqn{\lambda >= \bar{\lambda}} (Kaiser and related rule) or \eqn{\lambda >= 0} (CFA analysis)} \item{cor}{logical: if \code{TRUE} computes eigenvalues from a correlation matrix, else from a covariance matrix} \item{model}{character: \code{"components"} or \code{"factors"}} \item{criteria}{numeric: by default fixed at \eqn{\bar{\lambda}}. When the \eqn{\lambda}s are computed from a principal component analysis on a correlation matrix, it corresponds to the usual Kaiser \eqn{\lambda >= 1} rule. 
On a covariance matrix or from a factor analysis, it is simply the mean. To apply \eqn{\lambda >= 0}, sometimes used with factor analysis, fix the criteria to \eqn{0}.} \item{...}{variabe: additionnal parameters to give to the \code{cor} or \code{cov} functions} } \value{ \item{Components }{ Data frame for the number of components/factors according to different rules } \item{Components$noc }{ Number of components/factors to retain according to optimal coordinates \emph{oc}} \item{Components$naf }{ Number of components/factors to retain according to the acceleration factor \emph{af}} \item{Components$npar.analysis }{Number of components/factors to retain according to parallel analysis } \item{Components$nkaiser }{ Number of components/factors to retain according to the Kaiser rule } \item{Analysis }{ Data frame of vectors linked to the different rules } \item{Analysis$Eigenvalues }{ Eigenvalues } \item{Analysis$Prop }{ Proportion of variance accounted by eigenvalues } \item{Analysis$Cumu }{ Cumulative proportion of variance accounted by eigenvalues } \item{Analysis$Par.Analysis }{ Centiles of the random eigenvalues generated by the parallel analysis. } \item{Analysis$Pred.eig }{ Predicted eigenvalues by each optimal coordinate regression line } \item{Analysis$OC}{ Critical optimal coordinates \emph{oc}} \item{Analysis$Acc.factor }{ Acceleration factor \emph{af}} \item{Analysis$AF}{ Critical acceleration factor \emph{af}} Otherwise, returns a summary of the analysis. } \description{ The \code{nScree} function returns an analysis of the number of component or factors to retain in an exploratory principal component or factor analysis. The function also returns information about the number of components/factors to retain with the Kaiser rule and the parallel analysis. } \details{ The \code{nScree} function returns an analysis of the number of components/factors to retain in an exploratory principal component or factor analysis. Different solutions are given. The classical ones are the Kaiser rule, the parallel analysis, and the usual scree test (\code{\link{plotuScree}}). Non graphical solutions to the Cattell subjective scree test are also proposed: an acceleration factor (\emph{af}) and the optimal coordinates index \emph{oc}. The acceleration factor indicates where the elbow of the scree plot appears. It corresponds to the acceleration of the curve, i.e. the second derivative. The optimal coordinates are the extrapolated coordinates of the previous eigenvalue that allow the observed eigenvalue to go beyond this extrapolation. The extrapolation is made by a linear regression using the last eigenvalue coordinates and the \eqn{k+1} eigenvalue coordinates. There are \eqn{k-2} regression lines like this. The Kaiser rule or a parallel analysis criterion (\code{\link{parallel}}) must also be simultaneously satisfied to retain the components/factors, whether for the acceleration factor, or for the optimal coordinates. If \eqn{\lambda_i} is the \eqn{i^{th}} eigenvalue, and \eqn{LS_i} is a location statistics like the mean or a centile (generally the followings: \eqn{1^{st}, \ 5^{th}, \ 95^{th}, \ or \ 99^{th}}). The Kaiser rule is computed as: \deqn{ n_{Kaiser} = \sum_{i} (\lambda_{i} \ge \bar{\lambda}).} Note that \eqn{\bar{\lambda}} is equal to 1 when a correlation matrix is used. 
The parallel analysis is computed as: \deqn{n_{parallel} = \sum_{i} (\lambda_{i} \ge LS_i).} The acceleration factor (\eqn{AF}) corresponds to a numerical solution to the elbow of the scree plot: \deqn{n_{AF} \equiv \ If \ \left[ (\lambda_{i} \ge LS_i) \ and \ max(AF_i) \right].} The optimal coordinates (\eqn{OC}) corresponds to an extrapolation of the preceeding eigenvalue by a regression line between the eigenvalue coordinates and the last eigenvalue coordinates: \deqn{n_{OC} = \sum_i \left[(\lambda_i \ge LS_i) \cap (\lambda_i \ge (\lambda_{i \ predicted}) \right].} } \examples{ ## INITIALISATION data(dFactors) # Load the nFactors dataset attach(dFactors) vect <- Raiche # Uses the example from Raiche eigenvalues <- vect$eigenvalues # Extracts the observed eigenvalues nsubjects <- vect$nsubjects # Extracts the number of subjects variables <- length(eigenvalues) # Computes the number of variables rep <- 100 # Number of replications for PA analysis cent <- 0.95 # Centile value of PA analysis ## PARALLEL ANALYSIS (qevpea for the centile criterion, mevpea for the ## mean criterion) aparallel <- parallel(var = variables, subject = nsubjects, rep = rep, cent = cent )$eigen$qevpea # The 95 centile ## NUMBER OF FACTORS RETAINED ACCORDING TO DIFFERENT RULES results <- nScree(x=eigenvalues, aparallel=aparallel) results summary(results) ## PLOT ACCORDING TO THE nScree CLASS plotnScree(results) } \references{ Cattell, R. B. (1966). The scree test for the number of factors. \emph{Multivariate Behavioral Research, 1}, 245-276. Dinno, A. (2009). \emph{Gently clarifying the application of Horn's parallel analysis to principal component analysis versus factor analysis}. Portland, Oregon: Portland Sate University. Guttman, L. (1954). Some necessary conditions for common factor analysis. \emph{Psychometrika, 19, 149-162}. Horn, J. L. (1965). A rationale for the number of factors in factor analysis. \emph{Psychometrika, 30}, 179-185. Kaiser, H. F. (1960). The application of electronic computer to factor analysis. \emph{Educational and Psychological Measurement, 20}, 141-151. Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. } \seealso{ \code{\link{plotuScree}}, \code{\link{plotnScree}}, \code{\link{parallel}}, \code{\link{plotParallel}}, } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/nCng.Rd0000644000176200001440000000566513620574733014006 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nCng.r \name{nCng} \alias{nCng} \title{Cattell-Nelson-Gorsuch CNG Indices} \usage{ nCng(x, cor = TRUE, model = "components", details = TRUE, ...) } \arguments{ \item{x}{numeric: a \code{vector} of eigenvalues, a \code{matrix} of correlations or of covariances or a \code{data.frame} of data} \item{cor}{logical: if \code{TRUE} computes eigenvalues from a correlation matrix, else from a covariance matrix} \item{model}{character: \code{"components"} or \code{"factors"}} \item{details}{logical: if \code{TRUE} also returns detains about the computation for each eigenvalue.} \item{...}{variable: additionnal parameters to give to the \code{eigenComputes} function} } \value{ \item{nFactors}{ numeric: number of factors retained by the CNG procedure. 
} \item{details}{ numeric: matrix of the details for each index.} } \description{ This function computes the \emph{CNG} indices for the eigenvalues of a correlation/covariance matrix (Gorsuch and Nelson, 1981; Nasser, 2002, p. 400; Zoski and Jurs, 1993, p. 6). } \details{ Note that the \code{nCng} function is only valid when more than six eigenvalues are used and that these are obtained in the context of a principal component analysis. For a factor analysis, some eigenvalues could be negative and the function will stop and give an error message. The slope of all possible sets of three adjacent eigenvalues are compared, so \emph{CNG} indices can be applied only when more than six eigenvalues are used. The eigenvalue at which the greatest difference between two successive slopes occurs is the indicator of the number of components/factors to retain. } \examples{ ## SIMPLE EXAMPLE OF A CNG ANALYSIS data(dFactors) eig <- dFactors$Raiche$eigenvalues results <- nCng(eig, details=TRUE) results plotuScree(eig, main=paste(results$nFactors, " factors retained by the CNG procedure", sep="")) } \references{ Gorsuch, R. L. and Nelson, J. (1981). \emph{CNG scree test: an objective procedure for determining the number of factors}. Presented at the annual meeting of the Society for multivariate experimental psychology. Nasser, F. (2002). The performance of regression-based variations of the visual scree for determining the number of common factors. \emph{Educational and Psychological Measurement, 62(3)}, 397-419. Zoski, K. and Jurs, S. (1993). Using multiple regression to determine the number of factors to retain in factor analysis. \emph{Multiple Linear Regression Viewpoints, 20}(1), 5-9. } \seealso{ \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/nMreg.Rd0000644000176200001440000000524213620574733014160 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nMreg.r \name{nMreg} \alias{nMreg} \title{Multiple Regression Procedure to Determine the Number of Components/Factors} \usage{ nMreg(x, cor = TRUE, model = "components", details = TRUE, ...) } \arguments{ \item{x}{numeric: a \code{vector} of eigenvalues, a \code{matrix} of correlations or of covariances or a \code{data.frame} of data (eigenFrom)} \item{cor}{logical: if \code{TRUE} computes eigenvalues from a correlation matrix, else from a covariance matrix} \item{model}{character: \code{"components"} or \code{"factors"}} \item{details}{logical: if \code{TRUE} also returns details about the computation for each eigenvalue.} \item{...}{variable: additionnal parameters to give to the \code{eigenComputes} and \code{cor} or \code{cov} functions} } \value{ \item{nFactors}{ numeric: number of components/factors retained by the \emph{MREG} procedures. } \item{details}{ numeric: matrix of the details for each indices.} } \description{ This function computes the \eqn{\beta} indices, like their associated Student \emph{t} and probability (Zoski and Jurs, 1993, 1996, p. 445). These three values can be used as three different indices for determining the number of components/factors to retain. 
} \details{ When the associated Student \emph{t} test is applied, the following hypothesis is considered: \cr (1) \eqn{\qquad \qquad H_k: \beta (\lambda_1 \ldots \lambda_k) - \beta (\lambda_{k+1} \ldots \lambda_p), (k = 3, \ldots, p-3) = 0} \cr } \examples{ ## SIMPLE EXAMPLE OF A MREG ANALYSIS data(dFactors) eig <- dFactors$Raiche$eigenvalues results <- nMreg(eig) results plotuScree(eig, main=paste(results$nFactors[1], ", ", results$nFactors[2], " or ", results$nFactors[3], " factors retained by the MREG procedures", sep="")) } \references{ Zoski, K. and Jurs, S. (1993). Using multiple regression to determine the number of factors to retain in factor analysis. \emph{Multiple Linear Regression Viewpoints, 20}(1), 5-9. Zoski, K. and Jurs, S. (1996). An objective counterpart to the visual scree test for factor analysis: the standard error scree test. \emph{Educational and Psychological Measurement, 56}(3), 443-451. } \seealso{ \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{multivariate} nFactors/man/diagReplace.Rd0000644000176200001440000000331613620574733015310 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/diagReplace.r \name{diagReplace} \alias{diagReplace} \title{Replacing Upper or Lower Diagonal of a Correlation or Covariance Matrix} \usage{ diagReplace(R, upper = TRUE) } \arguments{ \item{R}{numeric: correlation or covariance matrix} \item{upper}{logical: if \code{TRUE} upper diagonal is replaced with lower diagonal. If \code{FALSE}, lower diagonal is replaced with upper diagonal.} } \value{ \item{R }{ numeric: correlation or covariance matrix } } \description{ The \code{diagReplace} function returns a modified correlation or covariance matrix by replacing upper diagonal with lower diagonal, or lower diagonal with upper diagonal. } \examples{ # ....................................................... # Example from Kim and Mueller (1978, p. 10) # Population: upper diagonal # Simulated sample: lower diagnonal R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, .5600, 1.000, .4749, .2196, .1912, .2979, .4800, .4200, 1.000, .2079, .2010, .2445, .2240, .1960, .1680, 1.000, .4334, .3197, .1920, .1680, .1440, .4200, 1.000, .4207, .1600, .1400, .1200, .3500, .3000, 1.000), nrow=6, byrow=TRUE) # Replace upper diagonal with lower diagonal RU <- diagReplace(R, upper=TRUE) # Replace lower diagonal with upper diagonal RL <- diagReplace(R, upper=FALSE) # ....................................................... 
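# Whichever triangle is copied, the result should be a symmetric matrix,
# which can be verified directly
isSymmetric(as.matrix(RU))
isSymmetric(as.matrix(RL))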
} \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} } \keyword{manip} nFactors/man/eigenFrom.Rd0000644000176200001440000000370713620574733015027 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/eigenFrom.r \name{eigenFrom} \alias{eigenFrom} \title{Identify the Data Type to Obtain the Eigenvalues} \usage{ eigenFrom(x) } \arguments{ \item{x}{numeric: a \code{vector} of eigenvalues, a \code{matrix} of correlations or of covariances or a \code{data.frame} of data} } \value{ character: return the data type to obtain the eigenvalues: \code{"eigenvalues"}, \code{"correlation"} or \code{"data"} } \description{ The \code{eigenFrom} function identifies the data type from which to obtain the eigenvalues. The function is used internally in many functions of the \pkg{nFactors} package to be able to apply these to a vector of eigenvalues, a matrix of correlations or covariance or a \code{data.frame}. } \examples{ # ....................................................... # Different data types # Examples of adequate data sources # Vector of eigenvalues data(dFactors) x1 <- dFactors$Cliff1$eigenvalues eigenFrom(x1) # Data from a data.frame x2 <- data.frame(matrix(20*rnorm(100), ncol=5)) eigenFrom(x2) # From a covariance matrix x3 <- cov(x2) eigenFrom(x3) # From a correlation matrix x4 <- cor(x2) eigenFrom(x4) # Examples of inadequate data sources: not run because of errors generated # x0 <- c(2,1) # Error: not enough eigenvalues # eigenFrom(x0) # x2 <- matrix(x1, ncol=5) # Error: non a symetric covariance matrix # eigenFrom(x2) # eigenFrom(x3[,(1:2)]) # Error: not enough variables # x6 <- table(x5) # Error: not a valid data class # eigenFrom(x6) # ....................................................... } \author{ Gilles Raiche \cr Centre sur les Applications des Modeles de Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr \email{raiche.gilles@uqam.ca} \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege \cr \email{David.Magis@ulg.ac.be} } \keyword{multivariate} nFactors/TODO0000644000176200001440000000127213635161144012527 0ustar liggesusers ****************************************************** * * * Future Developments for the nFactors Package * * * ****************************************************** o Use of the polycor package to take in account correlation matrices from discrete variables. o Add a gui, probably from the fgui package. o A global function to simplify estimations of the number of factors/dimensions. ****************************************************** ****************************************************** nFactors/DESCRIPTION0000644000176200001440000000252713637562616013564 0ustar liggesusersPackage: nFactors Type: Package Title: Parallel Analysis and Other Non Graphical Solutions to the Cattell Scree Test Version: 2.4.1 Date: 2020-03-27 Author: Gilles Raiche (Universite du Quebec a Montreal) and David Magis (Universite de Liege) Maintainer: Gilles Raiche Description: Indices, heuristics and strategies to help determine the number of factors/components to retain: 1. Acceleration factor (af with or without Parallel Analysis); 2. Optimal Coordinates (noc with or without Parallel Analysis); 3. Parallel analysis (components, factors and bootstrap); 4. lambda > mean(lambda) (Kaiser, CFA and related); 5. Cattell-Nelson-Gorsuch (CNG); 6. Zoski and Jurs multiple regression (b, t and p); 7. 
Zoski and Jurs standard error of the regression coeffcient (sescree); 8. Nelson R2; 9. Bartlett khi-2; 10. Anderson khi-2; 11. Lawley khi-2 and 12. Bentler-Yuan khi-2. License: GPL (>= 3.5.0) Encoding: UTF-8 LazyData: true Depends: R (>= 3.5.0), lattice Imports: stats, MASS, psych RoxygenNote: 6.1.1 Suggests: testthat NeedsCompilation: no Packaged: 2020-03-27 21:00:05 UTC; Gilles Repository: CRAN Date/Publication: 2020-03-28 05:50:06 UTC nFactors/NEWS0000644000176200001440000001250513635160727012545 0ustar liggesusers ****************************************************** * * * Changes and Developments in the nFactors Package * * * ****************************************************** ------------------------------------------ - Changes in nFactors 2.4.0 (2020-03-20) - ------------------------------------------ Use of the function methodss::class1() instead of the function class(). Transition to Roxygen documentation. ------------------------------------------ - Changes in nFactors 2.3.3 (2011-12-16) - ------------------------------------------ Changes done because the functions mean() and sd() in the moreStats() function were depreciated with data.frames. xMean <- sapply(x, mean) # mean(x) before change xSd <- sapply(x, sd) # sd(x) before change ------------------------------------------ - Changes in nFactors 2.3.2 (2010-10-04) - ------------------------------------------ Changes in this version are stricly limited to orthographical correction of the documentation. ------------------------------------------ - Changes in nFactors 2.3.1 (2009-14-10) - ------------------------------------------ o Two bugs were fixed inside the function structureSim. The first one, at line 21, is related to permutation analysis where the quantile paramater of the function call stayed fixed at 0.95 even if the value is different in the call. The variable quantile is now formally use in the call to eigenBootParallel. The second bug was realted to an error in the column names of the data.frame return from structureSim: the per and mean.eig columns were unfortunalely inrerchanged. Version 2.3.1 fixed it. o Bug fixing was the occasion to introduce a new function used for a paper proposed to Behaviormetrika, studySim. But this function is useful for many other simulation settings. ---------------------------------------- - Changes in nFactors 2.3 (2009-15-09) - ---------------------------------------- This version of nFactors is a major upgrade and so presents important additions and modifications. Care was taken to not modified parameters from version 2.2 calls so that functions and packages already requiring nFactors will yet operate correctly in the future. But like with all mojor upgrades, care must be taken and it is recommanded that developpers verify their results. All the future upgrades won't have this potentiel problems. o Many new procedures to determine the number of components or factors to retain are added: permutation and bootstrap parallel analysis, CNG, Bentler and Yuan, Bartlett, Anderson, Lawley, Zosky and Jurs, etc. o Care is taken to uniformise the labelling of new functions and new variables. According to the Java coding practice, with this labelling, the names begin with a small character, and capitals are used inside for added concepts. o It is now possible to do most of the nfactors package analysis on a covariance matrix. o It is now possible to do most of the nfactors package analysis in the CFA context. o The permutation parallel analysis of Buja and Eyuboglu (1992) is added. 
o It is now possible to bootstrap the eigenvalues from an empirical data matrix.

 o New heuristic numerical indices are added to determine the number of components/factors to retain: CNG, Zoski and Jurs multiple regression, Zoski and Jurs standard error of the scree, and Nelson R.

 o Likelihood ratio tests are added: Bartlett, Anderson, Lawley, and Bentler and Yuan chi-squared.

 o The eigenComputes function computes eigenvalues conditional on the class of the object from which the data come: a vector of eigenvalues, a correlation/covariance matrix, or data from a data.frame.

 o The eigenFrom function determines the class of the object.

 o The corFA function is added to insert communalities in the diagonal of a correlation or a covariance matrix.

 o The makeCor function creates a full correlation/covariance matrix from a matrix with lower part filled and upper part with zeros.

 o Functions are added to generate a factor structure (generateStructure) and to simulate data and correlation matrices from a predefined factor structure (structureSim).

 o A function, moreStats, is added to compute additional statistics on a numeric data.frame.

 o Utility functions for \code{nScree} class objects were implemented: is.nScree, print.nScree, plot.nScree and summary.nScree.

 ---------------------------------------- - Changes in nFactors 2.2 (2009-02-06) - ----------------------------------------

 o Considering the instability of the function factanal with ill-conditioned correlation matrices, new functions for computing factor analysis are added: componentAxis, iterativePrincipalAxis, principalAxis and principalComponents.

 o The diagReplace function replaces the upper or the lower diagonal of a correlation matrix with the respective lower or upper diagonal.

 o The rRecovery function is added to verify the quality of the recovery of the initial correlation matrix. nFactors/R/0000755000176200001440000000000013636702004012233 5ustar liggesusersnFactors/R/bentlerParameters.r0000644000176200001440000002360513637464427016116 0ustar liggesusers#' Bentler and Yuan's Computation of the LRT Index and the Linear Trend #' Coefficients #' #' This function computes Bentler and Yuan's (1996, 1998) \emph{LRT} index #' for the linear trend in eigenvalues of a covariance matrix. The related #' \eqn{\chi^2} and \emph{p}-value are also computed. This function is #' generally called from the \code{nBentler} function, but it can also be used #' to graph the linear trend function and to study its behavior. #' #' The implemented Bentler and Yuan procedure must be used with care because #' the minimized function is not always stable. In many cases, constraints must #' be applied to obtain a solution. The actual implementation applies such #' constraints by default, but the user can modify them. 
#' #' The hypothesis tested (Bentler and Yuan, 1996, equation 10) is: \cr \cr #' #' (1) \eqn{\qquad \qquad H_k: \lambda_{k+i} = \alpha + \beta x_i, (i = 1, #' \ldots, q)} \cr #' #' The solution of the following simultaneous equations is needed to find #' \eqn{(\alpha, \beta) \in} \cr #' #' (2) \eqn{\qquad \qquad f(x) = \sum_{i=1}^q \frac{ [ \lambda_{k+j} - N \alpha #' + \beta x_j ] x_j}{(\alpha + \beta x_j)^2} = 0} \cr \cr #' #' and \eqn{\qquad \qquad g(x) = \sum_{i=1}^q \frac{ \lambda_{k+j} - N \alpha + #' \beta x_j x_j}{(\alpha + \beta x_j)^2} = 0} \cr #' #' The solution to this system of equations was implemented by minimizing the #' following equation: \cr #' #' (3) \eqn{\qquad \qquad (\alpha, \beta) \in \inf{[h(x)]} = \inf{\log{[f(x)^2 #' + g(x)^2]}}} \cr #' #' The likelihood ratio test \eqn{LRT} proposed by Bentler and Yuan (1996, #' equation 7) follows a \eqn{\chi^2} probability distribution with \eqn{q-2} #' degrees of freedom and is equal to: \cr #' #' (4) \eqn{\qquad \qquad LRT = N(k - p)\left\{ {\ln \left( {{n \over N}} #' \right) + 1} \right\} - N\sum\limits_{j = k + 1}^p {\ln \left\{ {{{\lambda #' _j } \over {\alpha + \beta x_j }}} \right\}} + n\sum\limits_{j = k + 1}^p #' {\left\{ {{{\lambda _j } \over {\alpha + \beta x_j }}} \right\}} } \cr #' #' With \eqn{p} being the number of #' eigenvalues, \eqn{k} the number of #' eigenvalues to test, \eqn{q} the \eqn{p-k} remaining eigenvalues, \eqn{N} #' the sample size, and \eqn{n = N-1}. Note that there is an error in the #' Bentler and Yuan equation, the variables \eqn{N} and \eqn{n} being inverted #' in the preceding equation (4). #' #' A better strategy proposed by Bentler and Yuan (1998) is to use a minimized #' \eqn{\chi^2} solution. This strategy will be implemented in a future version #' of the \pkg{nFactors} package. #' #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of #' correlations or of covariances or a \code{data.frame} of data #' @param N numeric: number of subjects. #' @param nFactors numeric: number of components to test. #' @param log logical: if \code{TRUE} the minimization is applied on the log #' values. #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation #' matrix, else from a covariance matrix #' @param minPar numeric: minimums for the coefficients of the linear trend. #' @param maxPar numeric: maximums for the coefficients of the linear trend. #' @param resParx numeric: restriction on the \eqn{\alpha} coefficient (x) to #' graph the function to minimize. #' @param resPary numeric: restriction on the \eqn{\beta} coefficient (y) to #' graph the function to minimize. #' @param graphic logical: if \code{TRUE} plots the minimized function #' \code{"wireframe"}, \code{"contourplot"} or \code{"levelplot"}. #' @param resolution numeric: resolution of the 3D graph (number of points from #' \eqn{\alpha} and from \eqn{\beta}). #' @param typePlot character: plots the minimized function according to a 3D #' plot: \code{"wireframe"}, \code{"contourplot"} or \code{"levelplot"}. #' @param ... variable: additional parameters for the \code{"wireframe"}, #' \code{"contourplot"} or \code{"levelplot"} \code{lattice} functions. Also #' additional parameters for the \code{eigenFrom} function. #' #' @return \item{nFactors}{ numeric: vector of the number of factors retained #' by Bentler and Yuan's procedure. 
} \item{details}{ numeric: matrix of #' the details of the computation.} #' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege #' \cr \email{David.Magis@@ulg.ac.be} #' @seealso \code{\link{nBartlett}}, \code{\link{nBentler}} #' @references #' Bentler, P. M. and Yuan, K.-H. (1996). Test of linear trend in #' eigenvalues of a covariance matrix with application to data analysis. #' \emph{British Journal of Mathematical and Statistical Psychology, 49}, #' 299-312. #' #' Bentler, P. M. and Yuan, K.-H. (1998). Test of linear trend in the smallest #' eigenvalues of the correlation matrix. \emph{Psychometrika, 63}(2), 131-144. # #' @importFrom lattice wireframe contourplot levelplot #' @importFrom stats nlminb lm pchisq #' @importFrom graphics abline #' @import lattice #' @export #' @keywords multivariate #' @examples #' ## ................................................ #' ## SIMPLE EXAMPLE OF THE BENTLER AND YUAN PROCEDURE #' #' # Bentler (1996, p. 309) Table 2 - Example 2 ............. #' n=649 #' bentler2<-c(5.785, 3.088, 1.505, 0.582, 0.424, 0.386, 0.360, 0.337, 0.303, #' 0.281, 0.246, 0.238, 0.200, 0.160, 0.130) #' #' results <- nBentler(x=bentler2, N=n, details=TRUE) #' results #' #' # Two different figures to verify the convergence problem identified with #' # the 2th component #' bentlerParameters(x=bentler2, N=n, nFactors= 2, graphic=TRUE, #' typePlot="contourplot", #' resParx=c(0,9), resPary=c(0,9), cor=FALSE) #' #' bentlerParameters(x=bentler2, N=n, nFactors= 4, graphic=TRUE, drape=TRUE, #' resParx=c(0,9), resPary=c(0,9), #' scales = list(arrows = FALSE) ) #' #' plotuScree(x=bentler2, model="components", #' main=paste(results$nFactors, #' " factors retained by the Bentler and Yuan's procedure (1996, p. 309)", #' sep="")) #' # ........................................................ #' #' # Bentler (1998, p. 140) Table 3 - Example 1 ............. #' n <- 145 #' example1 <- c(8.135, 2.096, 1.693, 1.502, 1.025, 0.943, 0.901, 0.816, #' 0.790,0.707, 0.639, 0.543,0.533, 0.509, 0.478, 0.390, #' 0.382, 0.340, 0.334, 0.316, 0.297,0.268, 0.190, 0.173) #' #' results <- nBentler(x=example1, N=n, details=TRUE) #' results #' #' # Two different figures to verify the convergence problem identified with #' # the 10th component #' bentlerParameters(x=example1, N=n, nFactors= 10, graphic=TRUE, #' typePlot="contourplot", #' resParx=c(0,0.4), resPary=c(0,0.4)) #' #' bentlerParameters(x=example1, N=n, nFactors= 10, graphic=TRUE, drape=TRUE, #' resParx=c(0,0.4), resPary=c(0,0.4), #' scales = list(arrows = FALSE) ) #' #' plotuScree(x=example1, model="components", #' main=paste(results$nFactors, #' " factors retained by the Bentler and Yuan's procedure (1998, p. 140)", #' sep="")) #' # ........................................................ bentlerParameters <- function(x, N, nFactors, log=TRUE, cor=TRUE, minPar=c(min(lambda) - abs(min(lambda)) +.001, 0.001), maxPar=c(max(lambda), lm(lambda ~ I(length(lambda):1))$coef[2]), resParx=c(0.01, 2), resPary=c(0.01, 2), graphic=TRUE, resolution=30, typePlot="wireframe", ...){ stopMessage <- paste("\n These indices are only valid with a principal component solution.\n", " ...................... So, only positive eigenvalues are permitted.\n", sep="") lambda <- eigenComputes(x, cor=cor, ...) 
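  # Eigenvalues are first recovered from the input (vector, correlation or
  # covariance matrix, or raw data); the procedure is only defined for a
  # principal component solution, so any negative eigenvalue stops the
  # computation below with the message defined above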
if (length(which(lambda <0 )) > 0) {cat(stopMessage);stop()} k <- nFactors p <- length(lambda) q <- p-k i <- 1:q x <- q-i l <- lambda[k+i] n <- N - 1 # Bentler (1996, p. 133) maximization of equations 8 and 9 f1 <- function(n,l,x,alpha,beta) sum((n*l-(n+1)*(alpha+beta*x))/((alpha+beta*x)^2)) f2 <- function(n,l,x,alpha,beta) sum((n*l-(n+1)*(alpha+beta*x))*x/((alpha+beta*x)^2)) f <- function(alpha,beta) f1(n,l,x,alpha,beta)^2+f2(n,l,x,alpha,beta)^2 if (log == FALSE) F <- function(y) f(y[1],y[2]) else F <- function(y) log(f(y[1],y[2])) figure <- NULL if (graphic == TRUE) { p1 <- seq(resParx[1], resParx[2], length=resolution) p2 <- seq(resPary[1], resPary[2], length=resolution) data <- expand.grid(Alpha = p1, Beta = p2) data <- data.frame(data, y=numeric(length(data$Alpha))) for( i in 1:length(data$Alpha)) data$y[i] <- F(c(data$Alpha[i],data$Beta[i])) if (log == FALSE) zlab <- "y" else zlab <- "log(y)" if (typePlot == "wireframe") figure <- wireframe( y ~ Alpha * Beta, data=data, zlab=zlab, ...) if (typePlot == "contourplot") figure <- contourplot(y ~ Alpha * Beta, data=data, region=TRUE, ...) if (typePlot == "levelplot") figure <- levelplot( y ~ Alpha * Beta, data=data, region=TRUE, ...) } res <- nlminb(objective=F,start=lm(l~x)$coefficients,lower=c(minPar[1],minPar[2]),upper=c(maxPar[1],maxPar[2])) para <- res$par[1] parb <- res$par[2] # Bentler (1996, p. 133) equation 7 # !!! Warning: Bentler and Yuan (1998) were in error for the definition of LRT !!! # !!! So N and n must be inversed in the first logarithm !!! lrt <- N*(k-p)*(log(n/N)+1)-N*sum(log(lambda[(k+1):p]/(para+parb*x))) + n*sum(lambda[(k+1):p]/(para+parb*x)) df <- q-2 resp <- list(convergence=res$convergence, figure=figure, coefficients=res$par, lrt=lrt, df=df,k=k,p.value=1-pchisq(lrt,df)) names(resp$coefficients)<-c("alpha","beta") return(resp) } nFactors/R/nBartlett.r0000644000176200001440000002134613620625337014371 0ustar liggesusers#' #' Bartlett, Anderson and Lawley Procedures to Determine the Number of Components/Factors #' #' This function computes the Bartlett, Anderson and Lawley indices for determining the #' number of components/factors to retain. #' @details Note: the latex formulas are available only in the pdf version of this help file. #' #' The hypothesis tested is: \cr #' #' (1) \eqn{\qquad \qquad H_k: \lambda_{k+1} = \ldots = \lambda_p} \cr #' #' This hypothesis is verified by the application of different version of a #' \eqn{\chi^2} test with different values for the degrees of freedom. #' Each of these tests shares the compution of a \eqn{V_k} value: \cr #' #' (2) \eqn{\qquad \qquad V_k = #' \prod\limits_{i = k + 1}^p {\left\{ {{{\lambda _i } #' \over {{\raise0.7ex\hbox{$1$} \!\mathord{\left/ #' {\vphantom {1 q}}\right.\kern-\nulldelimiterspace} #' \!\lower0.7ex\hbox{$q$}}\sum\limits_{i = k + 1}^p {\lambda _i } }}} \right\}} #' } #' #' \eqn{p} is the number of eigenvalues, \eqn{k} the number of eigenvalues to test, #' and \eqn{q} the \eqn{p-k} remaining eigenvalues. \eqn{n} is equal to the sample size #' minus 1 (\eqn{n = N-1}). \cr #' #' The Anderson statistic is distributed as a \eqn{\chi^2} with \eqn{(q + 2)(q - 1)/2} degrees #' of freedom and is equal to: \cr #' #' (3) \eqn{\qquad \qquad - n\log (V_k ) \sim \chi _{(q + 2)(q - 1)/2}^2 } \cr #' #' An improvement of this statistic from Bartlett (Bentler, and Yuan, 1996, p. 
300; #' Horn and Engstrom, 1979, equation 8) is distributed as a \eqn{\chi^2} #' with \eqn{(q)(q - 1)/2} degrees of freedom and is equal to: \cr #' #' (4) \eqn{\qquad \qquad - \left[ {n - k - {{2q^2 + q + 2} \over {6q}}} #' \right]\log (V_k ) \sim \chi _{(q + 2)(q - 1)/2}^2 } \cr #' #' Finally, Anderson (1956) and James (1969) proposed another statistic. \cr #' #' (5) \eqn{\qquad \qquad - \left[ {n - k - {{2q^2 + q + 2} \over {6q}} #' + \sum\limits_{i = 1}^k {{{\bar \lambda _q^2 } \over {\left( {\lambda _i #' - \bar \lambda _q } \right)^2 }}} } \right]\log (V_k ) \sim \chi _{(q + 2)(q - 1)/2}^2 } \cr #' #' Bartlett (1950, 1951) proposed a correction to the degrees of freedom of these \eqn{\chi^2} after the #' first significant test: \eqn{(q+2)(q - 1)/2}. \cr #' #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of correlations or of covariances or a \code{data.frame} of data (eigenFrom) #' @param N numeric: number of subjects #' @param alpha numeric: statistical significance level #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation matrix, else from a covariance matrix #' @param details logical: if \code{TRUE} also returns details about the computation for each eigenvalue #' @param correction logical: if \code{TRUE} uses a correction for the degrees of freedom after the first eigenvalue #' @param ... variable: additional parameters to give to the \code{cor} or \code{cov} functions #' @return \item{nFactors}{numeric: vector of the number of factors retained by the Bartlett, Anderson and Lawley procedures.} #' @return \item{details}{numeric: matrix of the details for each index.} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} #' #' @references #' Anderson, T. W. (1963). Asymptotic theory for principal component analysis. \emph{Annals of Mathematical Statistics, 34}, 122-148. #' #' Bartlett, M. S. (1950). Tests of significance in factor analysis. \emph{British Journal of Psychology, 3}, 77-85. #' #' Bartlett, M. S. (1951). A further note on tests of significance. \emph{British Journal of Psychology, 4}, 1-2. #' #' Bentler, P. M. and Yuan, K.-H. (1996). Test of linear trend in eigenvalues of a covariance matrix with application to data analysis. #' \emph{British Journal of Mathematical and Statistical Psychology, 49}, 299-312. #' #' Bentler, P. M. and Yuan, K.-H. (1998). Test of linear trend in the smallest #' eigenvalues of the correlation matrix. \emph{Psychometrika, 63}(2), 131-144. #' #' Horn, J. L. and Engstrom, R. (1979). Cattell's scree test in relation to #' Bartlett's chi-square test and other observations on the number of factors #' problem. \emph{Multivariate Behavioral Research, 14}(3), 283-300. #' #' James, A. T. (1969). Test of equality of the latent roots of the covariance #' matrix. \emph{In} P. K. Krishna (Eds): \emph{Multivariate analysis, volume 2}. New York, NY: Academic Press. #' #' Lawley, D. N. (1956). Tests of significance for the latent roots of covariance and correlation matrices. \emph{Biometrika, 43}(1/2), 128-136. #' #' @export # #' @importFrom stats pchisq #' @keywords multivariate #' @examples #' ## ................................................ 
#' ## SIMPLE EXAMPLE OF A BARTLETT PROCEDURE #' #' data(dFactors) #' eig <- dFactors$Raiche$eigenvalues #' #' results <- nBartlett(x=eig, N= 100, alpha=0.05, details=TRUE) #' results #' #' plotuScree(eig, main=paste(results$nFactors[1], ", ", #' results$nFactors[2], " or ", #' results$nFactors[3], #' " factors retained by the LRT procedures", #' sep="")) #' nBartlett <- function(x, N, alpha=0.05, cor=TRUE, details=TRUE, correction=TRUE, ...) { stopMessage <- paste("\n These indices are only valid with a principal component solution.\n", " ...................... So, only positive eigenvalues are permitted.\n", sep="") x <- eigenComputes(x, cor=cor, ...) if (length(which(x<0)) > 0) {cat(stopMessage);stop()} n <- length(x) detail <- NULL bartlett.n <- anderson.n <- lawley.n <- 0 bartlett <- bartlett.chi <- bartlett.df <- bartlett.p <- numeric(n) anderson.chi <- anderson.df <- anderson.p <- numeric(n) lawley.chi <- lawley.df <- lawley.p <- numeric(n) for (k in 0:(n-1)) { i <- k+1 bartlett[i] <- prod(x[(k+1):n]) / (sum(x[(k+1):n])/(n-k))^(n-k) # From Horn and Engstrom (1979) bartlett.chi[i] <- -(N - 1 - ((2*n+5)/6) - ((2*k)/3)) * log(bartlett[i]) bartlett.df[i] <- .5 * (n-k) * (n-k-1) # Bartlett without correction, from Horn and Engstrom (1979, p. 291, equation 8) if (correction==TRUE & bartlett.n > 0) bartlett.df[i] <- .5 * (n-k+2) * (n-k-1) # From Bentler and Yuan (1996, p. 300) bartlett.p[i] <- pchisq(bartlett.chi[i] , bartlett.df[i], lower.tail = FALSE) # Conditions to stop when non-significant tests are obtained anderson.chi[i] <- -N * log(bartlett[i]) # From Bentler and Yuan (1996, p. 300, equations 3-4) anderson.df[i] <- .5 * (n-k+2) * (n-k-1) # From Bentler and Yuan (1996, p. 300) anderson.p[i] <- pchisq(anderson.chi[i] , anderson.df[i], lower.tail = FALSE) lMean <- mean(x[(k+1):n]) lawley.chi[i] <- -(N - 1 - ((2*n+5)/6) - ((2*k)/3) + sum((lMean^2)/((x[k]+lMean)^2))) * log(bartlett[i]) # From Bentler and Yuan (1996, p. 300, equation 6) lawley.df[i] <- .5 * (n-k) * (n-k-1) # From Horn and Engstrom (1979, p. 291, equation 8) lawley.p[i] <- pchisq(lawley.chi[i] , lawley.df[i], lower.tail = FALSE) # print(c(bartlett[i], bartlett.chi[i], bartlett.df[i], bartlett.p[i]),2) ############ TEST ############# if (i == 1) { bartlett.n <- bartlett.n + as.numeric(bartlett.p[i] <= alpha) anderson.n <- anderson.n + as.numeric(anderson.p[i] <= alpha) lawley.n <- lawley.n + as.numeric(lawley.p[i] <= alpha) } if (i > 1) { if(bartlett.p[i-1] <= 0.05) bartlett.n <- bartlett.n + as.numeric(bartlett.p[i] <= alpha) if(anderson.p[i-1] <= 0.05) anderson.n <- anderson.n + as.numeric(anderson.p[i] <= alpha) if(lawley.p[i-1] <= 0.05) lawley.n <- lawley.n + as.numeric(lawley.p[i] <= alpha) } } if (bartlett.n == 0) bartlett.n <- n # If no test is significant, retain all components if (anderson.n == 0) anderson.n <- n if (lawley.n == 0) lawley.n <- n if (details == TRUE) detail <- data.frame(v=(1:(n)),values=x[1:(n)], bartlett, bartlett.chi, bartlett.df, bartlett.p, anderson.chi, anderson.df, anderson.p, lawley.chi, lawley.df, lawley.p) res <- list(detail=detail, nFactors=c(bartlett=bartlett.n, anderson=anderson.n, lawley=lawley.n)) class(res) <- c("nFactors","list") return(res) } nFactors/R/generateStructure.r0000644000176200001440000001226613621045614016141 0ustar liggesusers#' Generate a Factor Structure Matrix #' #' The \code{generateStructure} function returns a \emph{mjc} factor structure matrix. #' The number of variables per major factor, \emph{pmjc}, is the same for each factor. 
#' The number of variables \emph{var} must be divisible by the number of major factors \emph{mjc}. #' The arguments are strongly inspired by Zwick and Velicer's (1986, p. 435-436) methodology. #' #' @param var numeric: number of variables #' @param mjc numeric: number of major factors (factors with practical significance) #' @param pmjc numeric: number of variables that load significantly on each major factor #' @param loadings numeric: loadings of the significant variables on each major factor #' @param unique numeric: loadings of the non-significant variables on each major factor #' @return values numeric matrix: factor structure #' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege #' \cr \email{David.Magis@@ulg.ac.be} #' @export #' @importFrom psych sim.structure #' @seealso \code{\link{principalComponents}}, \code{\link{iterativePrincipalAxis}}, \code{\link{rRecovery}} #' @references #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' #' Zwick, W. R. and Velicer, W. F. (1986). Comparison of five rules for #' determining the number of components to retain. \emph{Psychological Bulletin, 99}, 432-442. #' @keywords multivariate #' @examples #' # ....................................................... #'# Example inspired by Zwick and Velicer (1986, table 2, p. 437) #'## ................................................................... #'unique=0.2; loadings=0.5 #' zwick1 <- generateStructure(var=36, mjc=6, pmjc= 6, loadings=loadings, #' unique=unique) #'zwick2 <- generateStructure(var=36, mjc=3, pmjc=12, loadings=loadings, #' unique=unique) #'zwick3 <- generateStructure(var=72, mjc=9, pmjc= 8, loadings=loadings, #' unique=unique) #'zwick4 <- generateStructure(var=72, mjc=6, pmjc=12, loadings=loadings, #' unique=unique) #'sat=0.8 #'## ................................................................... #'zwick5 <- generateStructure(var=36, mjc=6, pmjc= 6, loadings=loadings, #' unique=unique) #'zwick6 <- generateStructure(var=36, mjc=3, pmjc=12, loadings=loadings, #' unique=unique) #'zwick7 <- generateStructure(var=72, mjc=9, pmjc= 8, loadings=loadings, #' unique=unique) #'zwick8 <- generateStructure(var=72, mjc=6, pmjc=12, loadings=loadings, #' unique=unique) #'## ................................................................... 
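#'# Quick sanity check (an added sketch): each major factor should show exactly
#'# pmjc salient loadings, i.e. pmjc values equal to 'loadings' per column
#'colSums(zwick5 == loadings)
#'colSums(zwick8 == loadings)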
#' #'# nsubjects <- c(72, 144, 180, 360) #'# require(psych) #'# Produce an usual correlation matrix from a congeneric model #'nsubjects <- 72 #'mzwick5 <- psych::sim.structure(fx=as.matrix(zwick5), n=nsubjects) #'mzwick5$r #' #'# Factor analysis: recovery of the factor structure #'iterativePrincipalAxis(mzwick5$model, nFactors=6, #' communalities="ginv")$loadings #'iterativePrincipalAxis(mzwick5$r , nFactors=6, #' communalities="ginv")$loadings #'factanal(covmat=mzwick5$model, factors=6) #'factanal(covmat=mzwick5$r , factors=6) #' #'# Number of components to retain #'eigenvalues <- eigen(mzwick5$r)$values #'aparallel <- parallel(var = length(eigenvalues), #' subject = nsubjects, #' rep = 30, #' quantile = 0.95, #' model="components")$eigen$qevpea #'results <- nScree(x = eigenvalues, #' aparallel = aparallel) #'results$Components #'plotnScree(results) #' #'# Number of factors to retain #'eigenvalues.fa <- eigen(corFA(mzwick5$r))$values #'aparallel.fa <- parallel(var = length(eigenvalues.fa), #' subject = nsubjects, #' rep = 30, #' quantile = 0.95, #' model="factors")$eigen$qevpea #'results.fa <- nScree(x = eigenvalues.fa, #' aparallel = aparallel.fa, #' model ="factors") #'results.fa$Components #'plotnScree(results.fa) #'# ...................................................... #' #' generateStructure <- function(var, mjc, pmjc, loadings, unique) { if (var/mjc != ceiling(var/mjc)) stop("Bad pmjc value") fload <- matrix(unique, ncol=mjc, nrow=var) for (i in 1:mjc) { if (i == 1) fload[i:(pmjc),i] <- loadings min <- ((i-1)*pmjc+1) max <- ((i-1)*pmjc+pmjc) if (min > dim(fload)[1]) min <- dim(fload)[1] if (max > dim(fload)[1]) max <- dim(fload)[1] if (i > 1) fload[min:max,i] <- loadings if (min > dim(fload)[1]) fload[dim(fload)[1],i] <- unique } return(data.frame(fload)) } nFactors/R/nScree.R0000644000176200001440000002526013636701602013605 0ustar liggesusers#' Non Graphical Cattel's Scree Test #' #' The \code{nScree} function returns an analysis of the number of component or #' factors to retain in an exploratory principal component or factor analysis. #' The function also returns information about the number of components/factors #' to retain with the Kaiser rule and the parallel analysis. #' #' The \code{nScree} function returns an analysis of the number of #' components/factors to retain in an exploratory principal component or factor #' analysis. Different solutions are given. The classical ones are the Kaiser #' rule, the parallel analysis, and the usual scree test #' (\code{\link{plotuScree}}). Non graphical solutions to the Cattell #' subjective scree test are also proposed: an acceleration factor (\emph{af}) #' and the optimal coordinates index \emph{oc}. The acceleration factor #' indicates where the elbow of the scree plot appears. It corresponds to the #' acceleration of the curve, i.e. the second derivative. The optimal #' coordinates are the extrapolated coordinates of the previous eigenvalue that #' allow the observed eigenvalue to go beyond this extrapolation. The #' extrapolation is made by a linear regression using the last eigenvalue #' coordinates and the \eqn{k+1} eigenvalue coordinates. There are \eqn{k-2} #' regression lines like this. The Kaiser rule or a parallel analysis #' criterion (\code{\link{parallel}}) must also be simultaneously satisfied to #' retain the components/factors, whether for the acceleration factor, or for #' the optimal coordinates. 
#' #' If \eqn{\lambda_i} is the \eqn{i^{th}} eigenvalue, and \eqn{LS_i} is a #' location statistics like the mean or a centile (generally the followings: #' \eqn{1^{st}, \ 5^{th}, \ 95^{th}, \ or \ 99^{th}}). #' #' The Kaiser rule is computed as: \deqn{ n_{Kaiser} = \sum_{i} (\lambda_{i} #' \ge \bar{\lambda}).} Note that \eqn{\bar{\lambda}} is equal to 1 when a #' correlation matrix is used. #' #' The parallel analysis is computed as: \deqn{n_{parallel} = \sum_{i} #' (\lambda_{i} \ge LS_i).} #' #' The acceleration factor (\eqn{AF}) corresponds to a numerical solution to #' the elbow of the scree plot: \deqn{n_{AF} \equiv \ If \ \left[ (\lambda_{i} #' \ge LS_i) \ and \ max(AF_i) \right].} #' #' The optimal coordinates (\eqn{OC}) corresponds to an extrapolation of the #' preceeding eigenvalue by a regression line between the eigenvalue #' coordinates and the last eigenvalue coordinates: \deqn{n_{OC} = \sum_i #' \left[(\lambda_i \ge LS_i) \cap (\lambda_i \ge (\lambda_{i \ predicted}) #' \right].} #' #' #' @param eig depreciated parameter (use x instead): eigenvalues to analyse #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of #' correlations or of covariances or a \code{data.frame} of data #' @param aparallel numeric: results of a parallel analysis. Defaults #' eigenvalues fixed at \eqn{\lambda >= \bar{\lambda}} (Kaiser and related #' rule) or \eqn{\lambda >= 0} (CFA analysis) #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation #' matrix, else from a covariance matrix #' @param model character: \code{"components"} or \code{"factors"} #' @param criteria numeric: by default fixed at \eqn{\bar{\lambda}}. When the #' \eqn{\lambda}s are computed from a principal component analysis on a #' correlation matrix, it corresponds to the usual Kaiser \eqn{\lambda >= 1} #' rule. On a covariance matrix or from a factor analysis, it is simply the #' mean. To apply \eqn{\lambda >= 0}, sometimes used with factor analysis, fix #' the criteria to \eqn{0}. #' @param ... variabe: additionnal parameters to give to the \code{cor} or #' \code{cov} functions #' #' #' @return #' \item{Components }{ Data frame for the number of components/factors #' according to different rules } \item{Components$noc }{ Number of #' components/factors to retain according to optimal coordinates \emph{oc}} #' \item{Components$naf }{ Number of components/factors to retain according to #' the acceleration factor \emph{af}} \item{Components$npar.analysis }{Number #' of components/factors to retain according to parallel analysis } #' \item{Components$nkaiser }{ Number of components/factors to retain according #' to the Kaiser rule } \item{Analysis }{ Data frame of vectors linked to the #' different rules } \item{Analysis$Eigenvalues }{ Eigenvalues } #' \item{Analysis$Prop }{ Proportion of variance accounted by eigenvalues } #' \item{Analysis$Cumu }{ Cumulative proportion of variance accounted by #' eigenvalues } \item{Analysis$Par.Analysis }{ Centiles of the random #' eigenvalues generated by the parallel analysis. } \item{Analysis$Pred.eig }{ #' Predicted eigenvalues by each optimal coordinate regression line } #' \item{Analysis$OC}{ Critical optimal coordinates \emph{oc}} #' \item{Analysis$Acc.factor }{ Acceleration factor \emph{af}} #' \item{Analysis$AF}{ Critical acceleration factor \emph{af}} Otherwise, #' returns a summary of the analysis. 
#' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' #' @seealso \code{\link{plotuScree}}, \code{\link{plotnScree}}, #' \code{\link{parallel}}, \code{\link{plotParallel}}, #' #' @references #' Cattell, R. B. (1966). The scree test for the number of factors. #' \emph{Multivariate Behavioral Research, 1}, 245-276. #' #' Dinno, A. (2009). \emph{Gently clarifying the application of Horn's parallel #' analysis to principal component analysis versus factor analysis}. Portland, #' Oregon: Portland Sate University. #' #' Guttman, L. (1954). Some necessary conditions for common factor analysis. #' \emph{Psychometrika, 19, 149-162}. #' #' Horn, J. L. (1965). A rationale for the number of factors in factor #' analysis. \emph{Psychometrika, 30}, 179-185. #' #' Kaiser, H. F. (1960). The application of electronic computer to factor #' analysis. \emph{Educational and Psychological Measurement, 20}, 141-151. #' #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' # #' @family nScree #' @export #' @importFrom stats lm coef #' @keywords multivariate #' @examples #' #' ## INITIALISATION #' data(dFactors) # Load the nFactors dataset #' attach(dFactors) #' vect <- Raiche # Uses the example from Raiche #' eigenvalues <- vect$eigenvalues # Extracts the observed eigenvalues #' nsubjects <- vect$nsubjects # Extracts the number of subjects #' variables <- length(eigenvalues) # Computes the number of variables #' rep <- 100 # Number of replications for PA analysis #' cent <- 0.95 # Centile value of PA analysis #' #' ## PARALLEL ANALYSIS (qevpea for the centile criterion, mevpea for the #' ## mean criterion) #' aparallel <- parallel(var = variables, #' subject = nsubjects, #' rep = rep, #' cent = cent #' )$eigen$qevpea # The 95 centile #' #' ## NUMBER OF FACTORS RETAINED ACCORDING TO DIFFERENT RULES #' results <- nScree(x=eigenvalues, aparallel=aparallel) #' results #' summary(results) #' #' ## PLOT ACCORDING TO THE nScree CLASS #' plotnScree(results) #' "nScree" <- function(eig=NULL, x=eig, aparallel = NULL, cor=TRUE, model="components", criteria=NULL, ...) { # Initialisation eig <- eigenComputes(x, cor=cor, model=model, ...) if (is.null(aparallel)) aparallel <- rep(1,length(eig)) # default to 1 in the diagonal nk <- length(eig) k <- 1:nk proportion <- eig/sum(eig) cumulative <- proportion if (is.null(criteria)) criteria <- mean(eig) for (i in 2:nk) cumulative[i] = cumulative[i-1] + proportion[i] proportion[proportion < 0] <- 0# To constraint negative proportions to be zero cond1 <- TRUE; cond2 <- TRUE; i <- 0; pred.eig <- af <- rep(NA,nk) while ((cond1 == TRUE) && (cond2 == TRUE) && (i < nk)) { i <- i + 1 ind <- k[c(i+1,nk)] #### Optimal coordinate based on the next eigenvalue regression (scree) vp.p <- lm(eig[c(i+1,nk)] ~ ind) vp.prec <- pred.eig[i] <- sum(c(1,i)* coef(vp.p)) cond1 <- (eig[i] >= vp.prec) cond2 <- (eig[i] >= aparallel[i]) nc <- i-1 } # Second derivative at the i eigenvalue (acceleration factor, elbow) # See Yakowitz and Szidarovszky (1986, p. 
84) tag <- 1 for (j in 2:(nk-1)) { if (eig[j-1] >= aparallel[j-1]) { af[j] <- (eig[j+1] -2* eig[j]) + eig[j-1] } } if (model == "components") p.vec <- which(eig >= aparallel,TRUE) else p.vec <- which((eig-aparallel)>=0 & eig >= criteria) ###if (model == "components") p.vec <- which(eig >= aparallel,TRUE) else p.vec <- which((eig-aparallel)>=0 & eig > 0) npar <- sum(p.vec == (1:length(p.vec))) nkaiser <- sum(eig >= rep(criteria,nk)) #### if (model == "components") nkaiser <- sum(eig >= rep(criteria,nk)) else nkaiser <- sum(eig >= rep(0,nk)) #### if (model == "components") nkaiser <- sum(eig >= rep(1,nk)) else nkaiser <- sum(eig >= rep(mean(eig),nk)) naf <- which(af == max(af,na.rm=TRUE),TRUE) - 1 # Assure that all the optimal coordinates will be computed for (i in (nc+1):(nk-2)) { ind <- k[c(i+1,nk)] vp.p <- lm(eig[c(i+1,nk)] ~ ind) vp.prec <- pred.eig[i] <- sum(c(1,i)* coef(vp.p)) } # Assure that all the acceleration factors will be computed for (j in 2:(nk-1)) af[j] <- (eig[j+1] - 2 * eig[j]) + eig[j-1] # Return values by the function coc <- rep("",nk); coc[nc] = "(< OC)" caf <- rep("",nk); caf[naf] = "(< AF)" result <- (list(Components = data.frame(noc = nc, naf = naf, nparallel = npar, nkaiser = nkaiser), Analysis = data.frame(Eigenvalues = eig, Prop = proportion, Cumu = cumulative, Par.Analysis = aparallel, Pred.eig = pred.eig, OC = coc, Acc.factor = af, AF = caf), Model = model)) class(result) <- 'nScree' return(result) } nFactors/R/nSeScree.r0000644000176200001440000001137013620625416014133 0ustar liggesusers#' Standard Error Scree and Coefficient of Determination Procedures to #' Determine the Number of Components/Factors #' #' This function computes the \emph{seScree} (\eqn{S_{Y \bullet X}}) indices #' (Zoski and Jurs, 1996) and the coefficient of determination indices of #' Nelson (2005) \eqn{R^2} for determining the number of components/factors to #' retain. #' #' The Zoski and Jurs \eqn{S_{Y \bullet X}} index is the standard error of the #' estimate (predicted) eigenvalues by the regression from the \eqn{(k+1, #' \ldots, p)} subsequent ranks of the eigenvalues. The standard error is #' computed as: #' #' (1) \eqn{\qquad \qquad S_{Y \bullet X} = \sqrt{ \frac{(\lambda_k - #' \hat{\lambda}_k)^2} {p-2} } } \cr #' #' A value of \eqn{1/p} is choosen as the criteria to determine the number of #' components or factors to retain, \emph{p} corresponding to the number of #' variables. #' #' The Nelson \eqn{R^2} index is simply the multiple regresion coefficient of #' determination for the \eqn{k+1, \ldots, p} eigenvalues. Note that Nelson #' didn't give formal prescriptions for the criteria for this index. He only #' suggested that a value of 0.75 or more must be considered. More is to be #' done to explore adequate values. #' #' @param x numeric: eigenvalues. #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation #' matrix, else from a covariance matrix #' @param model character: \code{"components"} or \code{"factors"} #' @param details logical: if \code{TRUE} also returns details about the #' computation for each eigenvalue. #' @param r2limen numeric: criterion value retained for the coefficient of #' determination indices. #' @param ... variable: additionnal parameters to give to the #' \code{eigenComputes} and \code{cor} or \code{cov} functions #' @return \item{nFactors}{ numeric: number of components/factors retained by #' the seScree procedure. 
} \item{details}{ numeric: matrix of the details for #' each index.} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, #' \code{\link{plotnScree}}, \code{\link{plotParallel}} #' #' @references #' Nasser, F. (2002). The performance of regression-based #' variations of the visual scree for determining the number of common factors. #' \emph{Educational and Psychological Measurement, 62(3)}, 397-419. #' #' Nelson, L. R. (2005). Some observations on the scree test, and on #' coefficient alpha. \emph{Thai Journal of Educational Research and #' Measurement, 3(1)}, 1-17. #' #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' #' Zoski, K. and Jurs, S. (1993). Using multiple regression to determine the #' number of factors to retain in factor analysis. \emph{Multiple Linear #' Regression Viewpoints, 20}(1), 5-9. #' #' Zoski, K. and Jurs, S. (1996). An objective counterpart to the visuel scree #' test for factor analysis: the standard error scree. \emph{Educational and #' Psychological Measurement, 56}(3), 443-451. #' @export # #' @importFrom stats sd lm #' @keywords multivariate #' @examples #' #' ## SIMPLE EXAMPLE OF SESCREE AND R2 ANALYSIS #' #' data(dFactors) #' eig <- dFactors$Raiche$eigenvalues #' #' results <- nSeScree(eig) #' results #' #' plotuScree(eig, main=paste(results$nFactors[1], " or ", results$nFactors[2], #' " factors retained by the sescree and R2 procedures", #' sep="")) #' nSeScree <- function(x, cor=TRUE, model="components", details=TRUE, r2limen=0.75, ...) { x <- eigenComputes(x, cor=cor, model=model, ...) detail <- NULL n <- length(x) criteria <- 1/n seScreeCriteria <- R2Criteria <- 0 if (n < 3) stop("The number of variables must be at least 3.") i <- 1 seScree <- R2 <- numeric(n-3) while ((i) <= (n-2)) { xa <- c(i:n) ya <- x[i:n] ma <- lm(ya ~ xa) seScree[i] <- sd(ya)*sqrt((1-summary(ma)$r.squared) * ((length(ya)-1)/(length(ya)-2))) # Howell(2008, p. 253) seScreeCriteria <- seScreeCriteria + as.numeric(seScree[i] > criteria) R2[i] <- summary(ma)$r.squared R2Criteria <- R2Criteria + as.numeric(R2[i] < r2limen) i <- i + 1 } if (details == TRUE) detail <- data.frame(v=(1:(n-2)),values=x[1:(n-2)], seScree, R2) seScree <- seScreeCriteria R2 <- R2Criteria res <- list(detail=detail, nFactors=c(se=seScree, R2=R2)) class(res) <- c("nFactors","list") return(res) } nFactors/R/makeCor.r0000644000176200001440000000316513620574163014012 0ustar liggesusers#' Create a Full Correlation/Covariance Matrix from a Matrix With Lower Part Filled and Upper Part With Zeros #' #' This function creates a full correlation/covariance matrix from a matrix with #' lower part filled and upper part with zeros. #' @param x numeric: matrix #' @return numeric: full correlation matrix #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} #' @export #' @keywords multivariate #' @examples #' ## ................................................ 
#'## LOWER CORRELATION MATRIX WITH ZEROS ON UPPER PART #'## From Gorsuch (table 1.3.1) #'gorsuch <- c( #' 1,0,0,0,0,0,0,0,0,0, #' .6283, 1,0,0,0,0,0,0,0,0, #' .5631, .7353, 1,0,0,0,0,0,0,0, #' .8689, .7055, .8444, 1,0,0,0,0,0,0, #' .9030, .8626, .6890, .8874, 1,0,0,0,0,0, #' .6908, .9028, .9155, .8841, .8816, 1,0,0,0,0, #' .8633, .7495, .7378, .9164, .9109, .8572, 1,0,0,0, #' .7694, .7902, .7872, .8857, .8835, .8884, .7872, 1,0,0, #' .8945, .7929, .7656, .9494, .9546, .8942, .9434, .9000, 1,0, #' .5615, .6850, .8153, .7004, .6583, .7720, .6201, .6141, .6378, 1) #' #'## UPPER CORRELATION MATRIX FILLED WITH UPPER CORRELATION MATRIX #'gorsuch <- makeCor(gorsuch) #'gorsuch makeCor <- function(x) { if (is.matrix(x)) stop("x is not a vector.") upper <- matrix(x,ncol=10, byrow=FALSE) diag(upper) <- 0 lower <- matrix(x,ncol=10, byrow=TRUE) res <- lower + upper return(res) } nFactors/R/nFactorsObjectMethods.r0000644000176200001440000000637513636745415016700 0ustar liggesusers#' Utility Functions for nFactors Class Objects #' #' Utility functions for \code{nFactors} class objects. #' #' # #' @aliases is.nFactors print.nFactors summary.nFactors #' @rdname nFactorsObjectMethods #' #' @param x nFactors: an object of the class nFactors #' @param ... variable: additionnal parameters to give to the \code{print} #' function with \code{print.nFactors} or to the \code{summary} function with #' \code{summary.nFactors} #' @return Generic functions for the nFactors class: #' #' \item{is.nFactors}{ logical: is the object of the class nFactors? } #' \item{print.nFactors }{ numeric: vector of the number of components/factors #' to retain: same as the \code{nFactors} vector from the \code{nFactors} #' object} \item{summary.nFactors }{ data.frame: details of the results from a #' nFactors object: same as the \code{details} data.frame from the #' \code{nFactors} object, but with easier control of the number of decimals #' with the \code{digits} parameter} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{nBentler}}, \code{\link{nBartlett}}, #' \code{\link{nCng}}, \code{\link{nMreg}}, \code{\link{nSeScree}} #' @references #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' #' @export #' @keywords multivariate #' @examples #' #' ## SIMPLE EXAMPLE #' data(dFactors) #' eig <- dFactors$Raiche$eigenvalues #' N <- dFactors$Raiche$nsubjects #' #' res <- nBartlett(eig,N); res; is.nFactors(res); summary(res, digits=2) #' res <- nBentler(eig,N); res; is.nFactors(res); summary(res, digits=2) #' res <- nCng(eig); res; is.nFactors(res); summary(res, digits=2) #' res <- nMreg(eig); res; is.nFactors(res); summary(res, digits=2) #' res <- nSeScree(eig); res; is.nFactors(res); summary(res, digits=2) #' #' ## SIMILAR RESULTS, BUT NOT A nFactors OBJECT #' res <- nScree(eig); res; is.nFactors(res); summary(res, digits=2) #' ## ................................................................. is.nFactors <- function(x) { if (any(class(x) == "nFactors")) return(TRUE) else return(FALSE) } ## ................................................................. ## ................................................................. #' @rdname nFactorsObjectMethods #' @export print.nFactors <- function(x, ...) { if (!is.nFactors(x)) stop("Not a nFactors object") res <- x$nFactors print(res, ...) 
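  # Added note: only the nFactors vector is printed here; summary() on the
  # same object also displays the detail table.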
} ## ................................................................. ## ................................................................. #' @rdname nFactorsObjectMethods #' @param object nFactors: an object of the class nFactors #' @export summary.nFactors <- function(object, ...) { if (!is.nFactors(object)) stop("Not a nFactors object") cat("Report For a nFactors Class \n\n") NextMethod() cat(paste("Details:","\n\n")) print(object$detail, ...) cat(paste("\n\n Number of factors retained by index","\n\n")) print(object$nFactors) } ## ................................................................. nFactors/R/rRecovery.r0000644000176200001440000000661613621044313014403 0ustar liggesusers#' Test of Recovery of a Correlation or a Covariance matrix from a Factor #' Analysis Solution #' #' The \code{rRecovery} function returns a verification of the quality of the #' recovery of the initial correlation or covariance matrix by the factor #' solution. #' #' #' @param R numeric: initial correlation or covariance matrix #' @param loadings numeric: loadings from a factor analysis solution #' @param diagCommunalities logical: if \code{TRUE}, the correlation between #' the initial solution and the estimated one will use a correlation of one in #' the diagonal. If \code{FALSE} (default) the diagonal is not used in the #' computation of this correlation. #' @return \item{R}{ numeric: initial correlation or covariance matrix } #' \item{recoveredR}{ numeric: recovered estimated correlation or covariance #' matrix } \item{difference}{ numeric: difference between initial and #' recovered estimated correlation or covariance matrix} \item{cor}{ numeric: #' Pearson correlation between initial and recovered estimated correlation or #' covariance matrix. Computations depend on the logical value of the #' \code{communalities} argument. } #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{componentAxis}}, \code{\link{iterativePrincipalAxis}}, #' \code{\link{principalAxis}} #' @export #' @importFrom MASS ginv #' @importFrom stats cor #' @keywords utilities #' @examples #' #' # ....................................................... #' # Example from Kim and Mueller (1978, p. 10) #' # Population: upper diagonal #' # Simulated sample: lower diagnonal #' R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, #' .5600, 1.000, .4749, .2196, .1912, .2979, #' .4800, .4200, 1.000, .2079, .2010, .2445, #' .2240, .1960, .1680, 1.000, .4334, .3197, #' .1920, .1680, .1440, .4200, 1.000, .4207, #' .1600, .1400, .1200, .3500, .3000, 1.000), #' nrow=6, byrow=TRUE) #' #' #' # Replace upper diagonal with lower diagonal #' RU <- diagReplace(R, upper=TRUE) #' nFactors <- 2 #' loadings <- principalAxis(RU, nFactors=nFactors, #' communalities="component")$loadings #' rComponent <- rRecovery(RU,loadings, diagCommunalities=FALSE)$cor #' #' loadings <- principalAxis(RU, nFactors=nFactors, #' communalities="maxr")$loadings #' rMaxr <- rRecovery(RU,loadings, diagCommunalities=FALSE)$cor #' #' loadings <- principalAxis(RU, nFactors=nFactors, #' communalities="multiple")$loadings #' rMultiple <- rRecovery(RU,loadings, diagCommunalities=FALSE)$cor #' #' round(c(rComponent = rComponent, #' rmaxr = rMaxr, #' rMultiple = rMultiple), 3) #' # ....................................................... 
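#'# Added sketch (reuses the objects created above): inspect the complete
#'# list returned by rRecovery, not only the recovered correlation
#' str(rRecovery(RU, loadings, diagCommunalities=FALSE))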
#' #' "rRecovery" <- function(R, loadings, diagCommunalities=FALSE) { recoveredR <- loadings %*% t(loadings) recovery <- list(R = R, recoveredR = recoveredR, difference = R - recoveredR) if (diagCommunalities == FALSE) {diag(R) <- NA; diag(recoveredR) <- NA } corr <- cor(c(R),c(recoveredR), use="pairwise.complete.obs") recovery <- list(recovery, cor = corr) return(recovery) } nFactors/R/eigenComputes.r0000644000176200001440000000540213621043463015227 0ustar liggesusers#' Computes Eigenvalues According to the Data Type #' #' The \code{eigenComputes} function computes eigenvalues from the identified data #' type. It is used internally in many #' fonctions of the \pkg{nFactors} package in order to apply these to a vector of #' eigenvalues, a matrix of correlations or covariance or a data frame. #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of #' correlations or of covariances or a \code{data.frame} of data #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation #' matrix, else from a covariance matrix #' @param model character: \code{"components"} or \code{"factors"} #' @param ... variable: additionnal parameters to give to the \code{cor} or #' \code{cov} functions #' @return numeric: return a vector of eigenvalues #' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege #' \cr \email{David.Magis@@ulg.ac.be} #' @export #' @importFrom stats cov cov2cor #' @keywords multivariate #' @examples #' # ....................................................... #' # Different data types #' # Vector of eigenvalues #' data(dFactors) #' x1 <- dFactors$Cliff1$eigenvalues #' eigenComputes(x1) #' #' # Data from a data.frame #' x2 <- data.frame(matrix(20*rnorm(100), ncol=5)) #' eigenComputes(x2, cor=TRUE, use="everything") #' eigenComputes(x2, cor=FALSE, use="everything") #' eigenComputes(x2, cor=TRUE, use="everything", method="spearman") #' eigenComputes(x2, cor=TRUE, use="everything", method="kendall") #' # From a covariance matrix #' x3 <- cov(x2) #' eigenComputes(x3, cor=TRUE, use="everything") #' eigenComputes(x3, cor=FALSE, use="everything") #' # From a correlation matrix #' x4 <- cor(x2) #' eigenComputes(x4, use="everything") #' # ....................................................... #' eigenComputes <- function(x, cor=TRUE, model="components", ...) { dataType <- eigenFrom(x) if (model == "components") { res <- switch(dataType, eigenvalues = as.vector(x), correlation = {if (cor == FALSE) eigen(x)$values else eigen(cov2cor(x))$values}, data = {if (cor == TRUE) eigen(cor(x, ...))$values else eigen(cov(x, ...))$values} ) } if (model == "factors") { res <- switch(dataType, eigenvalues = as.vector(x), correlation = {if (cor == FALSE) eigen(corFA(x, method="ginv"))$values else eigen(cov2cor(corFA(x, method="ginv")))$values}, data = {if (cor == TRUE) eigen(corFA(cor(x, ...), method="ginv"))$values else eigen(corFA(cov(x, ...), method="ginv"))$values} ) } return(res) } nFactors/R/nBentler.r0000644000176200001440000001667413620625351014207 0ustar liggesusers#' Bentler and Yuan's Procedure to Determine the Number of Components/Factors #' #' This function computes the Bentler and Yuan's indices for determining the #' number of components/factors to retain. 
#' #' The implemented Bentler and Yuan's procedure must be used with care because #' the minimized function is not always stable, as Bentler and Yuan (1996, 1998) #' already noted. In many cases, constraints must be applied to obtain a solution, #' as the current implementation does, but the user can modify these constraints. #' #' The hypothesis tested (Bentler and Yuan, 1996, equation 10) is: \cr \cr #' #' (1) \eqn{\qquad \qquad H_k: \lambda_{k+i} = \alpha + \beta x_i, (i = 1, #' \ldots, q)} \cr #' #' The solution of the following simultaneous equations is needed to find #' \eqn{(\alpha, \beta) \in} \cr #' #' (2) \eqn{\qquad \qquad f(x) = \sum_{j=1}^q \frac{ [ \lambda_{k+j} - N \alpha #' + \beta x_j ] x_j}{(\alpha + \beta x_j)^2} = 0} \cr \cr and \eqn{\qquad #' \qquad g(x) = \sum_{j=1}^q \frac{ \lambda_{k+j} - N \alpha + \beta x_j #' x_j}{(\alpha + \beta x_j)^2} = 0} \cr #' #' The solution to this system of equations was implemented by minimizing the #' following equation: \cr #' #' (3) \eqn{\qquad \qquad (\alpha, \beta) \in \inf{[h(x)]} = \inf{\log{[f(x)^2 #' + g(x)^2]}}} \cr #' #' The likelihood ratio test \eqn{LRT} proposed by Bentler and Yuan (1996, #' equation 7) follows a \eqn{\chi^2} probability distribution with \eqn{q-2} #' degrees of freedom and is equal to: \cr #' #' (4) \eqn{\qquad \qquad LRT = N(k - p)\left\{ {\ln \left( {{n \over N}} #' \right) + 1} \right\} - N\sum\limits_{j = k + 1}^p {\ln \left\{ {{{\lambda #' _j } \over {\alpha + \beta x_j }}} \right\}} + n\sum\limits_{j = k + 1}^p #' {\left\{ {{{\lambda _j } \over {\alpha + \beta x_j }}} \right\}} } \cr #' #' With \eqn{p} being the number of eigenvalues, \eqn{k} the number of #' eigenvalues to test, \eqn{q} the \eqn{p-k} remaining eigenvalues, \eqn{N} #' the sample size, and \eqn{n = N-1}. Note that there is an error in the #' Bentler and Yuan equation, the variables \eqn{N} and \eqn{n} being inverted #' in the preceding equation 4. #' #' A better strategy proposed by Bentler and Yuan (1998) is to use a minimized #' \eqn{\chi^2} solution. This strategy will be implemented in a future version #' of the \pkg{nFactors} package. #' #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of #' correlations or of covariances or a \code{data.frame} of data #' @param N numeric: number of subjects. #' @param log logical: if \code{TRUE} does the maximization on the log values. #' @param alpha numeric: statistical significance level. #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation #' matrix, else from a covariance matrix #' @param details logical: if \code{TRUE} also returns details about the #' computation for each eigenvalue. #' @param minPar numeric: minimums for the coefficient of the linear trend to #' maximize. #' @param maxPar numeric: maximums for the coefficient of the linear trend to #' maximize. #' @param ... variable: additional parameters to give to the \code{cor} or #' \code{cov} functions #' @return \item{nFactors}{ numeric: vector of the number of factors retained #' by Bentler and Yuan's procedure. } \item{details}{ numeric: matrix of #' the details of the computation.} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege #' \cr \email{David.Magis@@ulg.ac.be} #' @seealso \code{\link{nBartlett}}, \code{\link{bentlerParameters}} #' @references Bentler, P. M. 
and Yuan, K.-H. (1996). Test of linear trend in #' eigenvalues of a covariance matrix with application to data analysis. #' \emph{British Journal of Mathematical and Statistical Psychology, 49}, #' 299-312. #' #' Bentler, P. M. and Yuan, K.-H. (1998). Test of linear trend in the smallest #' eigenvalues of the correlation matrix. \emph{Psychometrika, 63}(2), 131-144. #' @export # #' @importFrom stats lm #' @keywords multivariate #' @examples #' #' ## ................................................ #' ## SIMPLE EXAMPLE OF THE BENTLER AND YUAN PROCEDURE #' #' # Bentler (1996, p. 309) Table 2 - Example 2 ............. #' n=649 #' bentler2<-c(5.785, 3.088, 1.505, 0.582, 0.424, 0.386, 0.360, 0.337, 0.303, #' 0.281, 0.246, 0.238, 0.200, 0.160, 0.130) #' #' results <- nBentler(x=bentler2, N=n) #' results #' #' plotuScree(x=bentler2, model="components", #' main=paste(results$nFactors, #' " factors retained by the Bentler and Yuan's procedure (1996, p. 309)", #' sep="")) #' # ........................................................ #' #' # Bentler (1998, p. 140) Table 3 - Example 1 ............. #' n <- 145 #' example1 <- c(8.135, 2.096, 1.693, 1.502, 1.025, 0.943, 0.901, 0.816, 0.790, #' 0.707, 0.639, 0.543, #' 0.533, 0.509, 0.478, 0.390, 0.382, 0.340, 0.334, 0.316, 0.297, #' 0.268, 0.190, 0.173) #' #' results <- nBentler(x=example1, N=n) #' results #' #' plotuScree(x=example1, model="components", #' main=paste(results$nFactors, #' " factors retained by the Bentler and Yuan's procedure (1998, p. 140)", #' sep="")) #' # ........................................................ #' nBentler <- function(x, N, log=TRUE, alpha=0.05, cor=TRUE, details=TRUE, minPar=c(min(lambda) - abs(min(lambda)) +.001, 0.001), maxPar=c(max(lambda), lm(lambda ~ I(length(lambda):1))$coef[2]), ...) { stopMessage <- paste("\n These indices are only valid with a principal component solution.\n", " ...................... So, only positive eugenvalues are permitted.\n", sep="") lambda <- eigenComputes(x, cor=cor, ...) 
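 # Added note: the linear trend test below is only defined for a principal
 # component solution, so any negative eigenvalue aborts with the message above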
if (length(which(lambda <0 )) > 0) {cat(stopMessage);stop()} n <- N significance <- alpha min.k <- 3 LRT <- data.frame(q=numeric(length(lambda)-min.k), k=numeric(length(lambda)-min.k), LRT=numeric(length(lambda)-min.k), a=numeric(length(lambda)-min.k), b=numeric(length(lambda)-min.k), p=numeric(length(lambda)-min.k), convergence=numeric(length(lambda)-min.k)) bentler.n <- 0 for (i in 1:(length(lambda)-min.k)) { temp <- bentlerParameters(x=lambda, N=n, nFactors=i, log=log, cor=cor, minPar=minPar, maxPar=maxPar) LRT[i,3] <- temp$lrt LRT[i,4] <- ifelse(is.null(temp$coef[1]), NA, temp$coef[1]) LRT[i,5] <- ifelse(is.null(temp$coef[2]), NA, temp$coef[2]) LRT[i,6] <- ifelse(is.null(temp$p.value), NA, temp$p.value) LRT[i,7] <- ifelse(is.null(temp$convergence), NA, temp$convergence) LRT[i,2] <- i LRT[i,1] <- length(lambda) - i } #LRT <- LRT[order(LRT[,1],decreasing = TRUE),] for (i in 1:(length(lambda)-min.k)) { if (i == 1) bentler.n <- bentler.n + as.numeric(LRT$p[i] <= significance) if (i > 1) {if(LRT$p[i-1] <= 0.05) bentler.n <- bentler.n + as.numeric(LRT$p[i] <= significance)} } if (bentler.n == 0) bentler.n <- length(lambda) if (details == TRUE) details <- LRT else details <- NULL res <- list(detail=details, nFactors=bentler.n) class(res) <- c("nFactors","list") return(res) } nFactors/R/parallel.R0000644000176200001440000001372013621044016014151 0ustar liggesusers#' Parallel Analysis of a Correlation or Covariance Matrix #' #' This function gives the distribution of the eigenvalues of correlation or a #' covariance matrices of random uncorrelated standardized normal variables. #' The mean and a selected quantile of this distribution are returned. #' #' Note that if the decision is based on a quantile value rather than on the #' mean, care must be taken with the number of replications (\code{rep}). In #' fact, the smaller the quantile (\code{cent}), the bigger the number of #' necessary replications. #' #' @param subject numeric: nmber of subjects (default is 100) #' @param var numeric: number of variables (default is 10) #' @param rep numeric: number of replications of the correlation matrix #' (default is 100) #' @param cent depreciated numeric (use quantile instead): quantile of the #' distribution on which the decision is made (default is 0.05) #' @param quantile numeric: quantile of the distribution on which the decision #' is made (default is 0.05) #' @param model character: \code{"components"} or \code{"factors"} #' @param sd numeric: vector of standard deviations of the simulated variables #' (for a parallel analysis on a covariance matrix) #' @param ... variable: other parameters for the \code{"mvrnorm"}, \code{corr} #' or \code{cov} functions #' @return \item{eigen}{ Data frame consisting of the mean and the quantile of #' the eigenvalues distribution } \item{eigen$mevpea}{ Mean of the eigenvalues #' distribution} \item{eigen$sevpea}{ Standard deviation of the eigenvalues #' distribution} \item{eigen$qevpea}{ quantile of the eigenvalues distribution} #' \item{eigen$sqevpea}{ Standard error of the quantile of the eigenvalues #' distribution} \item{subject}{ Number of subjects} \item{variables}{ Number #' of variables} \item{centile}{ Selected quantile} Otherwise, returns a #' summary of the parallel analysis. 
#' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, #' \code{\link{plotnScree}}, \code{\link{plotParallel}} #' @references Drasgow, F. and Lissak, R. (1983) Modified parallel analysis: a #' procedure for examining the latent dimensionality of dichotomously scored #' item responses. \emph{Journal of Applied Psychology, 68}(3), 363-373. #' #' Hoyle, R. H. and Duvall, J. L. (2004). Determining the number of factors in #' exploratory and confirmatory factor analysis. In D. Kaplan (Ed.): \emph{The #' Sage handbook of quantitative methodology for the social sciences}. Thousand #' Oaks, CA: Sage. #' #' Horn, J. L. (1965). A rationale and test of the number of factors in factor #' analysis. \emph{Psychometrika, 30}, 179-185. #' @export #' @importFrom MASS ginv mvrnorm #' @importFrom stats cov dnorm qnorm #' @keywords multivariate #' @examples #' #' ## SIMPLE EXAMPLE OF A PARALLEL ANALYSIS #' ## OF A CORRELATION MATRIX WITH ITS PLOT #' data(dFactors) #' eig <- dFactors$Raiche$eigenvalues #' subject <- dFactors$Raiche$nsubjects #' var <- length(eig) #' rep <- 100 #' quantile <- 0.95 #' results <- parallel(subject, var, rep, quantile) #' #' results #' #' ## IF THE DECISION IS BASED ON THE CENTILE USE qevpea INSTEAD #' ## OF mevpea ON THE FIRST LINE OF THE FOLLOWING CALL #' plotuScree(x = eig, #' main = "Parallel Analysis" #' ) #' #' lines(1:var, #' results$eigen$qevpea, #' type="b", #' col="green" #' ) #' #' #' ## ANOTHER SOLUTION IS SIMPLY TO #' plotParallel(results) #' "parallel" <- function(subject=100, var=10, rep=100, cent=0.05, quantile=cent, model="components", sd=diag(1,var), ...) { r <- subject c <- var y <- matrix(c(1:r*c), nrow=r, ncol=c) ycor <- matrix(c(1:c*c), nrow=c, ncol=c) evpea <- NULL leg.txt <- "Pearson" # Simulation of k samples to obtain k random eigenvalues vectors # for Pearson correlation coefficients for (k in c(1:rep)) { # y <- rnorm(y, sd=sqrt(mean(diag(sd)))) # Old version without covariance # y <- matrix(y, nrow=r, ncol=c) # Old version without covariance y <- mvrnorm(n = r, mu=rep(0,var), Sigma=sd, empirical=FALSE) corY <- cov(y, ...) 
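 # Added note: corY is the sample covariance of the simulated data; its diagonal
 # is adjusted below according to the chosen model (components or factors)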
# The previous version was only cor(y) if (model == "components") diag(corY) <- diag(sd) # To constraint the diagonal to sd for PCA if (model == "factors") corY <- corY - ginv(diag(diag(ginv(corY)))) # To constraint the diagonal to communalities for FCA evpea <- rbind(evpea, eigen(corY)[[1]]) } # Temporay function to compute the standard error of a quantile SEcentile <- function(sd, n = 100, p = 0.95) {return(sd/sqrt(n) * sqrt(p*(1-p))/dnorm(qnorm(p))) } # Summary statistics sprob <- c(cent) mevpea <- sapply(as.data.frame(evpea), mean) # Eigenvalues means sevpea <- sapply(as.data.frame(evpea), sd ) # Eigenvalues Standard deviations qevpea <- moreStats(evpea, quantile=quantile)[3,] # Would be more in line with version 2.3 #quant <- function(x, sprobs = sprobs) {return(as.vector(quantile(x, probs = sprob))) } #qevpea <- sapply(as.data.frame(evpea), quant) # Eigenvalues centiles sqevpea <- sevpea sqevpea <- sapply(as.data.frame(sqevpea), SEcentile, n = rep, p = cent) # Standard error of the centiles # List of results return result <- list(eigen = data.frame(mevpea, sevpea, qevpea, sqevpea), subject = r, variables = c, centile = cent ) class(result) <- 'parallel' # For future use return(result) } nFactors/R/studySim.r0000644000176200001440000001360313620574534014252 0ustar liggesusers#' Simulation Study from Given Factor Structure Matrices and Conditions #' #' The \code{structureSim} function returns statistical results from #' simulations from predefined congeneric factor structures. The main ideas #' come from the methodology applied by Zwick and Velicer (1986). #' #' #' @param var numeric: vector of the number of variables #' @param nFactors numeric: vector of the number of components/factors #' @param pmjc numeric: vector of the number of major loadings on each #' component/factor #' @param loadings numeric: vector of the major loadings on each #' component/factor #' @param unique numeric: vector of the unique loadings on each #' component/factor #' @param N numeric: vector of the number of subjects/observations #' @param repsim numeric: number of replications of the matrix correlation #' simulation #' @param reppar numeric: number of replications for the parallel and #' permutation analysis #' @param stats numeric: vector of the statistics to return: mean(1), #' median(2), sd(3), quantile(4), min(5), max(6) #' @param quantile numeric: quantile for the parallel and permutation analysis #' @param model character: \code{"components"} or \code{"factors"} #' @param r2limen numeric: R2 limen value for the R2 Nelson index #' @param all logical: if \code{TRUE} computes the Bentler and Yuan index (very #' long computing time to consider) #' @param dir character: directory where to save output. Default to NA #' @param trace logical: if \code{TRUE} outputs details of the status of the #' simulations #' @return \item{values}{ Returns selected statistics about the number of #' components/factors to retain: mean, median, quantile, standard deviation, #' minimum and maximum.} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{generateStructure}}, \code{\link{structureSim}} #' @references #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' #' Zwick, W. R. and Velicer, W. F. (1986). 
Comparison of five rules #' for determining the number of components to retain. \emph{Psychological #' Bulletin, 99}, 432-442. #' @export #' @keywords multivariate #' @examples #' #' \dontrun{ #' # .................................................................... #' # Example inspired from Zwick and Velicer (1986) #' # Very long computimg time #' # ................................................................... #' #' # 1. Initialisation #' # reppar <- 30 #' # repsim <- 5 #' # quantile <- 0.50 #' #' # 2. Simulations #' # X <- studySim(var=36,nFactors=3, pmjc=c(6,12), loadings=c(0.5,0.8), #' # unique=c(0,0.2), quantile=quantile, #' # N=c(72,180), repsim=repsim, reppar=reppar, #' # stats=c(1:6)) #' #' # 3. Results (first 10 results) #' # print(X[1:10,1:14],2) #' # names(X) #' #' # 4. Study of the error done in the determination of the number #' # of components/factors. A positive value is associated to over #' # determination. #' # results <- X[X$stats=="mean",] #' # residuals <- results[,c(11:25)] - X$nfactors #' # BY <- c("nsubjects","var","loadings") #' # round(aggregate(residuals, by=results[BY], mean),0) #' } #' studySim <- function(var, nFactors, pmjc, loadings, unique, N, repsim, reppar, stats=1, quantile=0.5, model="components", r2limen=0.75, all=FALSE, dir=NA, trace=TRUE) { nsubjects <- N result <- NULL id <- 0 nid <- length(nFactors) * length(loadings) * length(pmjc) * length(var) * length(unique) * length(nsubjects) for (i in 1:length(nFactors)) { for (j in 1:length(loadings)) { for (l in 1:length(pmjc)) { for (n in 1:length(var)) { for (k in 1:length(unique)) { for (m in 1:length(nsubjects)) { id <- id + 1 kid <- paste(id,"/",nid,sep="") ident <- c(nFactors=nFactors[i], loadings=loadings[j], unique=unique[k], quantile=quantile, pmjc=pmjc[l], nsubjects=nsubjects[m], var=var[n], reppar=reppar, repsim=repsim, id=kid, model=model) if (trace == TRUE) print(ident) fStruct <- generateStructure(var=var[n], mjc=nFactors[i], pmjc=pmjc[l], loadings=loadings[j], unique=unique[k]) fSim <- structureSim(fload=as.matrix(fStruct), reppar=reppar, repsim=repsim, details=FALSE, all=all, N=nsubjects[m], quantile=quantile, model=model, r2limen=r2limen)[[2]][stats,] if (length(stats) == 1) { fSim <- data.frame(var=var[n], nsubjects=nsubjects[m], nfactors=nFactors[i], pmjc=pmjc[l], loadings=loadings[j], unique=unique[k], t(fSim), repsim=repsim, reppar=reppar) } if (length(stats) > 1) { ls <- length(stats) info <- data.frame(stats =rownames(fSim), id =rep(id, ls), var =rep(var[n], ls), nsubjects=rep(nsubjects[m], ls), nfactors=rep(nFactors[i], ls), pmjc =rep(pmjc[l], ls), loadings=rep(loadings[j], ls), unique =rep(unique[k], ls), repsim =rep(repsim, ls), reppar =rep(reppar, ls)) fSim <- data.frame(info, fSim) } result <- rbind(result, fSim) rownames(result) <- 1:dim(result)[1] fString <- paste("RES_", paste(ident,"_", sep="", collapse=""), sep="") # if (!is.na(dir)) save("fSim", file=paste(dirPack, fString,".Rdata", sep="")) # Old erroneous code if (!is.na(dir)) save("fSim", file=paste(dir, fString,".Rdata", sep="")) }}}}}} return(result) } nFactors/R/plotParallel.R0000644000176200001440000000707513635223204015021 0ustar liggesusers#' Plot a Parallel Analysis Class Object #' #' Plot a scree plot adding information about a parallel analysis. #' #' If \code{eig} is \code{FALSE} the plot shows only the parallel analysis #' without eigenvalues. 
#' #' @param parallel numeric: vector of the results of a previous parallel #' analysis #' @param eig depreciated parameter: eigenvalues to analyse (not used if x is #' used, recommended) #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of #' correlations or of covariances or a \code{data.frame} of data #' @param model character: \code{"components"} or \code{"factors"} #' @param main character: title of the plot #' @param xlab character: label of the x axis #' @param ylab character: label of the y axis #' @param legend logical: indicator of the presence or not of a legend #' @param ... variable: additionnal parameters to give to the \code{cor} or #' \code{cov} functions #' @return Nothing returned. #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, #' \code{\link{plotnScree}}, \code{\link{parallel}} #' @references #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' #' @export #' @importFrom graphics plot.default lines #' @keywords Graphics #' @examples #' #' ## SIMPLE EXAMPLE OF A PARALLEL ANALYSIS #' ## OF A CORRELATION MATRIX WITH ITS PLOT #' data(dFactors) #' eig <- dFactors$Raiche$eigenvalues #' subject <- dFactors$Raiche$nsubjects #' var <- length(eig) #' rep <- 100 #' cent <- 0.95 #' results <- parallel(subject,var,rep,cent) #' #' results #' #' #' ## PARALLEL ANALYSIS SCREE PLOT #' plotParallel(results, x=eig) #' plotParallel(results) #' #' "plotParallel" <- function(parallel, eig = NA, x = eig, model = "components", legend = TRUE, ylab = "Eigenvalues", xlab = "Components", main = "Parallel Analysis", ... ) { if (any(!is.na(x))) eig <- eigenComputes(x, ...) if (!inherits(parallel, "parallel")) stop("Method is only for parallel objects") if (model == "factors") xlab <- "Factors" var <- length(parallel$eigen$qevpea) if (length(eig) == 1) { Component <- var:1 Location <- seq(from = 0, to = max(parallel$eigen$qevpea)*3, length.out = var) plot.default(as.numeric(Component), as.numeric(Location), type = "n", main = main, xlab = xlab, ylab = ylab) } if (length(eig) > 1) {plotuScree(eig, main = main, xlab = xlab, ylab = ylab) } lines(1:var, parallel$eigen$qevpea , col = "green", type = "p", pch = 2) lines(1:var, parallel$eigen$mevpea, col = "red") if (legend == TRUE) { if (length(eig) == 1) { leg <- c("Mean Eigenvalues", "Centiles of the Eigenvalues") tco <- c("red", "green") co <- c("red", "green") pc <- c(NA, 2) } if (length(eig) > 1) { leg <- c("Eigenvalues", "Mean Eigenvalues", "Centiles of the Eigenvalues") tco <- c("black", "red", "green") co <- c("black", "red", "green") pc <- c(1, NA, 2) } legend("topright", legend = leg, text.col = tco, col = co, pch = pc ) } } nFactors/R/plotnScree.R0000644000176200001440000001134413635223230014475 0ustar liggesusers#' Scree Plot According to a nScree Object Class #' #' Plot a scree plot adding information about a non graphical \code{nScree} #' analysis. #' #' #' @param nScree Results of a previous \code{nScree} analysis #' @param legend Logical indicator of the presence or not of a legend #' @param xlab Label of the x axis (default to \code{"Component"}) #' @param ylab Label of the y axis (default to \code{"Eigenvalue"}) #' @param main Main title (default to \code{"Non Graphical Solutions to the #' Scree Test"}) #' @return Nothing returned. 
#' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, #' \code{\link{plotParallel}}, \code{\link{parallel}} #' @references #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' @export #' @importFrom graphics lines par text plot.default # #' @importFrom stats lm coef #' @keywords Graphics #' @examples #' #' ## INITIALISATION #' data(dFactors) # Load the nFactors dataset #' attach(dFactors) #' vect <- Raiche # Use the second example from Buja and Eyuboglu #' # (1992, p. 519, nsubjects not specified by them) #' eigenvalues <- vect$eigenvalues # Extract the observed eigenvalues #' nsubjects <- vect$nsubjects # Extract the number of subjects #' variables <- length(eigenvalues) # Compute the number of variables #' rep <- 100 # Number of replications for the parallel analysis #' cent <- 0.95 # Centile value of the parallel analysis #' #' ## PARALLEL ANALYSIS (qevpea for the centile criterion, mevpea for the mean criterion) #' aparallel <- parallel(var = variables, #' subject = nsubjects, #' rep = rep, #' cent = cent)$eigen$qevpea # The 95 centile #' #' ## NOMBER OF FACTORS RETAINED ACCORDING TO DIFFERENT RULES #' results <- nScree(eig = eigenvalues, #' aparallel = aparallel #' ) #' #' results #' #' ## PLOT ACCORDING TO THE nScree CLASS #' plotnScree(results) #' "plotnScree" <- function (nScree, legend = TRUE, ylab = "Eigenvalues", xlab = "Components", main = "Non Graphical Solutions to Scree Test") { if (!inherits(nScree, "nScree")) stop("Method is only for nScree objects") #if (!exists("legend", mode="logical") ) legend <- TRUE # To develop #if (!exists("ylab")) ylab <- "Eigenvalues" # To develop #if (!exists("xlab")) xlab <- "Components" # To develop #if (!exists("main")) main <- "Non Graphical Solutions to Scree Test" # To develop if (nScree$Model == "components") nkaiser = "Eigenvalues (>mean = " else nkaiser = "Eigenvalues (>0 = " if (nScree$Model == "factors") xlab = "Factors" par(col = 1, pch = 1) # Color and symbol for usual scree par(mfrow = c(1,1)) eig <- nScree$Analysis$Eigenvalues k <- 1:length(eig) #plotuScree(x=eig, ...) 
# To develop plotuScree(x=eig, main=main, xlab=xlab, ylab=ylab) nk <- length(eig) noc <- nScree$Components$noc vp.p <- lm(eig[c(noc+1,nk)] ~ k[c(noc+1,nk)]) x <- sum(c(1,1) * coef(vp.p)) y <- sum(c(1,nk)* coef(vp.p)) par(col = 10) # Color for optimal coordinates lines(k[c(1,nk)],c(x,y)) par(col = 11,pch=2) # Color and symbol for parallel analysis lines(1:nk, nScree$Analysis$Par.Analysis, type = "b") if (legend == TRUE) { leg.txt <- c(paste(nkaiser,nScree$Components$nkaiser,")"), c(paste("Parallel Analysis (n = ",nScree$Components$nparallel,")")), c(paste("Optimal Coordinates (n = ",nScree$Components$noc,")")), c(paste("Acceleration Factor (n = ",nScree$Components$naf,")")) ) legend("topright", legend = leg.txt, pch = c(1,2,NA,NA), text.col = c(1,3,2,4), col = c(1,3,2,4) ) } naf <- nScree$Components$naf text(x = noc , y = eig[noc], label = " (OC)", cex = .70, adj = c(0,0), col = 2) text(x = naf + 1, y = eig[naf + 1], label = " (AF)", cex = .70, adj = c(0,0), col = 4) } nFactors/R/plotuScree.R0000644000176200001440000000400713620574423014511 0ustar liggesusers#' Plot of the Usual Cattell's Scree Test #' #' \code{uScree} plot a usual scree test of the eigenvalues of a correlation #' matrix. #' #' #' @param Eigenvalue depreciated parameter: eigenvalues to analyse (not used if #' x is used, recommended) #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of #' correlations or of covariances or a \code{data.frame} of data #' @param model character: \code{"components"} or \code{"factors"} #' @param main character: title of the plot (default is \code{Scree Plot}) #' @param xlab character: label of the x axis (default is \code{Component}) #' @param ylab character: label of the y axis (default is \code{Eigenvalue}) #' @param ... variable: additionnal parameters to give to the #' \code{eigenComputes} function #' @return Nothing returned with this function. #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{nScree}}, \code{\link{parallel}} #' @references Cattell, R. B. (1966). The scree test for the number of factors. #' \emph{Multivariate Behavioral Research, 1}, 245-276. #' @export #' @keywords Graphics #' @examples #' #' ## SCREE PLOT #' data(dFactors) #' attach(dFactors) #' eig = Cliff1$eigenvalues #' plotuScree(x=eig) #' "plotuScree" <- function(Eigenvalue, x=Eigenvalue, model = "components", ylab = "Eigenvalues", xlab = "Components", main = "Scree Plot" , ...) { Eigenvalue <- eigenComputes(x, ...) if (!inherits(Eigenvalue, "numeric")) stop("use only with \"numeric\" objects") if (model == "factors") xlab <- "Factors" par(mfrow = c(1,1)) nk <- length(Eigenvalue) Component <- 1:nk plot.default(as.numeric(Component), as.numeric(Eigenvalue), type = 'b',col = "black", pch = 1, ylab = ylab, xlab = xlab, main = main ) } nFactors/R/data.R0000644000176200001440000000756013635223330013276 0ustar liggesusers #' Eigenvalues from classical studies #' #' Classical examples of eigenvalues vectors used to study the number of factors #' to retain in the litterature. These examples generally give the number of #' subjects use to obtain these eigenvalues. #' The number of subjects is used with the parallel analysis. #' #' Other datasets will be added in future versions of the package. #' #' @name dFactors #' @docType data #' #' @format A list of examples. 
For each example, a list is also used to give the eigenvalues #' vector and the number of subjects. #' \describe{ #' \item{Bentler}{$eigenvalues and $nsubjects} #' \item{Buja}{$eigenvalues and $nsubjects} #' \item{Cliff1}{$eigenvalues and $nsubjects} #' \item{Cliff2}{$eigenvalues and $nsubjects} #' \item{Cliff3}{$eigenvalues and $nsubjects} #' \item{Hand}{$eigenvalues and $nsubjects} #' \item{Harman}{$eigenvalues and $nsubjects} #' \item{Lawley}{$eigenvalues and $nsubjects} #' \item{Raiche}{$eigenvalues and $nsubjects} #' \item{Tucker1}{$eigenvalues and $nsubjects} #' \item{Tucker2}{$eigenvalues and $nsubjects} #' } #' #' @source #' Lawley and Hand dataset: Bartholomew \emph{et al}. (2002, p. 123, 126) #' #' Bentler dataset: Bentler and Yuan (1998, p. 139-140) #' #' Buja datasets: Buja and Eyuboglu (1992, p. 516, 519) < Number of subjects not specified by Buja and Eyuboglu > #' #' Cliff datasets: Cliff (1970, p. 165) #' #' Raiche dataset: Raiche, Langevin, Riopel and Mauffette (2006) #' #' Raiche dataset: Raiche, Riopel and Blais (2006, p. 9) #' #' Tucker datasets: Tucker \emph{et al}. (1969, p. 442) #' #' @references Bartholomew, D. J., Steele, F., Moustaki, I. and Galbraith, J. #' I. (2002). \emph{The analysis and interpretation of multivariate data for #' social scientists}. Boca Raton, FL: Chapman and Hall. #' #' Bentler, P. M. and Yuan, K.-H. (1998). Tests for linear trend in the #' smallest eigenvalues of the correlation matrix. \emph{Psychometrika, 63}(2), #' 131-144. #' #' Buja, A. and Eyuboglu, N. (1992). Remarks on parallel analysis. #' \emph{Multivariate Behavioral Research, 27}(4), 509-540. #' #' Cliff, N. (1970). The relation between sample and population characteristic #' vectors. \emph{Psychometrika, 35}(2), 163-178. #' #' Hand, D. J., Daly, F., Lunn, A. D., McConway, K. J. and Ostrowski, E. #' (1994). \emph{A handbook of small data sets}. Boca Raton, FL: Chapman and #' Hall. #' #' Lawley, D. N. and Maxwell, A. E. (1971). \emph{Factor analysis as a #' statistical method} (2nd edition). London: Butterworth. #' #' Raiche, G., Langevin, L., Riopel, M. and Mauffette, Y. (2006). Etude #' exploratoire de la dimensionnalite et des facteurs expliques par une #' traduction francaise de l'Inventaire des approches d'enseignement de #' Trigwell et Prosser dans trois universite quebecoises. \emph{Mesure et #' Evaluation en Education, 29}(2), 41-61. #' #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). #' Non-graphical solutions for Cattell's scree test. Methodology, 9(1), 23-29. #' #' Tucker, L. D., Koopman, R. F. and Linn, R. L. (1969). Evaluation of factor #' analytic research procedures by mean of simulated correlation matrices. #' \emph{Psychometrika, 34}(4), 421-459. #' #' Zoski, K. and Jurs, S. (1993). Using multiple regression to determine the #' number of factors to retain in factor analysis. \emph{Multiple Linear #' Regression Viewpoint, 20}(1), 5-9. 
#' #' @keywords datasets #' #' @examples #' #' # EXAMPLES FROM DATASET #' data(dFactors) #' #' # COMMAND TO VISUALIZE THE CONTENT AND ATTRIBUTES OF THE DATASETS #' names(dFactors) #' attributes(dFactors) #' dFactors$Cliff1$eigenvalues #' dFactors$Cliff1$nsubjects #' #' # SCREE PLOT OF THE Cliff1 DATASET #' plotuScree(dFactors$Cliff1$eigenvalues) #' "dFactors" nFactors/R/nMreg.r0000644000176200001440000001066113621043750013472 0ustar liggesusers#' Multiple Regression Procedure to Determine the Number of Components/Factors #' #' This function computes the \eqn{\beta} indices, like their associated #' Student \emph{t} and probability (Zoski and Jurs, 1993, 1996, p. 445). These #' three values can be used as three different indices for determining the #' number of components/factors to retain. #' #' When the associated Student \emph{t} test is applied, the following #' hypothesis is considered: \cr #' #' (1) \eqn{\qquad \qquad H_k: \beta (\lambda_1 \ldots \lambda_k) - \beta #' (\lambda_{k+1} \ldots \lambda_p), (k = 3, \ldots, p-3) = 0} \cr #' #' #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of #' correlations or of covariances or a \code{data.frame} of data (eigenFrom) #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation #' matrix, else from a covariance matrix #' @param model character: \code{"components"} or \code{"factors"} #' @param details logical: if \code{TRUE} also returns details about the #' computation for each eigenvalue. #' @param ... variable: additionnal parameters to give to the #' \code{eigenComputes} and \code{cor} or \code{cov} functions #' @return \item{nFactors}{ numeric: number of components/factors retained by #' the \emph{MREG} procedures. } \item{details}{ numeric: matrix of the details #' for each indices.} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, #' \code{\link{plotnScree}}, \code{\link{plotParallel}} #' @references Zoski, K. and Jurs, S. (1993). Using multiple regression to #' determine the number of factors to retain in factor analysis. \emph{Multiple #' Linear Regression Viewpoints, 20}(1), 5-9. #' #' Zoski, K. and Jurs, S. (1996). An objective counterpart to the visual scree #' test for factor analysis: the standard error scree test. \emph{Educational #' and Psychological Measurement, 56}(3), 443-451. #' @export #' @importFrom stats sd lm pt #' @keywords multivariate #' @examples #' #' ## SIMPLE EXAMPLE OF A MREG ANALYSIS #' #' data(dFactors) #' eig <- dFactors$Raiche$eigenvalues #' #' results <- nMreg(eig) #' results #' #' plotuScree(eig, main=paste(results$nFactors[1], ", ", #' results$nFactors[2], " or ", #' results$nFactors[3], #' " factors retained by the MREG procedures", #' sep="")) #' nMreg <- function(x, cor=TRUE, model="components", details=TRUE, ...) { x <- eigenComputes(x, cor=cor, model=model, ...) nlength <- 3 detail <- NULL n <- length(x) if (n < 6) stop("The number of variables must be at least 6.") i <- 1 mreg <- tmreg <- tmreg2 <-pmreg <- numeric(n-5) while (i <= (length(x)-5)) { xa <- c(1:(i+2)) ya <- x[1:(i+2)] ma <- lm(ya ~ xa) Syx.a <- sd(ya)*sqrt((1-summary(ma)$r.squared) * ((length(ya)-1)/(length(ya)-2))) # Howell(2008, p. 
253) compa <- ma$coef[2] seCompa <- summary(ma)$coef[2,2] xb <- c((i+1+nlength):length(x)) yb <- x[(i+1+nlength):length(x)] mb <- lm(yb ~ xb) Syx.b <- sd(yb)*sqrt((1-summary(mb)$r.squared) * ((length(yb)-1)/(length(yb)-2))) # Howell(2008, p. 253) compb <- mb$coef[2] seCompb <- summary(mb)$coef[2,2] mreg[i] <- compb - compa semreg <- sqrt((Syx.a^2)/((length(xa)-1)*sd(xa)^2) + (Syx.b^2)/((length(xb)-1)*sd(xb)^2)) # Se_dif_b -> Howell(2008, p. 259, 266) tmreg[i] <- (compb - compa)/(semreg) tmreg2[i] <- (mreg[i])/sqrt(seCompa^2 + seCompb^2) # There seems to be an error in the Zoski and Jurs formula: this would be the correct one, equivalent to the formula above but faster to compute. pmreg[i] <- pt(tmreg[i],(length(xa)-1) + (length(xb)-1) - 4, lower.tail=FALSE, log.p=TRUE) i <- i + 1 } if (details == TRUE) detail <- data.frame(v=(1:(n-5)),values=x[1:(n-5)], mreg=mreg, tmreg=tmreg, pmreg=pmreg) mreg <- as.numeric(which(mreg ==max( mreg, na.rm=TRUE)) + nlength) tmreg <- as.numeric(which(tmreg==max(tmreg, na.rm=TRUE))) pmreg <- as.numeric(which(pmreg==min(pmreg, na.rm=TRUE))) res <- list(detail=detail, nFactors=c(b=mreg,t.p=tmreg,p.b=pmreg)) class(res) <- c("nFactors","list") return(res) } nFactors/R/componentAxis.r0000644000176200001440000000540313620574024015251 0ustar liggesusers#' Principal Component Analysis With Only n First Components Retained #' #' The \code{componentAxis} function returns a principal component analysis #' with the first \emph{n} components retained. #' #' #' @param R numeric: correlation or covariance matrix #' @param nFactors numeric: number of components/factors to retain #' @return \item{values}{ numeric: variance of each component/factor retained } #' \item{varExplained}{ numeric: variance explained by each component/factor #' retained } \item{cumVarExplained}{ numeric: cumulative variance explained by #' each component/factor retained } \item{loadings}{ numeric: loadings of each #' variable on each component/factor retained } #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{principalComponents}}, #' \code{\link{iterativePrincipalAxis}}, \code{\link{rRecovery}} #' @references Kim, J.-O. and Mueller, C. W. (1978). \emph{Introduction to #' factor analysis. What it is and how to do it}. Beverly Hills, CA: Sage. #' #' Kim, J.-O. and Mueller, C. W. (1987). \emph{Factor analysis. Statistical #' methods and practical issues}. Beverly Hills, CA: Sage. #' @keywords multivariate #' @export #' @examples #' #' # ....................................................... #' # Example from Kim and Mueller (1978, p. 10) #' # Simulated sample: lower diagonal #' R <- matrix(c( 1.000, 0.560, 0.480, 0.224, 0.192, 0.16, #' 0.560, 1.000, 0.420, 0.196, 0.168, 0.14, #' 0.480, 0.420, 1.000, 0.168, 0.144, 0.12, #' 0.224, 0.196, 0.168, 1.000, 0.420, 0.35, #' 0.192, 0.168, 0.144, 0.420, 1.000, 0.30, #' 0.160, 0.140, 0.120, 0.350, 0.300, 1.00), #' nrow=6, byrow=TRUE) #' #' # Factor analysis: Selected principal components - Kim and Mueller #' # (1978, p. 20) #' componentAxis(R, nFactors=2) #' #' # ....................................................... 
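#' # An added illustration (not from Kim and Mueller): keeping a third
#' # component to compare the cumulative variance explained and the
#' # communalities with the two-component solution above
#' componentAxis(R, nFactors=3)$cumVarExplained
#' componentAxis(R, nFactors=3)$communalities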
"componentAxis" <- function(R, nFactors=2) { nVar <- dim(R)[2] acp <- principalComponents(R) values <- acp$values[(1:nFactors)] varExplained <- round((values/nVar)*100, 2) cumVarExplained <- round(cumsum(varExplained), 2) loadings <- acp$vectors[,(1:nFactors)] %*% diag(values^0.5) # F1 * diag(E) communalities <- apply(loadings*loadings,1,sum) apa <- list(values = values, varExplained = varExplained, cumVarExplained = cumVarExplained, loadings = loadings, communalities = communalities) return(apa) } nFactors/R/nCng.r0000644000176200001440000000730713620625364013320 0ustar liggesusers#' Cattell-Nelson-Gorsuch CNG Indices #' #' This function computes the \emph{CNG} indices for the eigenvalues of a #' correlation/covariance matrix (Gorsuch and Nelson, 1981; Nasser, 2002, p. #' 400; Zoski and Jurs, 1993, p. 6). #' #' Note that the \code{nCng} function is only valid when more than six #' eigenvalues are used and that these are obtained in the context of a #' principal component analysis. For a factor analysis, some eigenvalues could #' be negative and the function will stop and give an error message. #' #' The slope of all possible sets of three adjacent eigenvalues are compared, #' so \emph{CNG} indices can be applied only when more than six eigenvalues are #' used. The eigenvalue at which the greatest difference between two successive #' slopes occurs is the indicator of the number of components/factors to #' retain. #' #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of #' correlations or of covariances or a \code{data.frame} of data #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation #' matrix, else from a covariance matrix #' @param model character: \code{"components"} or \code{"factors"} #' @param details logical: if \code{TRUE} also returns detains about the #' computation for each eigenvalue. #' @param ... variable: additionnal parameters to give to the #' \code{eigenComputes} function #' @return \item{nFactors}{ numeric: number of factors retained by the CNG #' procedure. } \item{details}{ numeric: matrix of the details for each index.} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, #' \code{\link{plotnScree}}, \code{\link{plotParallel}} #' @references Gorsuch, R. L. and Nelson, J. (1981). \emph{CNG scree test: an #' objective procedure for determining the number of factors}. Presented at the #' annual meeting of the Society for multivariate experimental psychology. #' #' Nasser, F. (2002). The performance of regression-based variations of the #' visual scree for determining the number of common factors. \emph{Educational #' and Psychological Measurement, 62(3)}, 397-419. #' #' Zoski, K. and Jurs, S. (1993). Using multiple regression to determine the #' number of factors to retain in factor analysis. \emph{Multiple Linear #' Regression Viewpoints, 20}(1), 5-9. #' @export # #' @importFrom stats lm #' @keywords multivariate #' @examples #' #' ## SIMPLE EXAMPLE OF A CNG ANALYSIS #' #' data(dFactors) #' eig <- dFactors$Raiche$eigenvalues #' #' results <- nCng(eig, details=TRUE) #' results #' #' plotuScree(eig, main=paste(results$nFactors, #' " factors retained by the CNG procedure", #' sep="")) #' nCng <- function(x, cor=TRUE, model="components", details=TRUE, ...) { x <- eigenComputes(x, cor=cor, model=model, ...) 
detail <- NULL nlength <- 2 n <- length(x) if (n < 6) stop("The number of variables must be at least 6.") i <- 1 cng <- numeric(n-5) while ((i+2*nlength+1) <= n) { xa <- c(i:(i+nlength)) ya <- x[i:(i+nlength)] compa <- lm(ya ~ xa)$coef[2] xb <- c((i+1+nlength):(i+2*nlength+1)) yb <- x[(i+1+nlength):(i+1+2*nlength)] compb <- lm(yb ~ xb)$coef[2] cng[i] <- compb - compa i <- i + 1 } if (details == TRUE) detail <- data.frame(v=(1:(n-5)),values=x[1:(n-5)], cng) cng <- as.numeric(which(cng==max(cng, na.rm=TRUE))+nlength) res <- list(detail=detail, nFactors=c(cng)) class(res) <- c("nFactors","list") return(res) } nFactors/R/principalComponents.r0000644000176200001440000000614513620574457016467 0ustar liggesusers#' Principal Component Analysis #' #' The \code{principalComponents} function returns a principal component #' analysis. Other R functions give the same results, but #' \code{principalComponents} is customized mainly for the other factor #' analysis functions available in the \pkg{nfactors} package. In order to #' retain only a small number of components the \code{componentAxis} function #' has to be used. #' #' #' @param R numeric: correlation or covariance matrix #' @return \item{values}{ numeric: variance of each component } #' \item{varExplained}{ numeric: variance explained by each component } #' \item{varExplained}{ numeric: cumulative variance explained by each #' component } \item{loadings}{ numeric: loadings of each variable on each #' component } #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{componentAxis}}, \code{\link{iterativePrincipalAxis}}, #' \code{\link{rRecovery}} #' @references Joliffe, I. T. (2002). \emph{Principal components analysis} (2th #' Edition). New York, NJ: Springer-Verlag. #' #' Kim, J.-O. and Mueller, C. W. (1978). \emph{Introduction to factor analysis. #' What it is and how to do it}. Beverly Hills, CA: Sage. #' #' Kim, J.-O. and Mueller, C. W. (1987). \emph{Factor analysis. Statistical #' methods and practical issues}. Beverly Hills, CA: Sage. #' @export #' @keywords multivariate #' @examples #' #' # ....................................................... #' # Example from Kim and Mueller (1978, p. 10) #' # Population: upper diagonal #' # Simulated sample: lower diagnonal #' R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, #' .5600, 1.000, .4749, .2196, .1912, .2979, #' .4800, .4200, 1.000, .2079, .2010, .2445, #' .2240, .1960, .1680, 1.000, .4334, .3197, #' .1920, .1680, .1440, .4200, 1.000, .4207, #' .1600, .1400, .1200, .3500, .3000, 1.000), #' nrow=6, byrow=TRUE) #' #' # Factor analysis: Principal component - #' # Kim et Mueller (1978, p. 21) #' # Replace upper diagonal with lower diagonal #' RU <- diagReplace(R, upper=TRUE) #' principalComponents(RU) #' #' # Replace lower diagonal with upper diagonal #' RL <- diagReplace(R, upper=FALSE) #' principalComponents(RL) #' # ....................................................... 
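#' # A small added check (not from Kim and Mueller): for a correlation
#' # matrix the eigenvalues sum to the number of variables, here 6
#' sum(principalComponents(RU)$values)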
#' "principalComponents" <- function(R) { nVar <- dim(R)[2] acp <- eigen(R) values <- acp$values vectors <- acp$vectors # Vectors normed to 1 varExplained <- round((values/nVar)*100, 2) cumVarExplained <- round(cumsum(varExplained), 2) loadings <- vectors %*% diag(values^0.5) # F1 * diag(E) acp <- list(values = values, varExplained = varExplained, cumVarExplained = cumVarExplained, vectors = vectors, loadings = loadings) return(acp) } nFactors/R/principalAxis.r0000644000176200001440000000755613620625540015233 0ustar liggesusers#' Principal Axis Analysis #' #' The \code{principalAxis} function returns a principal axis analysis without #' iterated communality estimates. Three different choices of communality #' estimates are given: maximum correlation, multiple correlation or estimates #' based on the sum of the squared principal component analysis loadings. #' Generally, statistical packages initialize the communalities at the #' multiple correlation value (usual inverse or generalized inverse). #' Unfortunately, this strategy cannot deal with singular correlation or #' covariance matrices. If a generalized inverse, the maximum correlation or #' the estimated communalities based on the sum of loadings are used instead, #' then a solution can be computed. #' #' #' @param R numeric: correlation or covariance matrix #' @param nFactors numeric: number of factors to retain #' @param communalities character: initial values for communalities #' (\code{"component", "maxr", "ginv" or "multiple"}) #' @return \item{values}{ numeric: variance of each component/factor } #' \item{varExplained}{ numeric: variance explained by each component/factor } #' \item{cumVarExplained}{ numeric: cumulative variance explained by each #' component/factor } \item{loadings}{ numeric: loadings of each variable on #' each component/factor } #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{componentAxis}}, \code{\link{iterativePrincipalAxis}}, #' \code{\link{rRecovery}} #' @references Kim, J.-O. and Mueller, C. W. (1978). \emph{Introduction to #' factor analysis. What it is and how to do it}. Beverly Hills, CA: Sage. #' #' Kim, J.-O. and Mueller, C. W. (1987). \emph{Factor analysis. Statistical #' methods and practical issues}. Beverly Hills, CA: Sage. #' @export # #' @importFrom MASS ginv #' @keywords multivariate #' @examples #' #' # ....................................................... #' # Example from Kim and Mueller (1978, p. 10) #' # Population: upper diagonal #' # Simulated sample: lower diagonal #' R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, #' .5600, 1.000, .4749, .2196, .1912, .2979, #' .4800, .4200, 1.000, .2079, .2010, .2445, #' .2240, .1960, .1680, 1.000, .4334, .3197, #' .1920, .1680, .1440, .4200, 1.000, .4207, #' .1600, .1400, .1200, .3500, .3000, 1.000), #' nrow=6, byrow=TRUE) #' #' # Factor analysis: Principal axis factoring #' # without iterated communalities - #' # Kim and Mueller (1978, p. 
21) #' # Replace upper diagonal with lower diagonal #' RU <- diagReplace(R, upper=TRUE) #' principalAxis(RU, nFactors=2, communalities="component") #' principalAxis(RU, nFactors=2, communalities="maxr") #' principalAxis(RU, nFactors=2, communalities="multiple") #' # Replace lower diagonal with upper diagonal #' RL <- diagReplace(R, upper=FALSE) #' principalAxis(RL, nFactors=2, communalities="component") #' principalAxis(RL, nFactors=2, communalities="maxr") #' principalAxis(RL, nFactors=2, communalities="multiple") #' # ....................................................... #' "principalAxis" <- function(R, nFactors=2, communalities="component") { if (communalities == "component") diag(R) <- componentAxis(R)$communalities if (communalities == "maxr") { RT <- R; diag(RT) <- 0; diag(R) <- apply(RT, 1, max)} if (communalities == "ginv") diag(R) <- sqrt(1-1/diag(ginv(R))) if (communalities == "multiple") { if (all(eigen(R)$values > 0)) diag(R) <- sqrt(1-1/diag(solve(R))) # Gorsuch (1983, p. 106) else return("Not all eigenvalues are greater than 0") # Verication of positive definiteness } apa <- componentAxis(R, nFactors) return(apa) } nFactors/R/eigenFrom.r0000644000176200001440000000544713620627010014337 0ustar liggesusers#' Identify the Data Type to Obtain the Eigenvalues #' #' The \code{eigenFrom} function identifies the data type from which to obtain the #' eigenvalues. The function is used internally in many functions of #' the \pkg{nFactors} package to be able to apply these to a vector of eigenvalues, #' a matrix of correlations or covariance or a \code{data.frame}. #' @param x numeric: a \code{vector} of eigenvalues, a \code{matrix} of correlations or of covariances or a \code{data.frame} of data #' @return character: return the data type to obtain the eigenvalues: \code{"eigenvalues"}, \code{"correlation"} or \code{"data"} #' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege #' \cr \email{David.Magis@@ulg.ac.be} #' @export # #' @import methods #' @keywords multivariate #' @examples #' # ....................................................... #' # Different data types #' # Examples of adequate data sources #' # Vector of eigenvalues #' data(dFactors) #' x1 <- dFactors$Cliff1$eigenvalues #' eigenFrom(x1) #' #' # Data from a data.frame #' x2 <- data.frame(matrix(20*rnorm(100), ncol=5)) #' eigenFrom(x2) #' #' # From a covariance matrix #' x3 <- cov(x2) #' eigenFrom(x3) #' #' # From a correlation matrix #' x4 <- cor(x2) #' eigenFrom(x4) #' #' # Examples of inadequate data sources: not run because of errors generated #' # x0 <- c(2,1) # Error: not enough eigenvalues #' # eigenFrom(x0) #' # x2 <- matrix(x1, ncol=5) # Error: non a symetric covariance matrix #' # eigenFrom(x2) #' # eigenFrom(x3[,(1:2)]) # Error: not enough variables #' # x6 <- table(x5) # Error: not a valid data class #' # eigenFrom(x6) #' # ....................................................... 
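#' # A hypothetical illustration, not part of the package workflow: use the
#' # returned type to branch in user code
#' if (eigenFrom(x4) == "correlation") eigen(x4, only.values=TRUE)$values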
eigenFrom <- function(x) { #classType <- methods::class1(x) classType <- data.class(x) res <- switch (classType, data.frame = "data", matrix = "correlation", numeric = "eigenvalues", stop("Not a data.frame, a matrix, or a numeric vector") ) switch (res, data = if (dim(x)[2] <= 2) stop("At least 3 variables must be supplied"), correlation = if (dim(x)[2] <= 2) stop("At least 3 variables must be supplied"), eigenvalues = if (length(x) <= 2) stop("A vector of 3 eigenvalues or more must be supplied") ) if (res == "correlation") if (any(x[lower.tri(x)] != t(x)[lower.tri(t(x))])) { stop("A correlation/covariance matrix must be symmetric, empirical data must come from a data.frame, or eigenvalues must directly come from a vector. Verify the documentation about the eigenFrom function.") } invisible(res) } nFactors/R/structureSimObjectMethods.r0000644000176200001440000001545413634756462017624 0ustar liggesusers#' Utility Functions for structureSim Class Objects #' #' Utility functions for \code{structureSim} class objects. Note that with #' \code{plot.structureSim} a dotted black vertical line shows the median #' number of factors retained by all the different indices. #' @rdname structureSimObjectMethods #' #' @aliases boxplot.structureSim is.structureSim plot.structureSim #' print.structureSim summary.structureSim #' @param eigenSelect numeric: vector of the index of the selected eigenvalues #' @param index numeric: vector of the index of the selected indices #' @param main character: main title #' @param nFactors numeric: if known, number of factors #' @param object structureSim: an object of the class \code{structureSim} #' @param vLine character: color of the vertical indicator line of the initial #' number of factors in the eigen boxplot #' @param x structureSim: an object of the class \code{structureSim} #' @param xlab character: x axis label #' @param ylab character: y axis label #' @param ... variable: additional parameters to give to the \code{boxplot}, #' \code{plot}, \code{print} and \code{summary} functions #' @return Generic functions for the \code{structureSim} class: #' \item{boxplot.structureSim }{ graphic: plots an eigen boxplot } #' \item{is.structureSim}{ logical: is the object of the class #' \code{structureSim}? } \item{plot.structureSim }{ graphic: plots an index #' accuracy plot} \item{print.structureSim }{ numeric: data.frame of statistics #' about the number of components/factors to retain according to different #' indices following a \code{structureSim} simulation} #' \item{summary.structureSim }{ list: two data.frames, the first with the #' details of the simulated eigenvalues, the second with the details of the #' simulated indices} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{nFactors-package}} #' @references #' #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. 
#' #' @export #' @importFrom stats median #' @keywords multivariate #' @examples #' #' \dontrun{ #' ## INITIALISATION #' library(xtable) #' library(nFactors) #' nFactors <- 3 #' unique <- 0.2 #' loadings <- 0.5 #' nsubjects <- 180 #' repsim <- 10 #' var <- 36 #' pmjc <- 12 #' reppar <- 10 #' zwick <- generateStructure(var=var, mjc=nFactors, pmjc=pmjc, #' loadings=loadings, #' unique=unique) #' #' ## SIMULATIONS #' mzwick <- structureSim(fload=as.matrix(zwick), reppar=reppar, #' repsim=repsim, details=TRUE, #' N=nsubjects, quantile=0.5) #' #' ## TEST OF structureSim METHODS #' is(mzwick) #' summary(mzwick, index=1:5, eigenSelect=1:10, digits=3) #' print(mzwick, index=1:10) #' plot(x=mzwick, index=c(1:10), cex.axis=0.7, col="red") #' boxplot(x=mzwick, nFactors=3, vLine="blue", col="red") #' } #' ## ................................................................. summary.structureSim <- function(object, index=c(1:15), eigenSelect=NULL, ...) { if (!is.structureSim(object)) stop("Not a structureSim object") if (is.null(eigenSelect)) eigenSelect <- c(1:dim(object$details$eigenvalues)[2]) cat("Report For a structureSim Class \n\n") NextMethod() cat(paste("Simulated eigenvalues","\n\n")) object$details$eigenvalues <- round(object$details$eigenvalues[,eigenSelect], ...) colnames(object$details$eigenvalues) <- paste("E",eigenSelect,sep="") print(object$details$eigenvalues) cat(paste("\n\n Number of factors retained by each index for each simulation","\n\n")) object$details$components <- round(object$details$components[,index], ...) print(object$details$components) } # summary(mzwick, index=1:5, eigenSelect=1:10, digits=2) # summary.structureSim(x) # summary(x) ## ................................................................. #' @rdname structureSimObjectMethods #' @export ## ................................................................. print.structureSim <- function(x, index=NULL, ...) { if (!is.structureSim(x)) stop("Not a structureSim object") if (is.null(index)) index <- c(1:dim(x$nFactors)[2]) res <- x$nFactors[,index] print(res, ...) } # print(mzwick, index=c(1:13), 2) # print.structureSim(x) # print(x) ## ................................................................. #' @rdname structureSimObjectMethods #' @export ## ................................................................. boxplot.structureSim <- function(x, nFactors=NULL, eigenSelect=NULL, vLine="green", xlab="Factors", ylab="Eigenvalues", main="Eigen Box Plot", ...) { if (!is.structureSim(x)) stop("Not a structureSim object") if (is.null(eigenSelect)) eigenSelect <- c(1:dim(x$details$eigenvalues)[2]) boxplot(x$details$eigenvalues[,eigenSelect], xlab=xlab, ylab=ylab, main=main, ...) abline(v=nFactors, lty=2, col=vLine) } # boxplot(mzwick, nFactors=3, eigenSelect=1:5, vLine="blue", col="red") # boxplot.structureSim(x) # boxplot(x) ## ................................................................. #' @rdname structureSimObjectMethods #' @export ## ................................................................. plot.structureSim <- function(x, nFactors=NULL, index=NULL, main="Index Acuracy Plot", ...) { if (!is.structureSim(x)) stop("Not a structureSim object") if (is.null(index)) index <- c(1:dim(x$details$components)[2]) if (!exists("col") == TRUE) col <- "black" ylab <- "Average Number of Factors Retained" tx <- t(x[[2]][,index]) tx <- data.frame(Index=rownames(tx),tx) colnames(tx)[2] <- "Mean" tx <- tx[order(tx[,1]),] plot(Mean ~ Index, type="n", data=tx, main=main, ...) 
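  # type="n" draws an empty frame only; the reference lines and the vertical
  # segments for each index are then added on top of it below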
#plot(Mean ~ Index, data=tx, cex.lab=1, cex.axis=0.7, type="n", ylab=ylab) abline(h=nFactors, ...) abline(h=median(tx[2,], na.rm=TRUE), lty=2, col="black") for (i in 1:length(tx[,2])) lines(y=c(0,tx[i,2]), x=c(i,i), lty=2) } # plot.structureSim(x=mzwick, nFactors=3, index=c(1:10), cex.axis=0.7, col="red") # plot.structureSim(x) # plot(x) ## ................................................................. #' @rdname structureSimObjectMethods #' @export ## ................................................................. is.structureSim <- function(object) { if (class(object) == "structureSim") return(TRUE) else return(FALSE) } # is.structureSim(mzwick) # is.structureSim(x) ## ................................................................. nFactors/R/eigenBootParallel.r0000644000176200001440000001065313621043451016013 0ustar liggesusers# ALLOW COMPUTATIONS WITH DISCRETE DATA AS WELL #' Bootstrapping of the Eigenvalues From a Data Frame #' #' The \code{eigenBootParallel} function samples observations from a #' \code{data.frame} to produce correlation or covariance matrices from which #' eigenvalues are computed. The function returns statistics about these #' bootstrapped eigenvalues. Their means or their quantiles could be used later #' to replace the eigenvalues inputted to a parallel analysis. The #' \code{eigenBootParallel} function can also compute random eigenvalues from empirical #' data by column permutation (Buja and Eyuboglu, 1992). #' #' #' @param x data.frame: data from which a correlation matrix will be obtained #' @param quantile numeric: eigenvalues quantile to be reported #' @param nboot numeric: number of bootstrap samples #' @param option character: \code{"permutation"} or \code{"bootstrap"} #' @param cor logical: if \code{TRUE} computes eigenvalues from a correlation #' matrix, else from a covariance matrix (\code{eigenComputes}) #' @param model character: bootstraps from a principal component analysis #' (\code{"components"}) or from a factor analysis (\code{"factors"}) #' @param ... variable: additional parameters to give to the \code{cor} or #' \code{cov} functions #' @return \item{values}{ data.frame: mean, median, quantile, standard #' deviation, minimum and maximum of bootstrapped eigenvalues } #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{principalComponents}}, #' \code{\link{iterativePrincipalAxis}}, \code{\link{rRecovery}} #' @references Buja, A. and Eyuboglu, N. (1992). Remarks on parallel analysis. #' \emph{Multivariate Behavioral Research, 27}(4), 509-540. #' #' Zwick, W. R. and Velicer, W. F. (1986). Comparison of five rules for #' determining the number of components to retain. \emph{Psychological #' Bulletin, 99}, 432-442. #' @keywords multivariate #' @export #' @importFrom stats cov #' @examples #' #' # ....................................................... #' # Example from the iris data #' eigenvalues <- eigenComputes(x=iris[,-5]) #' #' # Permutation parallel analysis distribution #' aparallel <- eigenBootParallel(x=iris[,-5], quantile=0.95)$quantile #' #' # Number of components to retain #' results <- nScree(x = eigenvalues, aparallel = aparallel) #' results$Components #' plotnScree(results) #' # ...................................................... #' #' # ...................................................... 
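#' # An added illustration: bootstrapped eigenvalue means used as the
#' # parallel criterion instead of a quantile
#' aparallelBoot <- eigenBootParallel(x=iris[,-5], option="bootstrap")$mean
#' nScree(x=eigenvalues, aparallel=aparallelBoot)$Components
#'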
#' # Bootstrap distributions study of the eigenvalues from iris data #' # with different correlation methods #' eigenBootParallel(x=iris[,-5],quantile=0.05, #' option="bootstrap",method="pearson") #' eigenBootParallel(x=iris[,-5],quantile=0.05, #' option="bootstrap",method="spearman") #' eigenBootParallel(x=iris[,-5],quantile=0.05, #' option="bootstrap",method="kendall") #' "eigenBootParallel" <- function(x, quantile=0.95, nboot=30, option="permutation", cor=TRUE, model="components", ...) { if (eigenFrom(x) != "data") stop("Only data from a data.frame must be used as input") x <- data.frame(x) res <- data.frame(matrix(NA, ncol=dim(x)[2], nrow=nboot)) if (model == "components") { names(res) <- paste("C", 1:dim(x)[2], sep="") } else names(res) <- paste("F", 1:dim(x)[2], sep="") if (option == "permutation") { for (i in 1:nboot) { rPerm <- apply(x,2,sample, replace=TRUE) if (cor == TRUE) corY <- cor(rPerm, ...) if (cor == FALSE) corY <- cov(rPerm, ...) if (model == "factors") corY <- corFA(corY, method="ginv") res[i,] <- eigen(corY, only.values=TRUE)$values } } if (option == "bootstrap") { for (i in 1:nboot) { rBoot <- sample(1:dim(x)[1], dim(x)[1], replace=TRUE) if (cor == TRUE) corY <- cor(x[rBoot,], ...) if (cor == FALSE) corY <- cov(x[rBoot,], ...) if (model == "factors") corY <- corFA(corY, method="ginv") res[i,] <- eigen(corY, only.values=TRUE)$values #if (cor == TRUE) res[i,] <- eigen(cor(x[rBoot,], ...), only.values=TRUE)$values #if (cor == FALSE) res[i,] <- eigen(cov(x[rBoot,], ...), only.values=TRUE)$values } } res <- data.frame(t(moreStats(res, quantile=quantile))) return(res) } nFactors/R/iterativePrincipalAxis.r0000644000176200001440000001234213620626123017103 0ustar liggesusers#' Iterative Principal Axis Analysis #' #' The \code{iterativePrincipalAxis} function returns a principal axis analysis with #' iterated communality estimates. Four different choices of initial communality #' estimates are given: maximum correlation, multiple correlation (usual and #' generalized inverse) or estimates based #' on the sum of the squared principal component analysis loadings. Generally, statistical #' packages initialize the communalities at the multiple correlation value. #' Unfortunately, this strategy cannot always deal with singular correlation or #' covariance matrices. #' If a generalized inverse, the maximum correlation or the estimated communalities #' based on the sum of loadings #' are used instead, then a solution can be computed. 
#' #' #' @param R numeric: correlation or covariance matrix #' @param nFactors numeric: number of factors to retain #' @param communalities character: initial values for communalities (\code{"component", "maxr", "ginv" or "multiple"}) #' @param iterations numeric: maximum number of iterations to obtain a solution #' @param tolerance numeric: minimal difference in the estimated communalities after a given iteration #' #' @return values numeric: variance of each component #' @return varExplained numeric: variance explained by each component #' @return varExplained numeric: cumulative variance explained by each component #' @return loadings numeric: loadings of each variable on each component #' @return iterations numeric: maximum number of iterations to obtain a solution #' @return tolerance numeric: minimal difference in the estimated communalities after a given iteration #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' \cr \cr David Magis \cr Departement de mathematiques \cr Universite de Liege #' \cr \email{David.Magis@@ulg.ac.be} #' #' @references #' Kim, J.-O. and Mueller, C. W. (1978). \emph{Introduction to factor analysis. What it #' is and how to do it}. Beverly Hills, CA: Sage. #' #' Kim, J.-O. and Mueller, C. W. (1987). \emph{Factor analysis. Statistical methods and #' practical issues}. Beverly Hills, CA: Sage. #' #' @export # #' @importFrom MASS ginv #' @keywords multivariate #' @seealso \code{\link{componentAxis}}, \code{\link{principalAxis}}, \code{\link{rRecovery}} #' #' @examples #' ## ................................................ #' # Example from Kim and Mueller (1978, p. 10) #' # Population: upper diagonal #' # Simulated sample: lower diagnonal #' R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, #' .5600, 1.000, .4749, .2196, .1912, .2979, #' .4800, .4200, 1.000, .2079, .2010, .2445, #' .2240, .1960, .1680, 1.000, .4334, .3197, #' .1920, .1680, .1440, .4200, 1.000, .4207, #' .1600, .1400, .1200, .3500, .3000, 1.000), #' nrow=6, byrow=TRUE) #' #' # Factor analysis: Principal axis factoring with iterated communalities #' # Kim and Mueller (1978, p. 23) #' # Replace upper diagonal with lower diagonal #' RU <- diagReplace(R, upper=TRUE) #' nFactors <- 2 #' fComponent <- iterativePrincipalAxis(RU, nFactors=nFactors, #' communalities="component") #' fComponent #' rRecovery(RU,fComponent$loadings, diagCommunalities=FALSE) #' #' fMaxr <- iterativePrincipalAxis(RU, nFactors=nFactors, #' communalities="maxr") #' fMaxr #' rRecovery(RU,fMaxr$loadings, diagCommunalities=FALSE) #' #' fMultiple <- iterativePrincipalAxis(RU, nFactors=nFactors, #' communalities="multiple") #' fMultiple #' rRecovery(RU,fMultiple$loadings, diagCommunalities=FALSE) #' # ....................................................... #' iterativePrincipalAxis <- function(R, nFactors=2, communalities="component", iterations=20, tolerance=0.001) { if (communalities == "component") diag(R) <- componentAxis(R)$communalities if (communalities == "maxr") { RT <- R; diag(RT) <- 0; diag(R) <- apply(RT, 1, max)} if (communalities == "ginv") diag(R) <- sqrt(1-1/diag(ginv(R))) if (communalities == "multiple") { if (all(eigen(R)$values > 0)) diag(R) <- sqrt(1-1/diag(solve(R))) # Gorsuch (1983, p. 
106) else return("Not all eigenvalues are grater than 0") # Verication of positive definiteness } iter <- 1; tol <- 1 while ((iter < iterations) && (tol > tolerance)) { # for (i in (1:iterations)) oldR <- diag(R) diag(R) <- componentAxis(R, nFactors)$communalities tol <- max(abs(diag(R) - oldR)) iter <- iter + 1 } if (tol > tolerance) warning("Maximum number of iterations needed before the desired tolerance: cautious solution.") iapa <- componentAxis(R, nFactors) iapa <- list(values = iapa$values, varExplained = iapa$varExplained, cumVarExplained = iapa$cumVarExplained, loadings = iapa$loadings, iterations = iter, tolerance = tol) return(iapa) } nFactors/R/corFA.r0000644000176200001440000000312113621043165013405 0ustar liggesusers#' Insert Communalities in the Diagonal of a Correlation or a Covariance Matrix #' #' This function inserts communalities in the diagonal of a correlation/covariance matrix. #' #' @param R An integer matrix or a data.frame of correlations #' @param method A character vector: inversion method #' @return A correlation matrix with coerced variables with communalities in the diagonal. #' @author Gilles Raiche, Universite du Quebec a Montreal (\email{raiche.gilles@@uqam.ca}) #' @export #' @importFrom MASS ginv #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, #' \code{\link{plotnScree}}, \code{\link{plotParallel}} #' #' #' @examples #' #' ## LOWER CORRELATION MATRIX WITH ZEROS ON UPPER PART #' ## From Gorsuch (table 1.3.1) #' gorsuch <- c( #' 1,0,0,0,0,0,0,0,0,0, #' .6283, 1,0,0,0,0,0,0,0,0, #' .5631, .7353, 1,0,0,0,0,0,0,0, #' .8689, .7055, .8444, 1,0,0,0,0,0,0, #' .9030, .8626, .6890, .8874, 1,0,0,0,0,0, #' .6908, .9028, .9155, .8841, .8816, 1,0,0,0,0, #' .8633, .7495, .7378, .9164, .9109, .8572, 1,0,0,0, #' .7694, .7902, .7872, .8857, .8835, .8884, .7872, 1,0,0, #' .8945, .7929, .7656, .9494, .9546, .8942, .9434, .9000, 1,0, #' .5615, .6850, .8153, .7004, .6583, .7720, .6201, .6141, .6378, 1) #' #' ## UPPER CORRELATION MATRIX FILLED WITH UPPER CORRELATION MATRIX #' gorsuch <- makeCor(gorsuch) #' #' ## REPLACE DIAGONAL WITH COMMUNALITIES #' gorsuchCfa <- corFA(gorsuch) #' gorsuchCfa #' #' @keywords manip "corFA" <- function(R, method="ginv") { R <- as.matrix(R) if (method == "ginv") return(R - ginv(diag(diag(ginv(R))))) } nFactors/R/moreStats.r0000644000176200001440000000400513621043502014371 0ustar liggesusers#' Statistical Summary of a Data Frame #' #' This function produces another summary of a \code{data.frame}. This function #' was proposed in order to apply some functions globally on a \code{data.frame}: #' \code{quantile}, \code{median}, \code{min} and \code{max}. The usual \emph{R} #' version cannot do so. #' #' @param x numeric: matrix or \code{data.frame} #' @param quantile numeric: quantile of the distribution #' @param show logical: if \code{TRUE} prints the quantile choosen #' @return numeric: \code{data.frame} of statistics: mean, median, quantile, standard deviation, minimum and maximum #' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' #' @seealso \code{\link{plotuScree}}, \code{\link{nScree}}, \code{\link{plotnScree}}, \code{\link{plotParallel}} #' @export #' @importFrom stats sd median #' @keywords multivariate #' @examples #' ## ................................................ 
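#' ## A SMALL ADDED ILLUSTRATION ON A KNOWN MATRIX (two columns, n = 10)
#' moreStats(cbind(x=1:10, y=(1:10)^2), quantile=0.90)
#'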
#' ## GENERATION OF A MATRIX OF 100 OBSERVATIONS AND 10 VARIABLES #' x <- matrix(rnorm(1000),ncol=10) #' #' ## STATISTICS #' res <- moreStats(x, quantile=0.05, show=TRUE) #' res #' moreStats <- function(x, quantile=0.95, show=FALSE) { cent <- quantile # The old parameter was labeled cent x <- data.frame(x) xMean <- sapply(x, mean) # mean(x) xSd <- sapply(x, sd) # sd(x) xMin <- xMax <- xMedian <- xQuantile <- numeric(ncol(x)) for (i in 1:ncol(x)) { xMin[i] <- min(x[,i]) xMax[i] <- max(x[,i]) xMedian[i] <- median(x[,i]) xQuantile[i] <- quantile(x[,i],probs=cent,names=FALSE, na.rm=TRUE) # quantile(rnorm(1000),probs=cent) } names <- colnames(x) results <- rbind(mean=xMean, median=xMedian, quantile=xQuantile, sd=xSd, min=xMin, max=xMax) if (show==TRUE) { cat("------------------------ \n") cat("Quantile specified:", cent, "\n") cat("------------------------ \n") } return(results) } nFactors/R/aphabetical.R0000644000176200001440000000000213622033401014574 0ustar liggesusers nFactors/R/diagReplace.r0000644000176200001440000000400113620574061014614 0ustar liggesusers#' Replacing Upper or Lower Diagonal of a Correlation or Covariance Matrix #' #' The \code{diagReplace} function returns a modified correlation or covariance #' matrix by replacing upper diagonal with lower diagonal, or lower diagonal #' with upper diagonal. #' #' @param R numeric: correlation or covariance matrix #' @param upper logical: if \code{TRUE} upper diagonal is replaced with lower #' diagonal. If \code{FALSE}, lower diagonal is replaced with upper diagonal. #' @return \item{R }{ numeric: correlation or covariance matrix } #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @keywords manip #' @export #' @examples #' #' # ....................................................... #' # Example from Kim and Mueller (1978, p. 10) #' # Population: upper diagonal #' # Simulated sample: lower diagnonal #' R <- matrix(c( 1.000, .6008, .4984, .1920, .1959, .3466, #' .5600, 1.000, .4749, .2196, .1912, .2979, #' .4800, .4200, 1.000, .2079, .2010, .2445, #' .2240, .1960, .1680, 1.000, .4334, .3197, #' .1920, .1680, .1440, .4200, 1.000, .4207, #' .1600, .1400, .1200, .3500, .3000, 1.000), #' nrow=6, byrow=TRUE) #' #' # Replace upper diagonal with lower diagonal #' RU <- diagReplace(R, upper=TRUE) #' #' # Replace lower diagonal with upper diagonal #' RL <- diagReplace(R, upper=FALSE) #' # ....................................................... #' diagReplace <- function(R, upper=TRUE) { RT <- R if (upper == TRUE) { Rtranspose <- t(RT) # Replacing upper diagonal with lower diagonal RT[upper.tri(RT)] <- Rtranspose[upper.tri(Rtranspose)] return(RT) } if (upper == FALSE) { Rtranspose <- t(RT) # Replacing lower diagonal with upper diagonal RT[lower.tri(RT)] <- Rtranspose[lower.tri(Rtranspose)] return(RT) } } nFactors/R/nFactors.R0000644000176200001440000000132213635222743014141 0ustar liggesusers#' nFactors: Number of factor or components to retain in a factor analysis #' #' A package for determining the number of factor or components to retain in a factor analysis. #' The methods are all based on eigenvalues. #' # #' @section Foo functions: # #' BentlerParameters # #' componentAxis # #' corFA #' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' #' @references #' Raiche, G., Walls, T. 
A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' #' @docType package #' @name nFactors NULL nFactors/R/nScreeObjectMethods.r0000644000176200001440000001126513635224751016324 0ustar liggesusers#' Utility Functions for nScree Class Objects #' #' Utility functions for \code{nScree} class objects. Some of these functions #' are already implemented in the \code{nFactors} package, but are easier to #' use with generic functions like these. #' #' @rdname nScreeObjectMethods #' @param object nScree: an object of the class \code{nScree} #' @param ... variable: additionnal parameters to give to the \code{print} #' function with \code{print.nScree}, the \code{plotnScree} with #' \code{plot.nScree} or to the \code{summary} function with #' \code{summary.nScree} #' #' @return Generic functions for the nScree class: #' \item{is.nScree}{ logical: is the object of the class \code{nScree}? } #' \item{plot.nScree }{ graphic: plots a figure according to the #' \code{plotnScree} function} #' \item{print.nScree }{ numeric: vector of the #' number of components/factors to retain: same as the \code{Components} vector #' from the \code{nScree} object} #' \item{summary.nScree }{ data.frame: details #' of the results from a nScree analysis: same as the \code{Analysis} #' data.frame from the \code{nScree} object, but with easier control of the #' number of decimals with the \code{digits} parameter} #' #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} # #' @family nScree # #' @seealso \code{\link{plotuScree}}, \code{\link{plotnScree}}, # #' \code{\link{parallel}}, \code{\link{plotParallel}}, #' @references #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' @export #' @importFrom stats coef #' @keywords multivariate #' @examples #' #' ## INITIALISATION #' data(dFactors) # Load the nFactors dataset #' attach(dFactors) #' vect <- Raiche # Use the example from Raiche #' eigenvalues <- vect$eigenvalues # Extract the observed eigenvalues #' nsubjects <- vect$nsubjects # Extract the number of subjects #' variables <- length(eigenvalues) # Compute the number of variables #' rep <- 100 # Number of replications for the parallel analysis #' cent <- 0.95 # Centile value of the parallel analysis #' #' ## PARALLEL ANALYSIS (qevpea for the centile criterion, mevpea for the mean criterion) #' aparallel <- parallel(var = variables, #' subject = nsubjects, #' rep = rep, #' cent = cent #' )$eigen$qevpea # The 95 centile #' #' ## NOMBER OF FACTORS RETAINED ACCORDING TO DIFFERENT RULES #' results <- nScree(x=eigenvalues, aparallel=aparallel) #' #' is.nScree(results) #' results #' summary(results) #' #' ## PLOT ACCORDING TO THE nScree CLASS #' plot(results) #' #' # #' @method summary summary.nScree # #' @S3method summary summary.nScree ## ................................................................. summary.nScree <- function(object, ...) { if (!is.nScree(object)) stop("Not a nScree object") cat("Report For a nScree Class \n\n") #digits <- 2 NextMethod() cat(paste("Details:",object$Model,"\n\n")) object$Analysis[,c(1:5,7)] <- round(object$Analysis[,c(1:5,7)], ...) 
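  # Only the numeric columns of the analysis table are rounded; the remaining
  # columns hold the character flags that mark the OC and AF solutions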
print(object[[2]]) cat(paste("\n\n Number of factors retained by index","\n\n")) print(object[[1]]) } ## ................................................................. #' @rdname nScreeObjectMethods #' @param x Results of a previous \code{nScree} analysis # #' @family nScree #' @export # #' @method print print.nScree # #' @S3method print print.nScree ## ................................................................. print.nScree <- function(x, ...) { res <- x[[1]] print(res, ...) } ## ................................................................. #' @rdname nScreeObjectMethods # #' @param x Results of a previous \code{nScree} analysis # #' @family nScree #' @export # #' @method plot plot.nScree # #' @S3method plot plot.nScree ## ................................................................. plot.nScree <- function(x, ...) { plotnScree(x, ...) } ## ................................................................. #' @rdname nScreeObjectMethods # #' @family nScree #' @export # #' @method is is.nScree # #' @S3method is is.nScree ## ................................................................. is.nScree <- function(object) { if (class(object) == "nScree") return(TRUE) else return(FALSE) } ## ................................................................. nFactors/R/structureSim.r0000644000176200001440000001554013621044275015137 0ustar liggesusers#' Population or Simulated Sample Correlation Matrix from a Given Factor #' Structure Matrix #' #' The \code{structureSim} function returns a population and a sample #' correlation matrices from a predefined congeneric factor structure. #' #' #' @param fload matrix: loadings of the factor structure #' @param reppar numeric: number of replications for the parallel analysis #' @param repsim numeric: number of replications of the matrix correlation #' simulation #' @param N numeric: number of subjects #' @param quantile numeric: quantile for the parallel analysis #' @param model character: \code{"components"} or \code{"factors"} #' @param adequacy logical: if \code{TRUE} prints the recovered population #' matrix from the factor structure #' @param details logical: if \code{TRUE} outputs details of the \code{repsim} #' simulations #' @param r2limen numeric: R2 limen value for the R2 Nelson index #' @param all logical: if \code{TRUE} computes the Bentler and Yuan index (very #' long computing time to consider) #' @return \item{values}{ the output depends of the logical value of details. #' If \code{FALSE}, returns only statistics about the eigenvalues: mean, #' median, quantile, standard deviation, minimum and maximum. If \code{TRUE}, #' returns also details about the \code{repsim} simulations. If #' \code{adequacy} = \code{TRUE} returns the recovered factor structure} #' @author Gilles Raiche \cr Centre sur les Applications des Modeles de #' Reponses aux Items (CAMRI) \cr Universite du Quebec a Montreal\cr #' \email{raiche.gilles@@uqam.ca} #' @seealso \code{\link{principalComponents}}, #' \code{\link{iterativePrincipalAxis}}, \code{\link{rRecovery}} #' @references #' Raiche, G., Walls, T. A., Magis, D., Riopel, M. and Blais, J.-G. (2013). Non-graphical solutions #' for Cattell's scree test. Methodology, 9(1), 23-29. #' #' Zwick, W. R. and Velicer, W. F. (1986). Comparison of five rules #' for determining the number of components to retain. \emph{Psychological #' Bulletin, 99}, 432-442. 
#' @export #' @importFrom stats median factanal #' @importFrom graphics boxplot plot abline lines #' @importFrom psych sim.structure #' @keywords multivariate #' @examples #' #' \dontrun{ #' # ....................................................... #' # Example inspired from Zwick and Velicer (1986, table 2, p. 437) #' ## ................................................................... #' nFactors <- 3 #' unique <- 0.2 #' loadings <- 0.5 #' nsubjects <- 180 #' repsim <- 30 #' zwick <- generateStructure(var=36, mjc=nFactors, pmjc=12, #' loadings=loadings, #' unique=unique) #' ## ................................................................... #' #' # Produce statistics about a replication of a parallel analysis on #' # 30 sampled correlation matrices #' #' mzwick.fa <- structureSim(fload=as.matrix(zwick), reppar=30, #' repsim=repsim, N=nsubjects, quantile=0.5, #' model="factors") #' #' mzwick <- structureSim(fload=as.matrix(zwick), reppar=30, #' repsim=repsim, N=nsubjects, quantile=0.5, all=TRUE) #' #' # Very long execution time that could be used only with model="components" #' # mzwick <- structureSim(fload=as.matrix(zwick), reppar=30, #' # repsim=repsim, N=nsubjects, quantile=0.5, all=TRUE) #' #' par(mfrow=c(2,1)) #' plot(x=mzwick, nFactors=nFactors, index=c(1:14), cex.axis=0.7, col="red") #' plot(x=mzwick.fa, nFactors=nFactors, index=c(1:11), cex.axis=0.7, col="red") #' par(mfrow=c(1,1)) #' #' par(mfrow=c(2,1)) #' boxplot(x=mzwick, nFactors=3, cex.axis=0.8, vLine="blue", col="red") #' boxplot(x=mzwick.fa, nFactors=3, cex.axis=0.8, vLine="blue", col="red", #' xlab="Components") #' par(mfrow=c(1,1)) #' # ...................................................... #' } #' structureSim <- function(fload, reppar=30, repsim=100, N, quantile=0.95, model="components", adequacy=FALSE, details=TRUE, r2limen=0.75, all=FALSE) { simulation <- sim.structure(fx=fload, n=N, raw=TRUE) if (adequacy == TRUE) print(factanal(covmat=simulation$model, factors=dim(fload)[2])) # Verification of the adequacy of the model eigenvalues <- eigenComputes(simulation$r, cor=TRUE, model=model) variables <- length(eigenvalues) # Compute the number of variables aparallel <- parallel(var=dim(fload)[1],subject=N,rep=reppar,cent=quantile,model=model)$eigen$qevpea # The percentile components <- matrix(NA, ncol=15,nrow=repsim) analysis <- NA values <- matrix(NA, ncol=length(eigenvalues),nrow=repsim) for (i in 1:repsim) { simulation <- sim.structure(fx=fload, n=N, raw=TRUE) aparallel <- parallel(var=dim(fload)[1],subject=N,rep=reppar,cent=quantile,model=model)$eigen$qevpea eigenvalues <- eigenComputes(simulation$r, cor=TRUE, model=model) values[i,] <- eigenvalues results <- nScree(x=eigenvalues,aparallel = aparallel, cor=TRUE, model=model) components[i,(1:4)] <- t(results$Components) ### PERMUTATIONS if (eigenFrom(data.frame(simulation$observed)) == "data") { permutation <- eigenBootParallel(x=data.frame(simulation$observed), quantile=quantile, model=model)$quantile } results <- nScree(x=eigenvalues,aparallel = permutation, cor=TRUE, model=model) components[i, 5] <- results$Components$nparallel ### ... 
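    # Columns 6 to 15 store the solutions of the remaining indices: CNG, the
    # three multiple regression indices, the two standard error scree indices,
    # the Bartlett, Anderson and Lawley tests and, when all=TRUE, the Bentler
    # and Yuan test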
components[i, 6] <- nCng(x=eigenvalues, model=model)$nFactors components[i, (7:9)] <- nMreg(x=eigenvalues, model=model)$nFactors components[i, (10:11)] <- nSeScree(x=eigenvalues, model=model, r2limen=r2limen)$nFactors if (model == "components") { components[i, (12:14)] <- nBartlett(x=eigenvalues, N=N, alpha=1-quantile, cor=TRUE, correction=TRUE)$nFactors if (all == TRUE) { cat(paste("-- repsim = ", i, "/",repsim,"\n", sep="")) components[i, (15)] <- nBentler(x=eigenvalues, N=N, alpha=1-quantile, cor=TRUE)$nFactors } } # analysis <- rbind(analysis, results$Analysis) #components[2,] <- t(results$Components);components } names <- colnames(results$Components) names <- c("oc", "af", "par", "mean.eig", "per") components <- data.frame(components) colnames(components) <- c(names,"cng","b","t.b","p.b","sescree","R2","Bartlett","Anderson","Lawley","Bentler") if (details == TRUE) analysis <- list(components=components, eigenvalues=values) if (repsim > 1) components <- moreStats(components, quantile=quantile) else components <- NA res <- list(details=analysis, nFactors=components) class(res) <- 'structureSim' return(res) } ## LIGNE 21 MODIFIEE: ETAIT quantile=0.95 ## LIGNE 42 MODIFIEE: EAIT c("oc", "af", "par", "per", "mean.eig") nFactors/MD50000644000176200001440000000675113637562616012371 0ustar liggesusers35d5998ae24baecdfc0dd82cc656bcb6 *DESCRIPTION 65a4508d30752594927bde4b80f67147 *NAMESPACE d7e767d105de490d3063a8c33d4d26c0 *NEWS 81051bcc2cf1bedf378224b0a93e2877 *R/aphabetical.R bba2fd4ed7f8d28c3001742dc48a5a01 *R/bentlerParameters.r f7a33fe660ba064be02bf249ca7f578b *R/componentAxis.r 5c1512456af5533f4e128d217b6b4ccf *R/corFA.r f9942bcd48cad078948f6d973279d1ec *R/data.R 17aedb02a245055893433e3d4bc86417 *R/diagReplace.r 3a87203df40b0187bac1f91ef02f8eae *R/eigenBootParallel.r e5265056a1fb335493cf4d7812498d31 *R/eigenComputes.r f82d5fab96a63dfbd49e937b93c0ba79 *R/eigenFrom.r a1caad26ac35e4511d73f0a432a9325f *R/generateStructure.r f913f629e996237554cfd1d194b75747 *R/iterativePrincipalAxis.r 51fab34787e10f5ec156a190125c831e *R/makeCor.r e5ac77eecb0f34c765961d6e0353491d *R/moreStats.r 50ec6d056ccfd0f87b81443204d015cd *R/nBartlett.r 0364d949311142f2ff821ef5b8f041cf *R/nBentler.r 47302b89a7235e83e59aabaec0162905 *R/nCng.r d243316a5291bf5e7a2423a774f602ab *R/nFactors.R 91195761ce8d68f17d4ea3b6b7c6bb6e *R/nFactorsObjectMethods.r 5d0b042badbe0a2612e62fed25dcc2e0 *R/nMreg.r f1f0cf62b3fc57d869a6f691c4a596a5 *R/nScree.R 72418533c2da75d02b7c340e6cf06e93 *R/nScreeObjectMethods.r 9d57228783175de408cdcfef7a5b16aa *R/nSeScree.r 7601c416c7124c4492cb67f536b61d1b *R/parallel.R 02ed9acac358fd6bccfe2c23540f982d *R/plotParallel.R 0d76c0a738cee93fdc5161fe00845654 *R/plotnScree.R 463900511f61d13c8597a06fad638017 *R/plotuScree.R a4438c4e4dfa111246cd5d8cdb1b9fa8 *R/principalAxis.r 8bdf069688cedf2e86120d6740132a59 *R/principalComponents.r 7464dc674338c9211e096e864d248df2 *R/rRecovery.r 66c895b316839f641aea24600750500a *R/structureSim.r ca335a4f10cb7c3012ef578169cd2b7f *R/structureSimObjectMethods.r ad9e7ab938dd927b8afbf8c328e0ea83 *R/studySim.r 566eaca37f817829ed73d0675955df39 *TODO 9c56378595dfdc3e9b3d080efb276f71 *data/dFactors.rda 36526c8ed6f66cf0c77f1b8af82b287f *man/bentlerParameters.Rd f1e4a8a91b1740ca946932e61a5c6261 *man/componentAxis.Rd 45459a9c10a1f17e2bddb3a419af6932 *man/corFA.Rd ee239143776074ed05ef31895b8e479c *man/dFactors.Rd 9022144097ca1ab4dffef57b50747d29 *man/diagReplace.Rd 55e0c287dc0b734d0b474e7d1ee9fbde *man/eigenBootParallel.Rd eae2bbf59bd1f1c50d3baf5b34cf7b56 *man/eigenComputes.Rd 
d7c218e8ffafcce87592c8733205d42b *man/eigenFrom.Rd 79e52de283e924ef5f6e628e60c550b3 *man/figures/essai.png.png c6eb961e377539cb175f711297ab3d29 *man/generateStructure.Rd 3e388c70e1c5e41682f48f2c0a07b4aa *man/iterativePrincipalAxis.Rd 52c905148b120feac65c62b21f67e60a *man/makeCor.Rd c199fedf6be6626b533e94424e43676b *man/moreStats.Rd adc80eed5f88c0f6a82fa77d63a6d873 *man/nBartlett.Rd cca33e6c0e433fcdf477cb9e2d4e9c8b *man/nBentler.Rd 60f8fb9e56e0c6157ccc215b41b45b28 *man/nCng.Rd 25f934e5a457869d2879f80253e57a86 *man/nFactors.Rd ba9504072fcca5e902754858ac22fbb6 *man/nFactorsObjectMethods.Rd 02ae845252bd943933f7173c3b4b4bea *man/nMreg.Rd 43f85fd5cfaaf3876a4eebb113d7c422 *man/nScree.Rd 4e8192c73723aaf4277c25c8d100ca9c *man/nScreeObjectMethods.Rd fcbe2c23bb8dacdc662ad3173a85dd7c *man/nSeScree.Rd 448de44b107d2708df30290d0cd1601d *man/parallel.Rd 3d52f1f5fb45f9f465f20c0668ed86c8 *man/plotParallel.Rd 592c36c5045aa47cbc13744709bc9433 *man/plotnScree.Rd f3b3982f6c52a22dc7353b5c6ca45015 *man/plotuScree.Rd 60f682e37ee838ba83222039275e6492 *man/principalAxis.Rd a02ea4a0c63601086a05032fdcd22f09 *man/principalComponents.Rd a95b1e87ce67cfbcd72b9fd4467aee18 *man/rRecovery.Rd c12071cc3aa8f6fe3fe00536c750b2c6 *man/structureSim.Rd ab91ce2dc4b7aca524bc813d2f7f9469 *man/structureSimObjectMethods.Rd 2c220e4b27a6d1bf1f220fb9e729f801 *man/studySim.Rd